index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
def balance_for_positive_class(positives_table, metrics, maximum_acceptable_difference):
    """Check the "balance for the positive class" fairness criterion.

    For each group, compute the expected predicted probability over the
    actual positives (TP + FN); the criterion is satisfied when the spread
    between the highest and lowest group expectation does not exceed
    ``maximum_acceptable_difference``.

    :param positives_table: per-group dict mapping probability value (as a
        string key) -> count of positive instances with that probability.
    :param metrics: per-group dict with at least "TP" and "FN" counts.
    :param maximum_acceptable_difference: largest tolerated spread.
    :returns: (satisfies: bool, expected_values: list of per-group means)

    Fix/generalization vs. the original: each group now iterates over its
    OWN probability keys (the original read group 0's keys for every group,
    raising KeyError when key sets differed), and an empty table no longer
    crashes.
    """
    expected_values = []
    for group_counts, group_metrics in zip(positives_table, metrics):
        positives = group_metrics["TP"] + group_metrics["FN"]
        expected = 0
        if positives > 0:
            # mean predicted probability over this group's actual positives
            expected = sum(float(prob) * count / positives
                           for prob, count in group_counts.items())
        expected_values.append(expected)
    satisfies_balance_for_positive_class = True
    if expected_values and \
            max(expected_values) - min(expected_values) > maximum_acceptable_difference:
        satisfies_balance_for_positive_class = False
    return satisfies_balance_for_positive_class, expected_values
|
985,901 | 7d4c8ed4513c8b9ca8066bcd14bac329e15dc039 | from rest_framework import serializers
from django.contrib.auth import authenticate
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from rest_framework import exceptions
from Kameteeapp.models import *
# user register
class RegisterSerializer(serializers.Serializer):
    """Register a new user keyed by mobile number.

    Creates the auth ``User`` (password hashed via ``set_password``) plus
    its empty ``UserDetails`` and ``ProfilePic`` rows, and attaches the new
    user to the validated data under ``data['user']``.
    """
    MobileNumber = serializers.CharField()
    Firstname = serializers.CharField()
    LastName = serializers.CharField()
    Password = serializers.CharField()

    def validate(self, data):
        username = data.get("MobileNumber", "")
        Firstname = data.get("Firstname", "")
        LastName = data.get("LastName", "")
        Email = ''
        Password = data.get("Password", "")
        # check whether a user already exists for this mobile number;
        # exists() avoids counting every matching row
        if User.objects.filter(username=username).exists():
            msg = "User already exist with given mobile number"
            raise exceptions.ValidationError(msg)
        if username and Password and Firstname and LastName:
            # set_password() hashes the password before save; the raw
            # password is no longer also passed to the constructor
            user = User(username=username, first_name=Firstname,
                        last_name=LastName, email=Email)
            user.set_password(Password)
            user.save()
            UserDetails(User=user).save()
            ProfilePic(User=user, ProfilePic='').save()
            data['user'] = user
        else:
            msg = "Must provide username and password FirstName and LastName both."
            raise exceptions.ValidationError(msg)
        return data
# user Login
class LoginSerializer(serializers.Serializer):
    """Authenticate a user by mobile number and password."""
    MobileNumber = serializers.CharField()
    Password = serializers.CharField()

    def validate(self, data):
        MobileNumber = data.get("MobileNumber", "")
        Password = data.get("Password", "")
        # FIX: the original raised bare ValueError, which DRF surfaces as a
        # 500 server error; ValidationError yields a proper 400 response and
        # matches RegisterSerializer's behavior.
        if MobileNumber and Password:
            user = authenticate(username=MobileNumber, password=Password)
            if user is not None:
                if user.is_active:
                    data["user"] = user
                else:
                    msg = "User is deactivated. "
                    raise exceptions.ValidationError(msg)
            else:
                msg = "Unable to login with given credentials."
                raise exceptions.ValidationError(msg)
        else:
            msg = "Must provide username and password both."
            raise exceptions.ValidationError(msg)
        return data
class AddGroupUserSerializer(serializers.Serializer):
    """Add a member to an open, active group.

    Rejects the request when the group is closed, inactive, or full, or when
    the member name / mobile number already exists in the group.
    NOTE(review): this validate() performs the save as a side effect, which
    is unusual for a serializer — kept for backward compatibility.
    """
    GroupID = serializers.IntegerField()
    MobileNumber = serializers.IntegerField()
    UserName = serializers.CharField()

    def validate(self, data):
        GroupID = data.get("GroupID", "")
        Mobilenumber = data.get("MobileNumber", "")
        UserName = data.get("UserName", "")
        group = UserGroup.objects.get(id=GroupID)
        member_count = GroupMember.objects.filter(UserGroup=GroupID).count()
        # FIX: the original raised bare ValueError (500 in DRF) — use
        # ValidationError for every rejection, consistent with the other
        # serializers in this module.
        # groupStatus 5 appears to mean "open" — TODO confirm against model
        if group.groupStatus != 5:
            raise exceptions.ValidationError("Group is no longer in open state")
        elif group.isActive == 0:
            raise exceptions.ValidationError("Group is no longer active")
        elif group.usercount == member_count:
            raise exceptions.ValidationError("Group Member count filled")
        elif GroupMember.objects.filter(UserGroup=GroupID, UserName=UserName).exists():
            raise exceptions.ValidationError("Name and Mobile Number both should be unique.")
        elif GroupMember.objects.filter(UserGroup=GroupID, Mobilenumber=Mobilenumber).exists():
            raise exceptions.ValidationError("Name and Mobile Number both should be unique.")
        elif GroupID and Mobilenumber:
            # reuse the group fetched above instead of a second .get() query
            GroupMember(UserGroup=group, Mobilenumber=Mobilenumber,
                        UserName=UserName).save()
            return data
        else:
            raise exceptions.ValidationError("Must provide all required field.")
class UserGroupSerializer(serializers.ModelSerializer):
    """Full read/write representation of a UserGroup."""

    class Meta:
        model = UserGroup
        fields = (
            "id", "groupname", "startDate", "usercount", "createBy",
            "isActive", "AmountPerUser", "sarkriGhata", "groupStatus",
            "groupbiddingtype", "biddgingCycle",
        )
class GroupMemberSerializer(serializers.ModelSerializer):
    """Serialize a single member of a group."""

    class Meta:
        model = GroupMember
        fields = ("id", "UserGroup", "Mobilenumber", "UserName", "isAdmin")
class GroupBiddingSerializer(serializers.ModelSerializer):
    """Serialize one bidding round of a group."""

    class Meta:
        model = GroupBidding
        fields = (
            "id", "UserGroup", "ActualAmount", "selectedName",
            "SelectedMobileNumber", "biddingAmount", "IsSelected",
        )
class StatEndGroupUserSerializer(serializers.ModelSerializer):
    """UserGroup representation including bidding date/flag, used when
    starting or ending a group."""

    class Meta:
        model = UserGroup
        fields = (
            "id", "groupname", "startDate", "usercount", "createBy",
            "isActive", "AmountPerUser", "sarkriGhata", "groupbiddingtype",
            "groupStatus", "biddingdate", "biddgingCycle", "biddingflag",
        )
class GroupBiddingEntriesSerializer(serializers.ModelSerializer):
    """Serialize an individual bid entry within a bidding round."""

    class Meta:
        model = GroupBiddingEntries
        fields = (
            "id", "GroupBidding", "TotalAmount", "MinCyclelossAmount",
            "BidlossAmount", "selectedName", "SelectedMobileNumber",
            "Cyclenumber", "AddedBy", "created_at", "IsSelected",
        )
class UserDetailsSerializer(serializers.ModelSerializer):
    """Extra profile details attached to an auth user."""

    class Meta:
        model = UserDetails
        fields = ("DateofBirth", "AlternateMobileNumber")
class ProfilePicSerializer(serializers.ModelSerializer):
    """Expose only the profile picture field."""

    class Meta:
        model = ProfilePic
        fields = ("ProfilePic",)
class ProfileSerializer(serializers.ModelSerializer):
    """Read-only view over the Django auth User for profile screens."""

    class Meta:
        model = User
        fields = (
            'username', 'first_name', 'last_name', 'email',
            'is_staff', 'is_active', 'date_joined', 'is_superuser',
        )
class GroupPaymentHistorySerializer(serializers.ModelSerializer):
    """Serialize one member's payment record for a bidding cycle."""

    class Meta:
        model = GroupPaymentHistory
        fields = (
            'id', 'GroupBidding', 'Mobilenumber', 'UserName',
            'ActualAmount', 'AmountPaid', 'AmountDue', 'Cyclenumber',
            'IsReceived', 'Status', 'RecivedDate',
        )
class GroupAmountRecivedSerializer(serializers.ModelSerializer):
    """Serialize an amount-received record for a bidding cycle."""

    class Meta:
        model = AmountRecived
        fields = (
            'id', 'ActualAmount', 'BiddingAmount', 'MinlossAmount',
            'ActualRecived', 'Cyclenumber', 'Amountsend',
            'RevicerName', 'Recivermobile', 'RecivedDate',
        )
class GroupMessageSerializer(serializers.ModelSerializer):
    """Serialize a message posted inside a group."""

    class Meta:
        model = GroupMessage
        fields = (
            'id', 'UserGroup', 'UserName', 'UserMobile',
            'MessageDescription', 'created_at',
        )
|
985,902 | 7882735e9a32934a080b9a4d1f9921f87807fbbe | # ==============================================================================================
# Plivo Assignment
# Author :: Jitender Singh Rana
# mail id :: jeet841991@gmail.com
# Date :: 03-Dec-2018
# ===============================================================================================
"""
Class containing methods for Login Page
click_create_an_app : Method to click on create app button
enter_username : Method to enter Username
enter_password : Method to enter password
click_login_button : Method to click on login button
"""
import time
class LoginPage(object):
    '''
    Page object containing all interactions with the login page.
    '''

    def __init__(self, driver, log):
        """Initializing method for LoginPage class.

        :param driver: selenium WebDriver instance (required).
        :param log: logger used for step reporting.
        :raises ValueError: if no driver is supplied (was a bare Exception;
            ValueError is more specific and still caught by callers that
            catch Exception).
        """
        if not driver:
            raise ValueError('driver not provided')
        self.driver = driver
        self.log = log

    def click_create_an_app(self, wait_seconds=5):
        """Click the "Create an App" link.

        :param wait_seconds: post-click settle time (generalized from the
            previously hard-coded 5-second sleep; default unchanged).
        """
        self.log.info('clicking on create app button')
        button = self.driver.find_element_by_xpath("//a[contains(text(),'Create an App')]")
        button.click()
        time.sleep(wait_seconds)

    def enter_username(self):
        """Method to enter username (not yet implemented)."""

    def enter_password(self):
        """Method to enter password (not yet implemented)."""

    def click_login_button(self):
        """Method to click on login button (not yet implemented)."""
|
985,903 | 16867a2a55c9496fc364b7f89ef3023fbb23d262 | #!/usr/bin/env python3
from configparser import ConfigParser
import phishfry
import unittest
# Load test credentials from the deployed config file and open one shared
# Exchange session reused by every test case below.
# NOTE(review): requires /opt/phishfry/config.ini with a [test] section —
# these are live-integration tests, not unit tests.
config = ConfigParser()
config.read("/opt/phishfry/config.ini")
user = config["test"]["user"]
password = config["test"]["pass"]
account = phishfry.Account(user, password)
class TestPhishfry(unittest.TestCase):
    """Live integration tests for phishfry message remediation.

    Each remediation test restores first so a previous failed run cannot
    leave the target message deleted.
    """

    MAILBOX = "test@integraldefense.com"
    GROUP = "testinggroupemail@integraldefense.com"
    GROUP_MSG_ID = "<CAAoaDjT=8xPVW6e=yyv2eji7rzUMxPwnv6uMJJVzYbFK=LPCVw@mail.gmail.com>"
    EXTERNAL_MSG_ID = "<CAAoaDjQJ3Kor1nZMPJwEN56KK0pBDxyjhJjR-Hgj7ZA85hKy-w@mail.gmail.com>"

    def _assert_success(self, results, address):
        # remediation result must exist for the address and report success
        self.assertIn(address, results)
        self.assertTrue(results[address].success)

    def test_remediate_forward_to_group(self):
        # restore first in case deleted by a prior run
        account.Restore(self.MAILBOX, self.GROUP_MSG_ID, True)
        # deleting an email forwarded to a group remediates both mailboxes
        results = account.Remove(self.MAILBOX, self.GROUP_MSG_ID, True)
        self._assert_success(results, self.MAILBOX)
        self._assert_success(results, self.GROUP)
        # restoring covers both mailboxes as well
        results = account.Restore(self.MAILBOX, self.GROUP_MSG_ID, True)
        self._assert_success(results, self.MAILBOX)
        self._assert_success(results, self.GROUP)

    def test_remediate_non_existent_message(self):
        # restore first in case it is deleted
        account.Restore(self.MAILBOX, "<non-existent-message-id>")
        # removing a missing message still succeeds, with an explanatory note
        results = account.Remove(self.MAILBOX, "<non-existent-message-id>")
        self.assertIn(self.MAILBOX, results)
        self.assertTrue(results[self.MAILBOX].success)
        self.assertEqual(results[self.MAILBOX].message, "Message not found")
        # restoring a missing message fails with the same note
        results = account.Restore(self.MAILBOX, "<non-existent-message-id>")
        self.assertIn(self.MAILBOX, results)
        self.assertFalse(results[self.MAILBOX].success)
        self.assertEqual(results[self.MAILBOX].message, "Message not found")

    def test_remediate_reply_to_external_mailbox(self):
        # restore first in case deleted by a prior run
        account.Restore(self.MAILBOX, self.EXTERNAL_MSG_ID)
        # delete then restore an email that was forwarded externally
        results = account.Remove(self.MAILBOX, self.EXTERNAL_MSG_ID)
        self._assert_success(results, self.MAILBOX)
        results = account.Restore(self.MAILBOX, self.EXTERNAL_MSG_ID)
        self._assert_success(results, self.MAILBOX)

    def test_resolve_alias(self):
        # an onmicrosoft alias resolves to the primary address
        mailbox = account.GetMailbox("test@integraldefense.onmicrosoft.com")
        self.assertEqual(mailbox.address, self.MAILBOX)

    def test_resolve_non_existent_email(self):
        mailbox = account.GetMailbox("non_existent@integraldefense.com")
        self.assertIsNone(mailbox)

    def test_expand_distribution_list(self):
        mailbox = account.GetMailbox("testemaillist@integraldefense.com")
        self.assertEqual(len(mailbox.Expand()), 2)

    def test_get_group_owner(self):
        mailbox = account.GetMailbox(self.GROUP)
        owner = mailbox.GetOwner()
        self.assertEqual(owner.group.address, self.GROUP)


if __name__ == '__main__':
    unittest.main(verbosity=2)
|
985,904 | 6755f0c4cba6736e83f0b1dcf7673ccf5d3dc33c | import requests
class Snap:
    """Thin client for the Snapdeal mobile API: login, add to cart, logout."""

    def __init__(self, userName, passWord, loginWithMobile):
        """Store credentials and open an HTTP session.

        :param userName: mobile number or email used to sign in.
        :param passWord: account password.
        :param loginWithMobile: True to log in by mobile number, False by email.
        """
        self.api_url = 'https://mobileapi.snapdeal.com/service/'
        self.header = {'Content-Type': 'application/json; charset=utf-8'}
        self.session = self.getSession()
        self.userName = userName
        self.passWord = passWord
        self.loginWithMobile = loginWithMobile

    def getSession(self):
        """Create the requests.Session used for all GET/POST calls."""
        session = requests.Session()
        return session

    def postLogin(self):
        """Sign in to the server; stores the Login-Token response header.

        :returns: the raw login response.
        """
        if self.loginWithMobile:
            url = self.api_url + 'user/login/v2/loginWithMobile/'
            loginType = 'mobileNumber'
        else:
            url = self.api_url + 'user/login/v2/loginWithEmail/'
            loginType = 'emailId'
        json_data = {"requestProtocol": "PROTOCOL_JSON",
                     "responseProtocol": "PROTOCOL_JSON",
                     loginType: self.userName,
                     "password": self.passWord,
                     "apiKey": "snapdeal"}
        login_response = self.session.post(url, json=json_data, headers=self.header)
        self.login_token = login_response.headers.get('Login-Token')
        return login_response

    def postLogout(self):
        """Sign out from the server using the stored login token."""
        url = self.api_url + 'signout/'
        json_data = {'loginToken': self.login_token}
        return self.session.post(url, json=json_data, headers=self.header)

    def addToCart(self, pinCode, vendorCode, supc, catalogId, qty):
        """Add a product to the cart and print the outcome.

        BUG FIX: the delivery pincode was hard-coded to "110025"; the
        *pinCode* argument supplied by the caller is now used.
        """
        url = self.api_url + 'nativeCart/v2/insertItemToCart'
        json_data = {"pincode": pinCode,
                     "items": [{"vendorCode": vendorCode, "supc": supc,
                                "catalogId": catalogId, "quantity": qty}],
                     "loginToken": self.login_token}
        cart_response = self.session.post(url, json=json_data, headers=self.header)
        if cart_response.json().get('successful'):
            print(''.join(cart_response.json().get('messages')))
        else:
            print('Product was not added, something went wrong')

    def valLogin(self):
        """Log in and report success; prints server errors on failure."""
        login_response = self.postLogin()
        if login_response.json().get('status') == 'SUCCESS':
            print('You are Successfully Logged in')
            return True
        else:
            print('Something went wrong\n')
            print(login_response.json().get('exceptions')[0].get('errorMessage'))
            print('\nLogin Again\n')
            return False

    def valLogout(self):
        """Log out and report the outcome."""
        logout_response = self.postLogout()
        if logout_response.json().get('status') == 'true':
            print('You are Successfully Logged out....')
        else:
            print(logout_response.json().get('code'))
def inputUserData():
    """Prompt for Snapdeal credentials.

    :returns: (userName, userPassword, loginWithMobile) where
        loginWithMobile is True when a 10-digit mobile number was entered.
    """
    loginWithMobile = True
    while True:
        userName = input('Please enter your Registered Username of Snapdeal\n')
        # BUG FIX: was `userName.isdigit` (missing call parentheses), which
        # is always truthy — any 10-character entry passed as a mobile number
        if userName.isdigit() and len(userName) == 10:
            loginWithMobile = True
            break
        elif '@' in userName:
            loginWithMobile = False
            break
        else:
            print('\nPlease enter valid Username\n')
    print('--'*40)
    userPassword = input('Please enter your current Password\n')
    return (userName, userPassword, loginWithMobile)
def setCart():
    """Prompt for a delivery pincode and return demo product details.

    :returns: (pinCode, vendorCode, supc, catalogId, qty)
    """
    # (removed dead `pincode = ''` — a case-typo shadow of pinCode that was
    # never read)
    while True:
        pinCode = input('Please enter pincode for adding product to Cart\n')
        if len(pinCode) == 6 and pinCode.isdigit():
            break
        else:
            print('Please enter Valid Pincode')
    # Product Details (hard-coded for the demo).
    vendorCode = 'S667db'
    supc = 'SDL044719313'
    catalogId = 643083255133
    qty = 1
    print('\nFor the demo, the vendorCode, supc, catalogId and quantity are harcoded to \n{0}, {1}, {2} and {3} respectively\n'.format(vendorCode, supc, str(catalogId), str(qty)))
    return (pinCode, vendorCode, supc, catalogId, qty)
def taskDetails():
    """Print the assignment brief that this demo implements."""
    divider = '--' * 40
    print(divider)
    print('\n'+'Task Details'.center(60,' ')+'\n')
    print(' 1. Login into snapdeal using it\'s API\n')
    print(' 2. Add any product to cart using API\n')
    print(' 3. Logout from snapdeal\n')
    print(divider)
def main():
    """Drive the demo: authenticate, add a product to the cart, sign out."""
    snap = None
    needs_login = True
    taskDetails()
    # keep prompting until the credentials validate
    while needs_login:
        credentials = inputUserData()
        print('--'*40)
        snap = Snap(*credentials)
        print('Validating your Credentials.........\n')
        needs_login = not snap.valLogin()
    print('--'*40)
    # add the demo product to the cart
    cart_details = setCart()
    print('Adding Product to Cart\n')
    snap.addToCart(*cart_details)
    # sign the user out
    print('--'*40)
    print('\nFinally signing out.........\n')
    snap.valLogout()


if __name__ == '__main__':
    main()
|
985,905 | 0d8adc94890c2566288fda9324051b7649b0fa12 | import applescript
# Bridge into AppleScript (py-applescript package) to read one cell from the
# Numbers document "Untitled" / sheet "Sheet 3" / table "Table 1".
# NOTE(review): Python 2 syntax (`print` statement) — will not run on Python 3.
script = applescript.AppleScript("""
on readcell(x, y)
tell application "Numbers"
tell document "Untitled"
tell sheet "Sheet 3"
tell table "Table 1"
set a to value of cell y of column x
end tell
end tell
end tell
end tell
end
""")
# invoke the AppleScript handler: column 1, cell 1
print script.call('readcell', 1, 1)
# The trailing triple-quoted string is a no-op expression — a condensed
# one-liner variant of the same AppleScript kept as a reference.
"""
tell application "Numbers" to tell document "Untitled" tell sheet "Sheet 3" to tell table "Table 1"
set a to value of cell y of column x
end tell
"""
985,906 | 9acd675959fe390d95123f364745c161756302d6 | import datetime
import logging
def days_from_leg(leg):
    """Return every calendar day covered by *leg*, inclusive of both ends."""
    one_day = datetime.timedelta(days=1)
    days = []
    current = leg['dates']['start']
    while current <= leg['dates']['end']:
        days.append(current)
        current += one_day
    return days
def season_for_day(day, seasons):
    """Return the first season whose [eff_date, exp_date] window (inclusive,
    MM/DD/YYYY strings) contains *day*; None when no season matches."""
    probe = datetime.datetime(day.year, day.month, day.day)
    for candidate in seasons:
        starts = datetime.datetime.strptime(candidate['eff_date'], "%m/%d/%Y")
        ends = datetime.datetime.strptime(candidate['exp_date'], "%m/%d/%Y")
        if starts <= probe <= ends:
            return candidate
    return None
def cost_for_day(day, season):
    """Build the per-day cost record from a season's rates.

    Returns None when no season applies; multipliers start at 1.0 and are
    adjusted later for travel/checkout days.
    """
    if season is None:
        return None
    return dict(
        day=day,
        lodging=int(season['lodging']),
        lodging_multiplier=1.0,
        meals=int(season['meals']),
        incidentals=int(season['incidentals']),
        mie_multiplier=1.0,
    )
def perdiem_costs_by_query(days, query):
    """Build one leg's cost record from a per-diem database query result.

    An unmatched query yields a stub with no matched location; a match
    prices every day against the top-scoring location's seasons.
    """
    if not query['found']:
        return dict(
            searched_location=query['original_search_location'],
            travel_cost=None,
            matched_location=None
        )
    best = query['closest_matches'][0]
    return dict(
        searched_location=query['original_search_location'],
        score=best['score'],
        travel_cost=None,
        matched_location=best['location'],
        dates=[cost_for_day(day, season_for_day(day, best['seasons']))
               for day in days]
    )
def perdiem_costs(days, location, pd_db):
    """Look *location* up in the per-diem database and price the given days."""
    query = pd_db.perdiem_query(location)
    return perdiem_costs_by_query(days, query)
def trip_costs_by_perdiems(trip, perdiems):
    """
    Takes a trip (list of legs, leg->(route, dates, staying_in))
    and a list of perdiem queries the
    same length as the number of legs,
    returns [leg->(route,costs,staying_in)]
    """
    assert len(trip) == len(perdiems)

    def build_legs():
        return [dict(route=leg['route'],
                     dates=leg['dates'],
                     staying_in=leg['staying_in'],
                     costs=perdiem_costs_by_query(days_from_leg(leg), query))
                for (leg, query) in zip(trip, perdiems)]

    def zero_out_final_lodging(legs):
        for leg in legs:
            leg_costs = leg['costs']
            if leg_costs['matched_location'] is not None:
                # you're not staying in lodging on the last day
                leg_costs['dates'][-1]['lodging_multiplier'] = 0.0

    def apply_travel_mie(leg, date_index):
        # travel days receive a reduced (75%) meals-and-incidentals rate
        if leg['costs']['matched_location'] is not None:
            leg['costs']['dates'][date_index]['mie_multiplier'] = 0.75

    legs = build_legs()
    zero_out_final_lodging(legs)
    apply_travel_mie(legs[0], 0)
    apply_travel_mie(legs[-1], -1)
    return legs
def trip_costs(trip, pd_db, threshold):
    """Query per-diem rates for every leg of *trip* and price it out."""
    perdiems = [pd_db.perdiem_query(leg['staying_in'], threshold=threshold)
                for leg in trip]
    # log what each query matched to aid debugging of fuzzy lookups
    for query in perdiems:
        logging.debug("Returned {0} matches, top being {1}".format(
            len(query['closest_matches']),
            query['closest_matches'][0] if query['found'] else "NO MATCH"
        ))
    return trip_costs_by_perdiems(trip, perdiems)
def calculated_trip_costs(trip_costs, route_estimators=()):
    """Fill in travel costs and actual per-diem amounts for a priced trip.

    :param trip_costs: list of legs as produced by trip_costs_by_perdiems.
    :param route_estimators: objects exposing .services(mode) and
        .single_estimate(start, end, date); the first estimator claiming a
        leg's mode provides its travel cost. (Default changed from a mutable
        ``[]`` to an immutable ``()`` — same behavior, avoids the shared
        mutable-default pitfall.)
    :returns: shallow copy of the list; the leg dicts themselves are
        updated in place.
    """
    result = trip_costs.copy()

    def route_costs(leg, estimators):
        # first estimator that services this leg's transport mode wins
        for est in estimators:
            route = leg['route']
            if est.services(route['mode']):
                return est.single_estimate(route['start'],
                                           route['end'],
                                           leg['dates']['start'])
        return None

    def set_route_cost(leg, estimators):
        leg['costs']['travel_cost'] = route_costs(leg, estimators)

    def set_actual_perdiem_costs(leg):
        if leg['costs']['matched_location'] is not None:
            for day in leg['costs']['dates']:
                day['lodging_actual'] = day['lodging']\
                    * day['lodging_multiplier']
                day['mie_actual'] = (day['meals'] + day['incidentals']) \
                    * day['mie_multiplier']

    for leg in result:
        set_route_cost(leg, route_estimators)
        set_actual_perdiem_costs(leg)
    return result
def adjusted_trip_costs(trip, pd_db, route_estimators=(), threshold=90):
    """Price a trip end to end: per-diem lookup plus route-cost estimation.

    (Removed an unreachable duplicate ``return result`` that followed the
    return statement, and replaced the mutable ``[]`` default with ``()``.)
    """
    priced = trip_costs(trip, pd_db, threshold=threshold)
    return calculated_trip_costs(priced, route_estimators)
|
985,907 | e4ee47f53cc11dbfa7ce4f0116cdeca136b386a7 | # Copyright 2018 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This executable captures the raw USB stream from Joulescope devices
and saves the raw stream data to a file. This executable is a
development tool and is not intended for customer use.
"""
import signal
import struct
import time
import logging
from joulescope.usb import scan
from joulescope.usb.device_thread import DeviceThread
def parser_config(p):
    """Capture raw USB data from Joulescope"""
    # docstring above doubles as the command help text; argument set and
    # defaults are unchanged
    p.add_argument('--duration', type=float,
                   help='The capture duration.')
    p.add_argument('--endpoint_id', type=int, default=2,
                   help='The endpoint identifier.')
    p.add_argument('--threaded', '-t', default=0, action='count',
                   help='Use the threaded wrapper')
    p.add_argument('filename',
                   help='The filename for output data.')
    return on_cmd
def on_cmd(args):
    """Command entry point: locate the single attached Joulescope and run
    the capture.

    :returns: 0 on success, 1 when no device is found, 2 when more than one
        device is attached.
    """
    devices = scan(name='Joulescope')
    if not devices:
        print('No devices found')
        return 1
    elif len(devices) != 1:
        # FIX: message read "More than on device found)" (typo + stray paren)
        print('More than one device found')
        return 2
    device = devices[0]
    return run(device, filename=args.filename,
               duration=args.duration,
               endpoint_id=args.endpoint_id,
               threaded=args.threaded)
def stream_settings(device):
    """Configure the Joulescope stream settings

    :param device: The USB device instance.
    :return: the result of the vendor control transfer.
    """
    version = 1
    length = 16  # total packed size in bytes; must match the format below
    payload = struct.pack(
        '<BBBBIBBBBBBBB',
        version,
        length,
        0x01,  # PktType settings
        0x00,  # reserved
        0x00,  # reserved
        0x01,  # sensor_power,
        0x00,  # i_range,
        0xC0,  # source raw,
        0x00,  # options
        0x03,  # streaming normal
        0, 0, 0)
    return device.control_transfer_out(
        'device', 'vendor', request=3,
        value=0, index=0, data=payload)
def run(device, filename, duration, endpoint_id, threaded):
    """Stream raw USB endpoint data from *device* into *filename*.

    :param device: the USB device instance.
    :param filename: output path for the raw captured bytes.
    :param duration: capture length in seconds, or None to run until CTRL-C.
    :param endpoint_id: USB endpoint to stream from.
    :param threaded: number of DeviceThread wrapper layers to apply.
    :return: 0 on completion.
    """
    logging.basicConfig(level=logging.DEBUG)
    quit_ = False
    d = device
    for _ in range(threaded):
        d = DeviceThread(d)
    time_start = time.time()
    time_last = time_start
    with open(filename, 'wb') as fh:
        def do_quit(*args, **kwargs):
            # SIGINT handler: request a clean shutdown of the capture loop
            nonlocal quit_
            quit_ = 'quit from SIGINT'

        def on_data(data, length=None):
            # stream callback: persist raw bytes; returning True stops streaming
            nonlocal quit_
            if data is None:
                if not quit_:
                    quit_ = 'quit for on_data'
            else:
                fh.write(bytes(data)[:length])
            if duration is not None:
                if time.time() - time_start > duration:
                    return True
            return False

        def on_process():
            return False

        signal.signal(signal.SIGINT, do_quit)
        print('Press CTRL-C to stop data collection')
        try:
            d.open()
            rv = stream_settings(d)
            if 0 != rv.result:
                # BUG FIX: was print('warning: %s', rv), which printed the
                # literal "%s" and rv as two arguments instead of formatting
                print('warning: %s' % rv)
            d.read_stream_start(
                endpoint_id=endpoint_id,
                transfers=8,
                block_size=256 * 512,
                data_fn=on_data,
                process_fn=on_process)
            while not quit_:
                d.process(timeout=0.01)
                time_now = time.time()
                if time_now - time_last > 1.0:
                    # periodic status heartbeat, at most once per second
                    print(d.status())
                    time_last = time_now
            d.read_stream_stop(endpoint_id)
        finally:
            d.close()
        print('done capturing data: %s' % quit_)
    return 0
|
985,908 | f1210816537a5cbf83afdcc39bf83fdffaf2812a | import sys
from botocore.exceptions import ClientError
import boto3
ec2 = boto3.client('ec2')

# instance IDs arrive as one comma-separated CLI argument
instance_id = sys.argv[1].split(',')
action = sys.argv[2].lower()


def _dry_run_then_apply(operation, success_message):
    """Dry-run *operation* to verify permissions, then perform it for real.

    A DryRunOperation error from the dry run means the call would be
    permitted; any other ClientError is reported and re-raised.
    (Deduplicates the previously copy-pasted start/stop branches.)
    """
    try:
        operation(InstanceIds=instance_id, DryRun=True)
    except ClientError as e:
        if 'DryRunOperation' not in str(e):
            print("Something went wrong.")
            raise
    try:
        operation(InstanceIds=instance_id)
        print(success_message)
    except ClientError as e:
        print('Error', e)


if action == "start":
    _dry_run_then_apply(ec2.start_instances, 'instance started successfully')
elif action == 'stop':
    _dry_run_then_apply(ec2.stop_instances, 'instance stopped successfully')
else:
    print("Valid operations are [start/stop]")
|
985,909 | 7ba74e9adf27649f61b7a7f242166a48df22eb53 | import json
from botocore.vendored import requests
import os
import random
# User-Agent strings sampled per request to vary the fetch fingerprint.
# Hoisted to module level so the list is built once per Lambda container
# instead of on every invocation.
UA_SAMPLES = (
    'Mozilla/5.0 CK={} (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; KTXN)',
    'Mozilla/5.0 (Windows NT 5.1; rv:7.0.1) Gecko/20100101 Firefox/7.0.1',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/54.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1',
    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)',
    'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko)',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/605.1.15 (KHTML, like Gecko)',
    'Mozilla/4.0 (compatible; MSIE 9.0; Windows NT 6.1; 125LA; .NET CLR 2.0.50727; .NET CLR 3.0.04506.648; .NET CLR 3.5.21022)',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.1.4322)',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.18362',
    'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.83 Safari/537.1',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393',
    'Mozilla/4.0 (compatible; MSIE 9.0; Windows NT 6.1)',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.18363',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/605.1.15 (KHTML, like Gecko)',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36',
    'Mozilla/4.0 (compatible; MSIE 6.0; Windows 98)',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.121 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36 Edge/15.15063',
    'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1 Safari/605.1.15',
)


def lambda_handler(event, context):
    """Fetch the URL given in ?url=... with a random User-Agent.

    :param event: API-Gateway-style event; reads queryStringParameters.url.
    :param context: Lambda context (unused).
    :returns: dict with the upstream statusCode and a body prefixed
        "success:" or "fail:" followed by the upstream response headers.
    NOTE(review): fetches an arbitrary caller-supplied URL (open proxy /
    SSRF surface) — confirm this endpoint is access-controlled.
    """
    params = event["queryStringParameters"]
    url = params["url"]
    headers = {"User-Agent": random.choice(UA_SAMPLES)}
    print(f'requesting: {url}\nheaders:{headers}')
    result = requests.get(url, headers=headers, timeout=5)
    print(f'Response from {url}:', result.status_code)
    outcome = "success" if result.ok else "fail"
    return {"statusCode": result.status_code,
            "body": f"{outcome}:{str(result.headers)}"}
|
985,910 | 36a6de86f74bb5af7e56f083f35daf6486719660 | import unittest
import mock
from mock import MagicMock,patch
import os.path
import logging
import sys,os
from MockData import *
import sys
import sys, os
sys.path.append(os.path.abspath(os.path.join('..', 'extensions/')))
import extensions
sys.path.append(os.path.abspath(os.path.join('..', 'LoggingDatabase/')))
import LoggingErrorsinDatabase
sys.path.append(os.path.abspath(os.path.join('..', 'Databaselayer/')))
import LoginClass
sys.path.append(os.path.abspath(os.path.join('..', 'Businesslayer/')))
import LoginMyClass
class Test_LoginMyClass(unittest.TestCase):
    """Unit tests for LoginMyClass.getLoginDetails using stubbed returns."""

    def test_getLoginDetails_1(self):
        # successful-login path: the stub reports loggedIn=True
        instance = LoginMyClass.LoginMyClass(Emailid[1], True, Firstname[1], TypeOfUser[1], '', 'pass')
        instance.getLoginDetails = MagicMock(return_value=(True, Firstname[1], TypeOfUser[1]))
        logged_in, first_name, user_type = instance.getLoginDetails()
        self.assertEqual(logged_in, True)

    def test_getLoginDetails_2(self):
        # failed-login path: the stub reports loggedIn=False
        instance = LoginMyClass.LoginMyClass(Emailid[1], False, Firstname[1], TypeOfUser[1], '', 'pass')
        instance.getLoginDetails = MagicMock(return_value=(False, Firstname[1], TypeOfUser[1]))
        logged_in, first_name, user_type = instance.getLoginDetails()
        self.assertEqual(logged_in, False)


if __name__ == '__main__':
    unittest.main()
985,911 | 6f5e614f8a27ddc24a539f65aac92b199ddb02e3 | from typing import List
class Permutation:
    """Generate all permutations of a sequence via backtracking DFS."""

    def permute(self, nums):
        """Return every permutation of *nums* as a list of lists.

        An empty input yields [[]] (the single empty permutation).
        """
        if not nums:
            return [[]]
        collected = []
        self.dfs(collected, [], nums, [False] * len(nums))
        return collected

    def dfs(self, res, temp, nums, visited):
        """Grow *temp* one element at a time; emit a copy when complete."""
        if len(temp) == len(nums):
            res.append(list(temp))
            return
        for index, value in enumerate(nums):
            if visited[index]:
                continue
            visited[index] = True
            temp.append(value)
            self.dfs(res, temp, nums, visited)
            temp.pop()
            visited[index] = False
985,912 | 5022aa4a5687090c1acc2309969dd09854bbee37 | from flask import Flask
from flask import render_template, request
from textblob import Word
import json
import main
# Flask application serving the prediction UI and its JSON endpoints.
app = Flask(__name__)


@app.route('/')
def index():
    """Serve the single-page UI (templates/index.html)."""
    return render_template('index.html')
@app.route('/get_end_predictions', methods=['post'])
def get_prediction_eos():
    """Return end-of-sentence predictions for the posted text as JSON."""
    try:
        payload = request.json
        # Collapse runs of whitespace, then append the mask token the model fills in.
        masked_text = ' '.join(payload['input_text'].split()) + ' <mask>'
        predictions = main.get_all_predictions(masked_text, top_clean=int(payload['top_k']))
        body, status = json.dumps(predictions), 200
    except Exception as error:
        # Any failure is reported to the client as its string form with HTTP 500.
        body, status = json.dumps(str(error)), 500
    return app.response_class(response=body, status=status, mimetype='application/json')
@app.route('/auto_correction', methods=["POST"])
def auto_correction():
    """Spell-check the last word of the posted text.

    Returns JSON {'result': suggestions, 'underline': bool, 'word': word};
    the default payload (no correction needed) when the word is already
    spelled correctly, or HTTP 500 with the error text on failure.
    """
    data = {'result': [], 'underline': False, 'word': ''}
    try:
        input_text = (' '.join(request.json['input_text'].split())).split(' ')[-1]
        # Fix: spellcheck() is expensive — call it once and reuse the result
        # (the original evaluated it twice: once for the confidence test and
        # once for the suggestion list).
        suggestions = Word(input_text).spellcheck()
        if suggestions[0][1] < 1.0:
            # Confidence below 1.0 means the word is likely misspelled;
            # offer the top three candidates.
            sug_words = [i[0] for i in suggestions[:3]]
            data = {'result': sug_words, 'underline': True, 'word': input_text}
        return app.response_class(response=json.dumps(data), status=200, mimetype='application/json')
    except Exception as error:
        return app.response_class(response=json.dumps(str(error)), status=500, mimetype='application/json')
# Enable Flask debug mode (interactive debugger + auto-reload).
app.config["DEBUG"] = True
if __name__ == "__main__":
    app.run(host='127.0.0.1', port=8083)
|
985,913 | d3711bfd38c5047088b088de69dead96d5daa4b1 | import pytest
@pytest.fixture()
def AnsibleDefaults(Ansible):
    """Load the role's default variables (defaults/main.yml) as a dict."""
    return Ansible("include_vars", "defaults/main.yml")["ansible_facts"]
@pytest.fixture()
def Hostname(TestinfraBackend):
    """Hostname of the host currently under test."""
    return TestinfraBackend.get_hostname()
def test_gor_template_user(User, Group, AnsibleDefaults):
    """The gor group and user exist, and the user belongs to the group."""
    assert Group(AnsibleDefaults["gor_group"]).exists
    assert User(AnsibleDefaults["gor_user"]).exists
    assert User(AnsibleDefaults["gor_user"]).group == AnsibleDefaults["gor_group"]
def test_gor_binary(File, AnsibleDefaults):
    """The gor binary is owned by the gor user/group and symlinked into /usr/bin."""
    binary = File(AnsibleDefaults["gor_bin_dir"] + "/gor")
    assert binary.user == AnsibleDefaults["gor_user"]
    assert binary.group == AnsibleDefaults["gor_group"]
    symlink = File("/usr/bin/gor")
    assert symlink.exists
    assert symlink.is_symlink
    assert symlink.linked_to == AnsibleDefaults["gor_root_dir"] + "/bin/gor"
def test_gor_service(File, Service, Socket, Interface, Hostname):
    """A systemd unit file exists; the service runs but is not enabled at boot."""
    assert File("/etc/systemd/system/gor.service").exists
    assert not Service("gor").is_enabled
    assert Service("gor").is_running
def test_gor_functionality(File, Sudo, Hostname):
    """On capture hosts the recorded-traffic file exists and contains our test
    request; on all other hosts it must be absent."""
    requests = File("/opt/gor/out/requests_0.gor")
    if Hostname in ("test01", "prod02"):
        assert requests.exists
        # Reading the capture file requires the gor user's privileges.
        with Sudo("gor"):
            assert requests.contains("ansible-httpget")
    else:
        assert not requests.exists
|
985,914 | ffd516959a2cf5c07cacd07b22e955b518c7f789 |
def rj_parse_mosflm_cr_log(mosflm_cr_log):
    """Extract the refined unit cell and mosaic spread from Mosflm
    cell-refinement log output.

    mosflm_cr_log: iterable of log lines (strings).
    Returns (cell, mosaic): cell is a 6-tuple of floats
    (a, b, c, alpha, beta, gamma); mosaic is a float.
    Raises RuntimeError when either value is missing from the log.
    """
    cell = None
    mosaic = None
    for record in mosflm_cr_log:
        # Only the summary 'Refined cell' line carries the six constants;
        # 'Refined cell parameters' introduces a different per-parameter table.
        if 'Refined cell' in record:
            if not 'parameters' in record:
                cell = tuple(map(float, record.split()[-6:]))
        if 'Refined mosaic spread' in record:
            mosaic = float(record.split()[-1])
    # Fix: use the parenthesized raise form, which is valid under both
    # Python 2 and 3 (the original `raise RuntimeError, '...'` is
    # Python-2-only syntax and a SyntaxError on Python 3).
    if not cell:
        raise RuntimeError('cell not found')
    if not mosaic:
        raise RuntimeError('mosaic not found')
    return cell, mosaic
def rj_parse_mosflm_cr_log_rmsd(mosflm_cr_log):
    """Pull per-image r.m.s. positional deviations out of a Mosflm
    cell-refinement log.

    Returns (images, rmsds): the image numbers encountered, and a dict
    mapping refinement cycle number -> list of r.m.s. deviations (mm),
    one per image.
    """
    in_table = False
    images = []
    rmsds = {}
    for line in mosflm_cr_log:
        # The deviation table sits between these two markers.
        if 'Rms positional error (mm)' in line:
            in_table = True
        if 'YSCALE' in line:
            in_table = False
        if not in_table:
            continue
        if 'Image' in line:
            images.extend(int(tok) for tok in line.replace('Image', '').split())
        if 'Cycle' in line:
            fields = line.split()
            cycle = int(fields[1])
            rmsds.setdefault(cycle, []).extend(float(tok) for tok in fields[2:])
    return images, rmsds
if __name__ == '__main__':
    import sys
    # Print one row per image: the image number followed by the r.m.s.
    # deviation for each refinement cycle, in cycle order.
    images, rmsds = rj_parse_mosflm_cr_log_rmsd(open(sys.argv[1]).readlines())
    for i in range(len(images)):
        record = '%3d' % images[i]
        for cycle in sorted(rmsds):
            record += ' %.3f' % (rmsds[cycle][i])
        # Fix: parenthesized print works under both Python 2 and 3
        # (the bare `print record` statement is Python-2-only).
        print(record)
|
985,915 | 3097c1a8ee8f0f29d78f002c626746d01831f25f | #!/usr/bin/env python
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
import tables
import os
import sys
from icecube.umdtools import cache, misc
from scipy import optimize as opt
from icecube import icetray, dataclasses, histlite, astro
from skylab.psLLH_stack import PointSourceLLH #What I'm using - uses energy info
from skylab.ps_model_stack import ClassicLLH #calculates llh from spatial information only
from scipy.stats import chi2
import healpy as hp
import itertools
from scipy.signal import convolve2d
from skylab.ps_injector_stack import PointSourceInjector
from skylab.psLLH_stack import MultiPointSourceLLH
from skylab.utils import poisson_weight
from optparse import OptionParser
import argparse
##This entire practice script is just to try to do single_source sensitivities with the stacking code, in a vain hope that it'll come out correct.
sys.path.append("/home/brelethford/Documents/IceCube_Research/Scripts/AGN_Core")
# Hard-coded project/data locations on the analysis cluster.
projfolder='/home/brelethford/Documents/IceCube_Research/'
datafolder='/data/user/brelethford/Data/'
filename_plots=projfolder+'Plots/AGNCore/Stacking/'
filename_pickle = projfolder+'Scripts/AGN_Core/sensitivity/pickle/'
## I only need one source, the src_dec of which will be determined by the submitter scripts. ##
parser = OptionParser (usage = '%prog [options]')
parser.add_option ('--dec', dest = 'dec', type = float,
                default = 0., metavar = 'DEC',
                help = 'sin of the source declination.')
parser.add_option ('--batch', dest = 'batch', type = int,
                default = 0, metavar = 'BATCH',
                help = 'Assigns a number to each batch of background trials.')
parser.add_option ('--batchsize', dest = 'batchsize', type = int,
                default = 1000, metavar = 'BATCHSIZE',
                help = 'Assigns how many background trials are used in each batch.')
parser.add_option ('--years', dest = 'years', type = int,
                default = 2, metavar = 'YEARS',
                help = 'Number of years of data')
opts, args = parser.parse_args ()
# --dec is given as sin(declination); convert to degrees.
dec_deg = np.arcsin(opts.dec) * 180./np.pi
years = opts.years
# Single test source at RA 0 and the requested declination (radians).
src_ra=[0.0]
src_dec=[np.radians(dec_deg)]
batch = opts.batch
batchsize = opts.batchsize
import data_multi
# Build the per-season likelihoods; each additional year appends one sample.
llh40 = data_multi.init40(energy=True, mode='box')
samples=[llh40]
if years>1:
    llh59 = data_multi.init59(energy=True, mode='box')
    samples.append(llh59)
if years>2:
    llh79 = data_multi.init79(energy=True, mode='box')
    samples.append(llh79)
if years>3:
    llh86 = data_multi.init86I(energy=True, mode='box')
    samples.append(llh86)
print(len(samples))
llhmodel = data_multi.multi_init(samples,energy=True)
# One batch of background (scrambled) trials at this declination.
bckg_trials_single = PointSourceLLH.background_scrambles(llhmodel,src_ra,src_dec,alpha=0.5,maxiter=batchsize)
## Background Trials have the following keys:##
##['beta', 'TS_beta', 'beta_err', 'n_inj', 'nsources', 'TS', 'gamma']##
## Let's use a uniform weight (none) first to yield our bckg trials. ##
#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath (__file__))
out_dir = misc.ensure_dir ('/data/user/brelethford/Output/all_sky_sensitivity/results/single_stacked/multi_year/{0}/dec{1:+010.5}/'.format(str(years),dec_deg))
# save the output
outfile = out_dir + 'batch_{0:03}.array'.format(batch)
print 'Saving', outfile, '...'
cache.save(bckg_trials_single, outfile)
|
985,916 | 687a46fdf47c2a0b4ddd5c7cea2b227c4d497d24 | import h2o
from tests import pyunit_utils
from h2o.estimators.kmeans import H2OKMeansEstimator
import random
def random_attack():
    """Fuzz H2OKMeansEstimator: train 50 models on the ozone data with
    randomly chosen parameter combinations (a smoke test for parameter
    handling; the RNG is deliberately left unseeded)."""
    def attack(train, x):
        kwargs = {}
        # randomly select parameters and their corresponding values
        kwargs['k'] = random.randint(1, 20)
        if random.randint(0, 1): kwargs['model_id'] = "my_model"
        if random.randint(0, 1): kwargs['max_iterations'] = random.randint(1, 1000)
        if random.randint(0, 1): kwargs['standardize'] = [True, False][random.randint(0, 1)]
        if random.randint(0, 1):
            method = random.randint(0, 3)
            if method == 3:
                # method 3: supply explicit user start points (k points per column),
                # each perturbed around the column mean.
                # Can be simplified to: train[x].mean() + (train[x].runif() - 0.5)*200
                # once .runif() is fixed
                s = [[train[c].mean().getrow()[0] + random.uniform(-100, 100)
                      for p in range(kwargs['k'])] for c in x]
                print("s: {0}".format(s))
                start = h2o.H2OFrame(list(zip(*s)))
                kwargs['user_points'] = start
            else:
                kwargs['init'] = ["Furthest", "Random", "PlusPlus"][method]
        if random.randint(0, 1): kwargs['seed'] = random.randint(1, 10000)
        # display the parameters and their corresponding values
        print("-----------------------")
        print("x: {0}".format(x))
        for k, v in kwargs.items():
            if k == 'user_points':
                print(k + ": ")
                start.show()
            else:
                print(k + ": {0}".format(v))
        H2OKMeansEstimator(**kwargs).train(x=x, training_frame=train)
        print("-----------------------")
    print("Import Ozone.csv...")
    ozone = h2o.import_file(path=pyunit_utils.locate("smalldata/glm_test/ozone.csv"))
    # Each attack trains on a random non-empty subset of the first four columns.
    for i in range(50):
        attack(ozone, random.sample([0, 1, 2, 3], random.randint(1, 4)))
if __name__ == "__main__":
    # Run under the H2O pyunit harness when executed directly...
    pyunit_utils.standalone_test(random_attack)
else:
    # ...or immediately when imported by the test runner.
    random_attack()
|
985,917 | 037e8237dc20606a998368cb54d1791f1813c649 | # ---------------------------------------------------------------------
# inv.inv application
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import inspect
import os
from typing import Optional, Dict, List, Any, Tuple
# NOC modules
from noc.services.web.base.extapplication import ExtApplication, view
from noc.inv.models.object import Object
from noc.inv.models.error import ConnectionError
from noc.inv.models.objectmodel import ObjectModel
from noc.core.validators import is_objectid
from noc.sa.interfaces.base import (
StringParameter,
ObjectIdParameter,
UnicodeParameter,
ListOfParameter,
BooleanParameter,
)
from noc.core.inv.path import find_path
from noc.core.translation import ugettext as _
from noc.core.comp import smart_text
# Maps "<" <-> ">" to flip protocol direction markers when tracing paths.
translation_map = str.maketrans("<>", "><")
class InvApplication(ExtApplication):
    """
    inv.inv application
    """

    title = _("Inventory")
    menu = _("Inventory")
    # Undeletable nodes
    # Objects whose model UUID appears here can never be removed via the UI.
    UNDELETABLE = {
        # Global Lost&Found
        "b0fae773-b214-4edf-be35-3468b53b03f2"
    }
    def __init__(self, *args, **kwargs):
        ExtApplication.__init__(self, *args, **kwargs)
        # Load plugins
        from .plugins.base import InvPlugin

        self.plugins = {}
        # Discover plugin modules by scanning the plugins directory.
        # NOTE(review): the path is relative to the process CWD — presumably
        # the repository root; confirm for non-standard deployments.
        for f in os.listdir("services/web/apps/inv/inv/plugins/"):
            if not f.endswith(".py") or f == "base.py" or f.startswith("_"):
                continue
            mn = "noc.services.web.apps.inv.inv.plugins.%s" % f[:-3]
            m = __import__(mn, {}, {}, "*")
            for on in dir(m):
                o = getattr(m, on)
                # Register only plugin classes defined in this module
                # (skip re-exports and the InvPlugin base itself).
                if inspect.isclass(o) and issubclass(o, InvPlugin) and o.__module__.startswith(mn):
                    assert o.name
                    self.plugins[o.name] = o(self)
def get_plugin_data(self, name):
return {"name": name, "xtype": self.plugins[name].js}
    @view("^node/$", method=["GET"], access="read", api=True)
    def api_node(self, request):
        """Return the children of a container for the navigation tree.

        ``node=root`` lists top-level containers; otherwise ``node`` is the
        ObjectId of the container to expand.  Each entry lists the plugin
        panels the UI should show for that object.
        """
        children = []
        if request.GET and "node" in request.GET:
            container = request.GET["node"]
            if is_objectid(container):
                container = Object.get_by_id(container)
                if not container:
                    return self.response_not_found()
                children = [(o.name, o) for o in Object.objects.filter(container=container.id)]
                # Collect inner connections
                children += [(name, o) for name, o, _ in container.get_inner_connections()]
            elif container == "root":
                # Top level: every object of a container-capable model that
                # has no parent container.
                cmodels = [
                    d["_id"]
                    for d in ObjectModel._get_collection().find(
                        {"data.container.container": True}, {"_id": 1}
                    )
                ]
                children: List[Tuple[str, "Object"]] = [
                    (o.name, o)
                    for o in Object.objects.filter(
                        __raw__={"container": None, "model": {"$in": cmodels}}
                    )
                ]
            else:
                return self.response_bad_request()
        r = []
        # Build node interface
        for name, o in children:
            m_plugins = o.model.plugins or []
            # Plugins prefixed with "-" on the model are explicitly disabled.
            disabled_plugins = set(p[1:] for p in m_plugins if p.startswith("-"))
            n = {
                "id": str(o.id),
                "name": name,
                "plugins": [],
                "can_add": bool(o.get_data("container", "container")),
                "can_delete": str(o.model.uuid) not in self.UNDELETABLE,
            }
            if o.get_data("container", "container") or o.has_inner_connections():
                # n["expanded"] = Object.objects.filter(container=o.id).count() == 1
                n["expanded"] = False
            else:
                n["leaf"] = True
            # Capability-driven plugin panels.
            if o.get_data("rack", "units"):
                n["plugins"] += [self.get_plugin_data("rack")]
            if o.model.connections:
                n["plugins"] += [self.get_plugin_data("inventory")]
            if o.get_data("geopoint", "layer"):
                n["plugins"] += [self.get_plugin_data("map")]
            if o.get_data("management", "managed_object"):
                n["plugins"] += [self.get_plugin_data("managedobject")]
            if o.get_data("contacts", "has_contacts"):
                n["plugins"] += [self.get_plugin_data("contacts")]
            if o.model.sensors:
                n["plugins"] += [self.get_plugin_data("sensor")]
            # Append model's plugins
            for p in m_plugins:
                if not p.startswith("-"):
                    n["plugins"] += [self.get_plugin_data(p)]
            # Panels available on every object.
            n["plugins"] += [
                self.get_plugin_data("data"),
                self.get_plugin_data("comment"),
                self.get_plugin_data("file"),
                self.get_plugin_data("log"),
            ]
            # NOTE(review): a container with sensors gets "sensor" twice
            # (also added above when o.model.sensors) — confirm intended.
            if o.get_data("container", "container"):
                n["plugins"] += [self.get_plugin_data("sensor")]
                n["plugins"] += [self.get_plugin_data("crossing")]
            # Process disabled plugins
            n["plugins"] = [p for p in n["plugins"] if p["name"] not in disabled_plugins]
            r += [n]
        return r
    @view(
        "^add_group/$",
        method=["POST"],
        access="create_group",
        api=True,
        validate={
            "container": ObjectIdParameter(required=False),
            "type": ObjectIdParameter(),
            "name": UnicodeParameter(),
            "serial": UnicodeParameter(required=False),
        },
    )
    def api_add_group(self, request, type, name, container=None, serial=None):
        """Create a new object of model *type* named *name*, optionally inside
        *container*, log the creation, and return the new object's id."""
        if is_objectid(container):
            c = Object.get_by_id(container)
            if not c:
                return self.response_not_found()
            c = c.id
        elif container:
            # Non-empty but not a valid ObjectId.
            return self.response_bad_request()
        else:
            c = None
        m = ObjectModel.get_by_id(type)
        if not m:
            return self.response_not_found()
        o = Object(name=name, model=m, container=c)
        # Store the serial only for models that track asset part numbers.
        if serial and m.get_data("asset", "part_no0"):
            o.set_data("asset", "serial", serial)
        o.save()
        o.log("Created", user=request.user.username, system="WEB", op="CREATE")
        return str(o.id)
@view(
"^remove_group/$",
method=["DELETE"],
access="remove_group",
api=True,
validate={"container": ObjectIdParameter(required=True)},
)
def api_remove_group(self, request, container=None):
c = self.get_object_or_404(Object, id=container)
c.delete()
return True
    @view(
        "^insert/$",
        method=["POST"],
        access="reorder",
        api=True,
        validate={
            "container": ObjectIdParameter(required=False),
            "objects": ListOfParameter(element=ObjectIdParameter()),
            "position": StringParameter(),
        },
    )
    def api_insert(self, request, container, objects, position):
        """
        :param request:
        :param container: ObjectID after/in that insert
        :param objects: List ObjectID for insert
        :param position: 'append', 'before', 'after'
        :return:
        """
        c = self.get_object_or_404(Object, id=container)
        o = []
        for r in objects:
            o += [self.get_object_or_404(Object, id=r)]
        if position == "append":
            # Move each object into the reference container itself.
            for x in o:
                x.put_into(c)
        elif position in ("before", "after"):
            # Move each object into the reference object's parent container
            # (None => top level when the reference has no parent).
            cc = self.get_object_or_404(Object, id=c.container.id) if c.container else None
            for x in o:
                x.put_into(cc)
        return True
@view("^(?P<id>[0-9a-f]{24})/path/$", method=["GET"], access="read", api=True)
def api_get_path(self, request, id):
o = self.get_object_or_404(Object, id=id)
path = [{"id": str(o.id), "name": o.name}]
while o.container:
o = o.container
path.insert(0, {"id": str(o.id), "name": o.name})
return path
    @view(
        "^crossing_proposals/$",
        method=["GET"],
        access="read",
        api=True,
        validate={
            "o1": ObjectIdParameter(required=True),
            "o2": ObjectIdParameter(required=False),
            "left_filter": UnicodeParameter(required=False),
            "right_filter": UnicodeParameter(required=False),
            "cable_filter": UnicodeParameter(required=False),
        },
    )
    def api_get_crossing_proposals(
        self,
        request,
        o1,
        o2=None,
        left_filter: Optional[str] = None,
        right_filter: Optional[str] = None,
        cable_filter: Optional[str] = None,
    ):
        """
        API for connnection form.
        1) If cable_filter set, checked connection capable with cable.
        2) If left_filter set, check renmote object
        :param request:
        :param o1:
        :param o2:
        :param left_filter:
        :param right_filter:
        :param cable_filter:
        :return:
        """
        self.logger.info(
            "Crossing proposals: %s:%s, %s:%s. Cable: %s",
            o1,
            left_filter,
            o2,
            right_filter,
            cable_filter,
        )
        lo: Object = self.get_object_or_404(Object, id=o1)
        ro: Optional[Object] = None
        if o2:
            ro = self.get_object_or_404(Object, id=o2)
        id_ports_left = {}
        checking_ports = []
        lcs: List[Dict[str, Any]] = []
        cable: Optional[ObjectModel] = None
        # Getting cable
        # Every model with a "length" datum is considered a cable.
        cables = ObjectModel.objects.filter(data__length__length__gte=0)
        if cable_filter:
            # NOTE(review): get_by_name may return None for an unknown name;
            # cable.id below would then raise AttributeError — confirm.
            cable = ObjectModel.get_by_name(cable_filter)
        # Left side: evaluate every connection on lo's model.
        for c in lo.model.connections:
            valid, disable_reason = True, ""
            if cable_filter:
                # If select cable_filter - check every connection to cable
                # NOTE(review): the comprehension variable shadows the outer
                # loop variable `c` (harmless in Py3 comprehensions, but
                # confusing) — left unchanged.
                cable_connections = [
                    c for c in lo.model.get_connection_proposals(c.name) if c[0] == cable.id
                ]
                valid = bool(cable_connections)
            elif ro and right_filter:
                rc = ro.model.get_model_connection(right_filter)
                if not rc:
                    # NOTE(review): bare `raise` with no active exception
                    # produces RuntimeError — probably meant an explicit error.
                    raise
                valid, disable_reason = lo.model.check_connection(c, rc)
            elif ro:
                valid = bool(
                    [c for c in lo.model.get_connection_proposals(c.name) if c[0] == ro.model.id]
                )
            oc, oo, _ = lo.get_p2p_connection(c.name)
            left_id = f"{smart_text(lo.id)}{c.name}"
            is_employed = bool(oc)
            if is_employed:
                # Occupied ports are later traced to build the wire list.
                checking_ports.append(c)
            lcs += [
                self.get_cs_item(
                    left_id,
                    c.name,
                    str(c.type.id),
                    c.type.name,
                    c.gender,
                    c.direction,
                    c.protocols,
                    not is_employed,
                    valid,
                    disable_reason,
                    lo,
                )
            ]
            id_ports_left[c.name] = left_id
        id_ports_right = {}
        rcs: List[Dict[str, Any]] = []
        # Right side: same evaluation against ro, mirrored.
        if ro:
            for c in ro.model.connections:
                valid, disable_reason = True, ""
                if cable_filter:
                    cable_connections = [
                        c for c in ro.model.get_connection_proposals(c.name) if c[0] == cable.id
                    ]
                    valid = bool(cable_connections)
                elif left_filter:
                    lc = lo.model.get_model_connection(left_filter)
                    if not lc:
                        # NOTE(review): bare `raise` — see note above.
                        raise
                    valid, disable_reason = lo.model.check_connection(c, lc)
                else:
                    valid = bool(
                        [
                            c
                            for c in ro.model.get_connection_proposals(c.name)
                            if c[0] == lo.model.id
                        ]
                    )
                oc, oo, _ = ro.get_p2p_connection(c.name)
                right_id = f"{smart_text(ro.id)}{c.name}"
                rcs += [
                    self.get_cs_item(
                        right_id,
                        c.name,
                        str(c.type.id),
                        c.type.name,
                        c.gender,
                        c.direction,
                        c.protocols,
                        not bool(oc),
                        valid,
                        disable_reason,
                        ro,
                    )
                ]
                id_ports_right[c.name] = right_id
        wires = []
        device_left = {}
        device_right = {}
        # Only when both sides have connections can existing wires be shown.
        if lcs and rcs:
            device_left["id"] = smart_text(lo.id)
            device_left["name"] = lo.name
            device_right["id"] = smart_text(ro.id)
            device_right["name"] = ro.name
            for p in checking_ports:
                remote = self.get_remote_slot(p, lo, ro)
                if remote:
                    wires.append(
                        {
                            "left": {"id": id_ports_left.get(p.name, 0), "name": p.name},
                            "right": {
                                "id": id_ports_right.get(remote.connection, 0),
                                "name": remote.connection,
                            },
                        }
                    )
        # Forming cable
        return {
            "left": {"connections": lcs, "device": device_left},
            "right": {"connections": rcs, "device": device_right},
            "cable": [{"name": c.name, "available": True} for c in cables],
            "valid": lcs and rcs and left_filter and right_filter,
            "wires": wires,
        }
    @view(
        "^connect/$",
        method=["POST"],
        access="connect",
        api=True,
        validate={
            "object": ObjectIdParameter(required=True),
            "name": StringParameter(required=True),
            "remote_object": ObjectIdParameter(required=True),
            "remote_name": StringParameter(required=True),
            # "cable": ObjectIdParameter(required=False),
            "cable": StringParameter(required=False),
            "reconnect": BooleanParameter(default=False, required=False),
        },
    )
    def api_connect(
        self,
        request,
        object,
        name,
        remote_object,
        remote_name,
        cable: Optional[str] = None,
        reconnect=False,
    ):
        """Connect slot *name* of *object* to slot *remote_name* of
        *remote_object*, either directly or through a newly created wire of
        model *cable*.  Returns True on success, or a JSON error payload when
        the connection is rejected."""
        lo: Object = self.get_object_or_404(Object, id=object)
        ro: Object = self.get_object_or_404(Object, id=remote_object)
        cable_o: Optional[Object] = None
        if cable:
            # NOTE(review): get_by_name may return None for an unknown cable
            # name; Object(...) below would then get model=None — confirm.
            cable = ObjectModel.get_by_name(cable)
            cable_o = Object(
                name=f"Wire {lo.name}:{name} <-> {ro.name}:{remote_name}",
                model=cable,
                container=lo.container.id,
            )
            cable_o.save()
        try:
            if cable_o:
                # Wire both ends: local slot -> cable end 1, remote -> end 2.
                c1, c2 = cable_o.model.connections[:2]
                self.logger.debug("Wired connect %s:%s", c1, c2)
                lo.connect_p2p(name, cable_o, c1.name, {}, reconnect=reconnect)
                ro.connect_p2p(remote_name, cable_o, c2.name, {}, reconnect=reconnect)
                lo.save()
                ro.save()
            else:
                lo.connect_p2p(name, ro, remote_name, {}, reconnect=reconnect)
        except ConnectionError as e:
            self.logger.warning("Connection Error: %s", str(e))
            return self.render_json({"status": False, "text": str(e)})
        return True
def get_remote_slot(self, left_slot, lo, ro):
"""
Determing right device's slot with find_path method
:return:
"""
for path in (
find_path(
lo,
left_slot.name,
[p.translate(translation_map) for p in left_slot.protocols],
trace_wire=True,
)
or []
):
if path.obj == ro:
return path
def get_remote_device(self, slot, protocols, o):
"""
Determing remote device with find_path method
:return:
"""
for path in (
find_path(o, slot, [p.translate(translation_map) for p in protocols], trace_wire=True)
or []
):
if path.obj != o and not path.obj.is_wire:
return path
def get_cs_item(
self,
id,
name,
type,
type__label,
gender,
direction,
protocols,
free,
valid,
disable_reason,
o,
):
"""
Creating member of cs dict
:return:
"""
cs = {
"id": id,
"name": name,
"type": type,
"type__label": type__label,
"gender": gender,
"direction": direction,
"protocols": protocols,
"free": free,
"valid": valid,
"disable_reason": disable_reason,
}
if not free:
rd = self.get_remote_device(name, protocols, o)
if rd:
cs["remote_device"] = {
"name": rd.obj.name,
"id": smart_text(rd.obj.id),
"slot": rd.connection,
}
return cs
|
985,918 | 42c2f4779802a3042df265337cd6602800545394 | """ addBinary
Given two binary strings, return their sum (also a binary string).
For example,
a = "11"
b = "1"
Return "100".
"""
class Solution:
    def addBinary(self, a, b):
        """
        Add two binary strings and return the sum as a binary string.
        :type a: str
        :type b: str
        :rtype: str
        """
        # Parse both operands base-2, add, and format the total back to binary.
        total = int(a, 2) + int(b, 2)
        return format(total, 'b')
# Quick sanity checks; expected results shown in the trailing comments.
print(Solution().addBinary("11", "1")) #100
print(Solution().addBinary("10100000100100110110010000010101111011011001101110111111111101000000101111001110001111100001101",
                           "110101001011101110001111100110001010100001101011101010000011011011001011101111001100000011011110011"))
#"110111101100010011000101110110100000011101000101011001000011011000001100011110011010010011000000000"
'''alternative: slower, more complicated
class Solution:
def addBinary(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
carry = 0
result = ['0' for x in range(len(a)+1 if len(a)>=len(b) else len(b)+1)]
for i in range(1, len(result)+1):
tempA = int(a[-i]) if (len(a))-i >= 0 else 0
tempB = int(b[-i]) if (len(b))-i >= 0 else 0
temp = tempA + tempB + carry
if temp == 3:
result[-i] = '1'
carry = 1
elif temp == 2:
carry = 1
elif temp == 1:
result[-i] = '1'
carry = 0
else:
carry = 0
return ''.join(result) if result[0] != '0' else ''.join(result)[1:]
'''
|
985,919 | 7cc3483a4a4f652f10819d15c3487bd9a6f6b253 | #mostrar la serie fibonaci
n=0
num=1
ultimo=0
antesUltimo=0
while(n<=15):
print(n,"=",num)
antesUltimo=ultimo
ultimo=num
num=antesUltimo+ultimo
n=n+1 |
985,920 | 075f4ae91efa7098b39d1f4dc96ddccf1eaae352 | # To run this, first, run source .bashrc & source .bash_profile to load up neuron library in python
from neuron import hoc,h
from netpyne import specs,sim
netParams_d1 = specs.NetParams() # object of class NetParams to store the network parameters
netParams_d2 = specs.NetParams() # object of class NetParams to store the network parameters
simConfig = specs.SimConfig() # object of class SimConfig to store the simulation configuration
###############################################################################
# NETWORK PARAMETERS
#cellRule = netParams.importCellParams(label='PT5B_full', conds={'cellType': 'PT', 'cellModel': 'HH_full'},
# fileName='cells/PTcell.hoc', cellName='PTcell', cellArgs=[ihMod2str[cfg.ihModel], cfg.ihSlope], soma_0AtOrigin=True)
# nonSpiny = ['apic_0', 'apic_1']
#netParams.popParams['PT5B'] = {'cellModel': 'HH_full', 'cellType': 'PT', 'ynormRange': layer['5B'], 'numCells':1}
###############################################################################
# Population parameters
# Import the D1/D2 medium spiny neuron cell templates from their hoc files.
cellRule_d1=netParams_d1.importCellParams(label='D1MSN', conds={'cellType': 'D1', 'cellModel': 'D1MSN'}, fileName='d1msn.hoc', cellName='d1msn')
cellRule_d2=netParams_d2.importCellParams(label='D2MSN', conds={'cellType': 'D2', 'cellModel': 'D2MSN'}, fileName='d2msn.hoc', cellName='d2msn')
netParams_d1.popParams['D1MSN'] = {'cellModel': 'D1MSN', 'cellType': 'D1', 'numCells': 10} # add dict with params for this pop
netParams_d2.popParams['D2MSN'] = {'cellModel': 'D2MSN', 'cellType': 'D2', 'numCells': 1} # add dict with params for this pop
# NOTE(review): this 'AMPA' stim source/target pair is redefined (overwritten)
# further down, and 'synMech': 'exc' names a mechanism never declared in
# synMechParams — confirm which definition is intended.
netParams_d1.stimSourceParams['AMPA'] = {'type':'NetStim','rate':10,'noise':0.5}
netParams_d1.stimTargetParams['AMPA->D1MSN'] = {'source': 'AMPA', 'conds': {'cellType': 'D1'}, 'weight': 0.01, 'delay': 5, 'synMech': 'exc'}
##################
# Cell parameters
#cellRule_d1 = {'conds': {'cellModel': 'D1MSN', 'cellType': 'D1'}, 'secs': {}} # cell rule dict
#cellRule_d2 = {'conds': {'cellModel': 'd2msn', 'cellType': 'D2MSN'}, 'secs': {}} # cell rule dict
# This is how to manipulate cellular parameters but we do not need these now
#cellRule_d1['secs']['soma_0'] = {'geom': {}, 'mechs': {}} # soma_0 params dict
#cellRule_d1['secs']['soma_0']['geom'] = {'diam': 18.8, 'L': 18.8, 'Ra': 123.0} # soma_0 geometry
#cellRule['secs']['soma_0']['mechs']['d2msn'] = {'gnabar': 0.12, 'gkbar': 0.036, 'gl': 0.003, 'el': -70} # soma_0 hh mechanism
###################
# Initial membrane potential of the D1 soma.
# NOTE(review): the next two lines are identical — the second probably meant
# cellRule_d2; left unchanged pending confirmation.
cellRule_d1['secs']['soma_0']['vinit'] = -80
cellRule_d1['secs']['soma_0']['vinit'] = -80
netParams_d1.cellParams['D1MSN'] = cellRule_d1 # add dict to list of cell params
netParams_d2.cellParams['D2MSN'] = cellRule_d2
# Synaptic mechanism parameters
netParams_d1.synMechParams['AMPA'] = {'mod': 'AMPA', 'tau_r': 15.3604, 'tau_d': 1.55121, 'gbar': 8.5e-04}
netParams_d1.synMechParams['GABA'] = {'mod': 'GABA', 'tau_r': 2.3578, 'tau_d': 7.7747}
netParams_d2.synMechParams['AMPA'] = {'mod': 'AMPA', 'tau_r': 15.3604, 'tau_d': 1.55121, 'gbar': 9e-04}
netParams_d2.synMechParams['GABA'] = {'mod': 'GABA', 'tau_r': 2.3578, 'tau_d': 7.7747}
# Fix: removed a stray bare `s` statement that stood here on its own line —
# it referenced an undefined name and raised NameError as soon as the
# script was executed.
# Stimulation parameters
netParams_d1.stimSourceParams['AMPA'] = {'type': 'NetStim', 'rate': 10, 'noise': 0.5, 'start': 1}
netParams_d1.stimTargetParams['AMPA->D1MSN'] = {'source': 'AMPA', 'conds': {'pop': 'D1MSN'}, 'weight': 0.1, 'delay': 'uniform(1,5)'}
netParams_d2.stimSourceParams['AMPA'] = {'type': 'NetStim', 'rate': 10, 'noise': 0.5, 'start': 1}
netParams_d2.stimTargetParams['AMPA->D2MSN'] = {'source': 'AMPA', 'conds': {'pop': 'D2MSN'}, 'weight': 0.1, 'delay': 'uniform(1,5)'}
# Spikes are detected at the distal (1.0) end of the soma section.
cellRule_d1['secs']['soma_0']['spikeGenLoc'] = 1.0
cellRule_d2['secs']['soma_0']['spikeGenLoc'] = 1.0
netParams_d1.stimSourceParams['Input_1'] = {'type': 'IClamp', 'del': 300, 'dur': 100, 'amp': 200}
netParams_d1.stimSourceParams['Input_4'] = {'type': 'NetStim', 'interval': 'uniform(20,100)', 'start': 600, 'noise': 0.1}
netParams_d1.stimTargetParams['Input_1->D1MSN'] = {'source': 'Input_1', 'sec':'soma_0', 'loc': 0.8, 'conds': {'pop':'D1MSN', 'cellList': range(10)}}
netParams_d1.stimTargetParams['Input_4->D1MSN'] = {'source': 'Input_4', 'sec':'soma_0', 'loc': 0.5, 'weight': '0.1+normal(0.2,0.05)','delay': 1,
                                                   'conds': {'cellType':'D1', 'ynorm': [0.6,1.0]}}
netParams_d1.stimSourceParams['bkg'] = {'type': 'NetStim', 'rate': 10, 'noise': 0.5}
# NOTE(review): 'synMech': 'exc' is not declared in synMechParams (only
# 'AMPA'/'GABA' are) — confirm the intended mechanism name.
netParams_d1.stimTargetParams['bkg->D1'] = {'source': 'bkg', 'conds': {'cellType': 'D1'}, 'weight': 0.01, 'delay': 5, 'synMech': 'exc'}
# Connectivity parameters
# Recurrent D1 -> D1 connection via AMPA on the soma.
netParams_d1.connParams['D1MSN->D1MSN'] = {
    'preConds': {'pop': 'D1MSN'},
    'postConds': {'pop': 'D1MSN'},
    'sec':'soma_0',
    'synMech':'AMPA',
    'weight': 0.01, # weight of each connection
    'delay': '0.1+normal(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
    'probability': 0.4}
netParams_d2.connParams['D1MSN->D2MSN'] = {
    'preConds': {'pop': 'D1MSN'},
    'postConds': {'pop': 'D2MSN'},
    'sec':'soma_0',
    'synMech':'GABA',
    'weight': 0.01, # weight of each connection
    'delay': '0.2+normal(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
    'probability': 0.4}
# NOTE(review): the key 'D2MSN->D1MSN' is assigned twice below — the second
# (GABA) definition silently overwrites the first (AMPA) one, so the AMPA
# rule never takes effect. Left unchanged; the first rule probably needs a
# distinct label.
netParams_d2.connParams['D2MSN->D1MSN'] = {
    'preConds': {'pop': 'D2MSN'},
    'postConds': {'pop': 'D1MSN'},
    'sec':'proximal',
    'synMech':'AMPA',
    'weight': 0.01, # weight of each connection
    'delay': '0.2+normal(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
    'probability': 0.4}
netParams_d2.connParams['D2MSN->D1MSN'] = {
    'preConds': {'pop': 'D2MSN'},
    'postConds': {'pop': 'D1MSN'},
    'sec':'proximal',
    'synMech':'GABA',
    'weight': 0.01, # weight of each connection
    'delay': '0.2+normal(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
    'probability': 0.4}
##########
#netParams_d2.connParams['D2MSN->D2MSN'] = {
# 'preConds': {'pop': 'D2MSN'},
# 'postConds': {'pop': 'D2MSN'},
# 'sec':'dend',
# 'synMech':'AMPA',
# 'weight': 0.01, # weight of each connection
# 'delay': '0.2+normal(13.0,1.4)', # delay min=0.2, mean=13.0, var = 1.4
# 'probability': 0.4} # probability of connection
##########
###############################################################################
# SIMULATION PARAMETERS
###############################################################################
# Simulation parameters
simConfig.duration = 1*200 # Duration of the simulation, in ms
simConfig.dt = 0.025 # Internal integration timestep to use
simConfig.seeds = {'conn': 1, 'stim': 1, 'loc': 1} # Seeds for randomizers (connectivity, input stimulation and cell locations)
simConfig.createNEURONObj = 1 # create HOC objects when instantiating network
simConfig.createPyStruct = 1 # create Python structure (simulator-independent) when instantiating network
simConfig.verbose = True # show detailed messages
# Recording
simConfig.recordCells = [10] # which cells to record from
simConfig.recordTraces = {'Vsoma_0':{'sec':'soma_0','loc':0.5,'var':'v'}}
simConfig.recordStim = True # record spikes of cell stims
simConfig.recordStep = 0.1 # Step size in ms to save data (eg. V traces, LFP, etc)
# Saving
simConfig.filename = 'msn_net' # Set file output name
simConfig.saveFileStep = 1000 # step size in ms to save data to disk
simConfig.savePickle = False # Whether or not to write spikes etc. to a .mat file
# Analysis and plotting
simConfig.analysis['plotRaster'] = True # Plot raster
simConfig.analysis['plotTraces'] = {'include': [('D1MSN',0)]}
# simConfig.analysis['plotTraces'] = {'include': [('D1MSN',0)]}
simConfig.analysis['plot2Dnet'] = True # Plot 2D net cells and connections
#simConfig.recordLFP = [[-15, y, 1.0*netParams_d2.sizeZ] for y in range(netParams_d2.sizeY/5, netParams_d2.sizeY, netParams_d2.sizeY/5)]
#simConfig.analysis['plotLFP'] = True
# Build the D1 network, run the simulation and produce the requested plots.
# NOTE(review): only netParams_d1 is simulated — the D2 network built above
# is never run; confirm whether that is intentional.
sim.createSimulateAnalyze(netParams_d1, simConfig)
985,921 | a7baa4e1d3f44d686631aa064220f3f8499e625d | #Write a program mydoc.py to implement the functionality of pydoc. The program should take the module name as argument and print documentation for the module and each of the functions defined in that module.
import sys
__import__(sys.argv[1])
print 'Help on module',sys.argv[1]
print '\n\n\nDESCRIPTION\n\n\n'
print __import__(sys.argv[1]).__doc__
print '\n\n\nFUNCTIONS\n\n\n'
for i in dir(sys.argv[1]):
print i,()
|
985,922 | eef7cc28a297de07510f216c1986cee00bbe5bf2 | from textwrap import dedent
import os
import subprocess
import numpy
import pandas
from wqio.tests import helpers
from wqio.utils import numutils
def _sig_figs(x):
    """ Wrapper around `utils.sigFig` (n=3, tex=True) requiring only one
    argument for the purpose of easily "apply"-ing it to a pandas
    dataframe.
    """
    return numutils.sigFigs(x, n=3, tex=True)
def refresh_index(df):
    """Return *df* with its MultiIndex rebuilt.

    Works around pandas block-manager bugs that surface with deeply
    nested indexes; frames without a MultiIndex are returned untouched.
    """
    if not isinstance(df.index, pandas.MultiIndex):
        return df
    return df.reset_index().set_index(df.index.names)
def get_level_position(df, levelname):
    """Return the (first) position of *levelname* among *df*'s index level names."""
    matches, = numpy.nonzero(numpy.array(df.index.names) == levelname)
    return matches[0]
def sanitizeTex(texstring):
    """ Cleans up overly eager LaTeX renderings from pandas.

    Parameters
    ----------
    texstring : string
        The string of LaTeX code to be cleaned up

    Returns
    -------
    sanitized : string
        Cleaned up LaTeX string.
    """
    # BUG FIX (idiom): the original used invalid escape sequences such as
    # "\$" and "\{", which are deprecated in Python 3 and slated to become
    # syntax errors. Raw strings express the same literal backslashes.
    newstring = (
        texstring.replace(r"\\%", r"\%")
        .replace(r"\\", r"\tabularnewline")
        .replace(r"\$", "$")
        .replace(r"\_", "_")
        .replace("ug/L", r"\si[per-mode=symbol]{\micro\gram\per\liter}")
        .replace(r"\textbackslashtimes", r"\times")
        .replace(r"\textbackslash", "")
        .replace(r"\textasciicircum", r"^")
        .replace(r"\{", "{")
        .replace(r"\}", "}")
    )
    return newstring
def csvToTex(
    csvpath,
    na_rep="--",
    float_format=_sig_figs,
    pcols=15,
    addmidrules=None,
    replaceTBrules=True,
    replacestats=True,
):
    """ Convert data in CSV format to a LaTeX table

    Parameters
    ----------
    csvpath : string
        Full name and file path of the input data file.
    na_rep : string, default "--"
        How NA values should be written.
    float_format : callable (default = `_sig_figs`)
        Single input function that will return the correct
        representation of floating point numbers.
    pcols : int (default = 15)
        Width of the columns for the LaTeX table.
    addmidrules : string or list of strings, optional
        (List of) string(s) to be replaced with "\\midrule".
    replaceTBrules : bool, default = True
        When True, replaces "\\toprule" and "\\bottomrule" with
        "\\midrule".
    replacestats : bool, default = True
        When True, the labels of statistics are cleaned up a bit (e.g.,
        "75%" -> "75th Percentile")

    Returns
    -------
    table : string
        The sanitized LaTeX source of the table. (The original docstring
        said ``None``, but a string has always been returned.)
    """
    # read in the data with pandas
    data = pandas.read_csv(csvpath, parse_dates=False, na_values=[na_rep])
    latex = data.to_latex(float_format=float_format, na_rep=na_rep, index=False)
    # BUG FIX: with pcols <= 0 the original still executed
    # ``"\n".join(lines)`` although ``lines`` was never created (NameError).
    # Return the sanitized LaTeX as-is in that case.
    if pcols <= 0:
        return sanitizeTex(latex)

    lines = []
    header, rest_of_file = latex.split("\n", maxsplit=1)
    # Create a new header: replace pandas' column spec with a left-aligned
    # lead column plus fixed-width ("x{<pcols>mm}") columns.
    # NOTE(review): iteration 0 appends both "l" and an "x{..mm}" spec --
    # preserved from the original, though an ``else`` may have been intended.
    header_sections = header.split("{")
    old_col_def = header_sections[-1][:-1]
    new_col_def = ""
    for n in range(len(old_col_def)):
        if n == 0:
            new_col_def = new_col_def + "l"
        new_col_def = new_col_def + "x{%smm}" % pcols
    lines.append(header.replace(old_col_def, new_col_def))
    if replaceTBrules:
        rest_of_file = rest_of_file.replace("\\toprule", "\\midrule")
        rest_of_file = rest_of_file.replace("\\bottomrule", "\\midrule")
    if replacestats:
        # Prettify pandas' describe()-style row labels.
        rest_of_file = rest_of_file.replace("std", "Std. Dev.")
        rest_of_file = rest_of_file.replace("50\\%", "Median")
        rest_of_file = rest_of_file.replace("25\\%", "25th Percentile")
        rest_of_file = rest_of_file.replace("75\\%", "75th Percentile")
        rest_of_file = rest_of_file.replace("count", "Count")
        rest_of_file = rest_of_file.replace("mean", "Mean")
        rest_of_file = rest_of_file.replace("min ", "Min. ")
        rest_of_file = rest_of_file.replace("max", "Max.")
        # XXX: undo the "min" substitution that mangles "Aluminum"
        rest_of_file = rest_of_file.replace("AluMin.um", "Aluminum")
    if addmidrules is not None:
        if hasattr(addmidrules, "append"):
            for amr in addmidrules:
                rest_of_file = rest_of_file.replace(amr, "\\midrule\n%s" % amr)
        else:
            # BUG FIX: this branch referenced the undefined loop variable
            # ``amr``; for a single string, ``addmidrules`` itself is the
            # text to be prefixed with \midrule.
            rest_of_file = rest_of_file.replace(
                addmidrules, "\\midrule\n%s" % addmidrules
            )
    lines.append(rest_of_file)
    return sanitizeTex("\n".join(lines))
def csvToXlsx(csvpath, xlsxpath, na_rep="--", float_format=None):
    """ Convert a CSV file into an Excel workbook.

    Parameters
    ----------
    csvpath : string
        Full name and file path of the input data file.
    xlsxpath : string
        Full name and file path of the output .xlsx file.
    na_rep : string (default = "--")
        How NA values should be represented.
    float_format : callable, optional
        Single input function that will return the correct
        representation of floating point numbers.

    Returns
    -------
    None
    """
    # Round-trip through pandas: parse the CSV, then let it emit the workbook.
    frame = pandas.read_csv(csvpath, parse_dates=False, na_values=[na_rep])
    frame.to_excel(xlsxpath, float_format=float_format, na_rep=na_rep, index=False)
def makeTexTable(
    tablefile, caption, sideways=False, footnotetext=None, clearpage=False, pos="h!"
):
    """ Build (but do not write) the LaTeX source for a table block.

    Parameters
    ----------
    tablefile : string
        Name of the .tex file that actually contains the table body.
    caption : string
        Caption/title that should be given to the table.
    sideways : bool (default = False)
        When True, emit a landscape ``sidewaystable`` block (this also
        forces a trailing ``\\clearpage``); otherwise a portrait ``table``.
    footnotetext : string, optional
        Any text that should be added as a footnote.
    clearpage : bool (default = False)
        When True, a ``\\clearpage`` command is appended to the end of
        the table block.
    pos : string (default = "h!")
        LaTeX float position specification.

    Returns
    -------
    tablestring : string
        The table block text that can be -- but has not been -- added
        to a LaTeX document.
    """
    tabletype = "sidewaystable" if sideways else "table"
    if sideways:
        clearpage = True
    clearpagetext = r"\clearpage" if clearpage else ""
    notes = "" if footnotetext is None else footnotetext
    template = dedent(
        r"""
        \begin{%s}[%s]
        \rowcolors{1}{CVCWhite}{CVCLightGrey}
        \caption{%s}
        \centering
        \input{%s}
        \end{%s}
        %s
        %s
        """
    )
    return template % (tabletype, pos, caption, tablefile, tabletype, notes, clearpagetext)
def makeLongLandscapeTexTable(df, caption, label, footnotetext=None, index=False):
    """ Create a multi-page landscape label for a LaTeX document.

    Parameters
    ----------
    df : pandas.DataFrame
        Dataframe to be turned into the table.
    caption : string
        Caption/title to be given to the table.
    label : string
        Unique identifier for references to table within LaTeX.
    footnotetext : string, optional
        Any text that should be added as a footnote.
    index : bool (default = False)
        Toggles the inclusion of the dataframe's index in to the table.
        Default behavior omits it.

    Returns
    -------
    tablestring : string
        The table block text that can be -- but has not been -- added
        to a LaTeX document.
    """
    if footnotetext is None:
        notes = ""
    else:
        notes = footnotetext
    tabletexstring = df.to_latex(index=index, float_format=_sig_figs, na_rep="--")
    # Keep only the data rows: drop pandas' tabular preamble (first 4 lines)
    # and closing lines (last 3). NOTE(review): these offsets are tied to the
    # exact layout emitted by this pandas version -- confirm after upgrades.
    valuelines = tabletexstring.split("\n")[4:-3]
    valuestring = "\n".join(valuelines)

    def _multicol_format(args):
        # Wrap one (position, name) pair in \multicolumn: the first column
        # is left-aligned, the rest get a fixed 16 mm width; "%" is escaped.
        n, col = args
        if n == 0:
            align = "l"
        else:
            align = "p{16mm}"
        return r"\multicolumn{1}{%s}{%s}" % (align, col.replace("%", r"\%"))

    # Column alignment spec: left-align the first column, center the rest.
    dfcols = df.columns.tolist()
    colalignlist = ["c"] * len(dfcols)
    colalignlist[0] = "l"
    colalignment = "".join(colalignlist)
    col_enum = list(enumerate(dfcols))
    columns = " &\n ".join(list(map(_multicol_format, col_enum)))
    # longtable template: the column header is repeated on every page via
    # \endfirsthead/\endhead, with continuation notices in the page feet.
    tablestring = (
        dedent(
            r"""
            \begin{landscape}
            \centering
            \rowcolors{1}{CVCWhite}{CVCLightGrey}
            \begin{longtable}{%s}
            \caption{%s} \label{%s} \\
            \toprule
            %s \\
            \toprule
            \endfirsthead
            \multicolumn{%d}{c}
            {{\bfseries \tablename\ \thetable{} -- continued from previous page}} \\
            \toprule
            %s \\
            \toprule
            \endhead
            \toprule
            \rowcolor{CVCWhite}
            \multicolumn{%d}{r}{{Continued on next page...}} \\
            \bottomrule
            \endfoot
            \bottomrule
            \endlastfoot
            %s
            \end{longtable}
            \end{landscape}
            %s
            \clearpage
            """
        )
        % (
            colalignment,
            caption,
            label,
            columns,
            len(dfcols),
            columns,
            len(dfcols),
            valuestring,
            notes,
        )
    )
    return tablestring
def makeTexFigure(figFile, caption, pos="hb", clearpage=True):
    """ Create the LaTeX for including a figure in a document. Does not
    actually add it to any document.

    Parameters
    ----------
    figFile : string
        Name of the image (.pdf) file that actually contains the figure.
    caption : string
        Caption/title that should be given to the figure.
    pos : string (default = "hb")
        LaTeX float position specification.
    clearpage : bool (default = True)
        When True, a "\\clearpage" command is appended to the end of the
        figure block.

    Returns
    -------
    figurestring : string
        The figure block text that can be -- but has not been -- added
        to a LaTeX document.

    Notes
    -----
    DOC FIX: the original docstring was copy-pasted from the table helper
    and documented parameters (``sideways``, ``footnotetext``) that this
    function does not accept.
    """
    if clearpage:
        clearpagetext = r"\clearpage"
    else:
        clearpagetext = ""
    figurestring = (
        dedent(
            r"""
            \begin{figure}[%s] %% FIGURE
            \centering
            \includegraphics[scale=1.00]{%s}
            \caption{%s}
            \end{figure} %% FIGURE
            %s
            """
        )
        % (pos, figFile, caption, clearpagetext)
    )
    return figurestring
def processFilename(filename):
    """ Sanitizes a filename for LaTeX. DON'T feed it a full path.

    Parameters
    ----------
    filename : string
        The name of the file to be sanitized.

    Returns
    -------
    sanitized : string
        Mutated filename without characters that might cause errors in
        LaTeX.

    Example
    -------
    >>> processFilename('FigureBenzo/Inzo_1')
    'FigureBenzoInzo1'
    """
    forbidden = " ,+$_{}/&"
    return "".join(ch for ch in filename if ch not in forbidden)
def setMPLStyle(serif=False):
    """Install the project's matplotlib style (TeX text rendering).

    Parameters
    ----------
    serif : bool (default = False)
        When True, use serif fonts (fourier preamble); otherwise
        sans-serif (helvet + sansmath).
    """
    # BUG FIX: matplotlib is never imported at module level in this file,
    # so the rcParams update below raised a NameError. Import it locally.
    import matplotlib

    if serif:
        fontfamily = "serif"
        preamble = [
            r"\usepackage{siunitx}",
            r"\sisetup{detect-all}",
            r"\usepackage{fourier}",
        ]
    else:
        fontfamily = "sans-serif"
        preamble = [
            r"\usepackage{siunitx}",
            r"\sisetup{detect-all}",
            r"\usepackage{helvet}",
            r"\usepackage{sansmath}",
            r"\sansmath",
        ]
    style_dict = {
        "text.usetex": True,
        "font.family": [fontfamily],
        "font.serif": ["Utopia", "Palantino"],
        "font.sans-serif": ["Helvetica", "Arial"],
        "lines.linewidth": 0.5,
        "patch.linewidth": 0.5,
        "text.latex.preamble": preamble,
        "axes.linewidth": 0.5,
        "axes.grid": True,
        "axes.titlesize": 12,
        "axes.labelsize": 10,
        "xtick.labelsize": 10,
        "xtick.direction": "out",
        "ytick.labelsize": 10,
        "ytick.direction": "out",
        "grid.linewidth": 0.5,
        "legend.fancybox": True,
        "legend.numpoints": 1,
        "legend.fontsize": 8,
        "figure.figsize": (6.5, 3.5),
        "savefig.dpi": 300,
    }
    matplotlib.rcParams.update(style_dict)
class LaTeXDirectory(object):
    """ Context manager to help compile latex docs from python.
    Switches to the latex document's folder and remains there while
    inside the manager. The present working directory is restored once
    the context manager exits.

    Parameters
    ----------
    texpath : string
        The LaTeX source file or the directory in which it is found.
    """

    def __init__(self, texpath):
        self.home = os.getcwd()
        # Accept either the .tex file itself or its containing directory.
        if os.path.isfile(texpath):
            self.texpath = os.path.dirname(texpath)
        else:
            self.texpath = texpath

    def __enter__(self):
        os.chdir(self.texpath)
        return self

    def __exit__(self, *args):
        # Always restore the original working directory, even on error.
        os.chdir(self.home)

    def compile(self, texdoc, clean=False):
        """ Compile a LaTeX document inside the context manager

        Parameters
        ----------
        texdoc : string
            File name of a .tex file in the LaTeX directory
        clean : bool (default = False)
            When True, all of non-PDF files resulting from compilation
            are removed. By default, they are left on the file system.

        Returns
        -------
        tex : int or None
            The status (1 or 0) of the compilation. If LaTeX is not
            available, None is returned.
        """
        # BUG FIX: ``glob`` was used below but never imported anywhere in
        # this module, raising a NameError when clean=True.
        import glob

        if helpers.checkdep_tex() is not None:
            # use ``pdflatex`` to compile the document
            tex = subprocess.call(
                ["pdflatex", texdoc, "--quiet"],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                shell=False,
            )
            if clean:
                extensions = ["aux", "log", "nav", "out", "snm", "toc"]
                for ext in extensions:
                    junkfiles = glob.glob("*.{}".format(ext))
                    for junk in junkfiles:
                        os.remove(junk)
        else:
            tex = None
        return tex
|
985,923 | 523a6f61c70ec8efec2d623a45be4d2cd0f5997a | print("""6°). Escrever um programa que coleta a senha do usuário (previamente ajustada)
armazena a senha digitada em uma lista e retorna a quantidade de vezes que o
usuário precisou para digitar a senha correta.""")
# Store every attempt so the number of tries can be reported at the end.
lista = []
h = input("\nqual a senha: ")
lista.append(h)
# Keep prompting until the expected password is typed.
# NOTE(review): the accepted password is literally the string "senha correta".
while h != "senha correta":
    h = input("tente novamente: ")
    lista.append(h)
# The reported count includes the final, successful attempt.
print("\n Quantidade de tentativas:\n ", len(lista), "\n Tentativas:\n ", lista)
|
985,924 | b231991bb14b7e7eb537ff49b6bd860829a6ef97 | """
高级函数方法
1。因为python是一种动态语音,可以给对象动态的绑定参数和方法,可以使用——slots——指定对象可以
绑定的方法
2。python可以使用@Property 声明一个属性的get,set方法。在操作属性的时候 会直接调用它的get,set方法
注意提供的方法名要和属性名一致
3.注意一点的是 @property方法内部的参数不能和方法名一样,否则出现递归无限调用。
要先调用set方法才能再调用 get方法,否则self 并没有和你声明的对象进行绑定。
"""
class SlotsObj(object):
    # NOTE(review): despite the name, this class declares no __slots__, so
    # arbitrary attributes can still be bound to instances (see test()).

    ## Getter for the ``width`` property (the original comment mislabelled
    ## this a setter).
    @property
    def width(self):
        print("start set width")
        return self._width

    @width.setter
    def width(self, value):
        # Setter: stores the value on the private ``_width`` attribute.
        self._width = value

    @property
    def resolution(self):
        # Read-only alias reporting the same underlying ``_width`` value;
        # raises AttributeError if ``width`` was never assigned first.
        return self._width
def test():
    # Exercise dynamic attribute binding and the ``width`` property.
    slotsObj = SlotsObj()
    slotsObj.name = "guo"
    slotsObj.age = 33
    ## The original (translated) note claimed binding "gradle" would fail
    ## because of __slots__; since the class declares no __slots__, it
    ## would actually succeed as well.
    # slotsObj.gradle = 33
    print(type(slotsObj))
    ## Invokes the ``width`` setter, which creates the backing ``_width``
    ## attribute on the instance.
    slotsObj.width=200
    print(slotsObj.width)
test()
|
985,925 | ecfa0894c9f8d7b0aba71437fdecac5589ccca08 | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 22 14:09:11 2019
@author: user
"""
import random
import numpy as np
import matplotlib.pyplot as plt
N=100
Potential=np.zeros((N,N))
print(Potential)
# Initialise the interior with random integer potentials; the boundary
# (first/last row and column) stays fixed at 0 (Dirichlet condition).
for i in range(N):
    for j in range(N):
        if j==0 or i==0 or i==N-1 or j==N-1:
            pass
        else:
            Potential[i,j]=random.randint(-10,10)
count=0
# NOTE(review): the 1/N factor makes these "per row" sums rather than true
# grid means (which would use 1/N**2); it cancels in the VNew/VOld ratio,
# so the convergence test below is unaffected.
VOld=np.sum(Potential)*(1/N)
VNew=0
# Relax until the total potential changes by less than 1 part in 1e5
# between sweeps (VNew starts at 0 to force at least one iteration).
while round(VNew/VOld,5)!=1:
    count+=1
    print(count)
    print(VNew/VOld)
    VOld=np.sum(Potential)*(1/N)
    for i in range(N):
        for j in range(N):
            if j==0 or i==0 or i==N-1 or j==N-1:
                pass
            else:
                # Replace each interior point with the average of its four
                # neighbours. Updates are in place, so later cells already
                # see this sweep's new values (Gauss-Seidel-style).
                Lpoint=Potential[i-1,j]
                Rpoint=Potential[i+1,j]
                Apoint=Potential[i,j-1]
                Bpoint=Potential[i,j+1]
                Potential[i,j]=0.25*(Lpoint+Rpoint+Apoint+Bpoint)
    VNew=np.sum(Potential)*(1/N)
# Heat-map of the converged potential.
plt.imshow(Potential)
plt.colorbar()
plt.show()
plt.clf()
# Contour plot on a physical -2..2 m grid.
x=np.linspace(-2,2,N)
y=np.linspace(-2,2,N)
xv,yv=np.meshgrid(x,y)  # NOTE(review): xv/yv are built but never used below
plt.contourf(x,y,Potential)
plt.colorbar()
plt.xlabel('x position (m)')
plt.ylabel('y position (m)')
plt.show()
plt.clf()
print(count)
|
985,926 | 0be8c85ba6890820ed04b025c32417a25c58b868 | from socket import *
# Pre-built protocol frames (0x02 = STX ... checksum, 0x03 = ETX).
OPEN = bytes([0x02, 0x00, 0x2c, 0xff, 0x01, 0x00, 0x00, 0xD0, 0x03])
CLOSE = bytes([0x02, 0x00, 0x5e, 0xff, 0x01, 0x00, 0x00, 0xD2, 0x03])
HB = bytes([0x02, 0x00, 0x56, 0xff, 0x01, 0x02, 0x00, 0x1f, 0x96, 0x21, 0x03])
ALERT = bytes([0x02, 0x00, 0x18, 0xff, 0x01, 0x02, 0x00, 0x00, 0x00, 0xE6, 0x03])
ALERT_close = bytes([0x02, 0x00, 0x18, 0xff, 0x01, 0x02, 0x00, 0x01, 0x00, 0xE7, 0x03])
FIRE_ALERT = bytes([0x02, 0x00, 0x18, 0xff, 0x01, 0x02, 0x00, 0x00, 0x00, 0xE7, 0x03])
FIRE_ALERT_close = bytes([0x02, 0x00, 0x18, 0xff, 0x01, 0x02, 0x00, 0x01, 0x00, 0xE6, 0x03])
LOCK = bytes([0x02, 0x00, 0x2F, 0xFF, 0x01, 0x01, 0x00, 0x00, 0xD2, 0x03])
UNLOCK = bytes([0x02, 0x00, 0x2F, 0xFF, 0x01, 0x01, 0x00, 0x01, 0xD3, 0x03])
OPEN_NORMALLY = bytes([0x02, 0x00, 0x2D, 0xFF, 0x01, 0x00, 0x00, 0xD1, 0x03])

# Single-client TCP server: dispatch on the frame's command byte (index 2).
serverSocket = socket(AF_INET, SOCK_STREAM)
serverSocket.bind(("192.168.10.100", 8001))
serverSocket.listen(0)
print("Waiting for client")
connectionSocket, addr = serverSocket.accept()
print("Address : ", addr)
while True:
    data = connectionSocket.recv(1024)
    # BUG FIX: recv() returning b'' means the peer disconnected; the
    # original busy-looped forever here and never reached close().
    if len(data) == 0:
        break
    listData = list(map(hex, data))
    print("data length : ", len(data))
    print("Received Data : ", listData)
    if listData[2] == "0x56":
        print("HeartBeat")
        connectionSocket.send(OPEN)
    # BUG FIX: hex() produces lowercase strings ("0x2e"), so the original
    # comparisons against "0x2E"/"0x2C" could never match.
    elif listData[2] == "0x2e":
        print("Door Close")
        connectionSocket.send(HB)
    elif listData[2] == "0x2c":
        print("Door Open")
        connectionSocket.send(HB)
serverSocket.close()
|
985,927 | 5bec65825e0c8f7f404bbd94b95c99b0793bca94 | #!/usr/bin/env python
# --------------------
# Project Euler - Problem 3
#
# Title: Largest prime factor
#
# Description: The prime factors of 13195 are 5, 7, 13 and 29.
# What is the largest prime factor of the number 600851475143 ?
# The problem input from the description above
number = 600851475143
# Trial divisor - starts at 2, the smallest prime
divisor = 2
# Determine the largest prime factor of a number by trial division
def lrg_prime(num, div) -> int:
    """Return the largest prime factor of *num*.

    Repeatedly divides out any divisor found starting from *div*; once no
    divisor smaller than *num* remains, *num* itself is the largest prime
    factor.
    """
    while div < num:
        if num % div == 0:
            # BUG FIX: use floor division -- true division (/) turned num
            # into a float, which loses precision for very large inputs.
            # ``div`` is deliberately left unchanged: the same factor may
            # divide num again, and no smaller factor can remain (the
            # original reset div to 2, which is equivalent but slower).
            num //= div
        else:
            div = div + 1
    return num
# Largest prime factor of `number`
lpf = lrg_prime(number, divisor)
# Print the largest result
print(f"Largest prime factor: {lpf}")
|
985,928 | 11e81aa6da6c5ca74e6ba15f965704f35658f2d9 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 22 17:05:11 2021
@author: Gilly
"""
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
class Clean():
    """Learn imputation statistics from a training frame and apply them to
    this or future frames."""

    def __init__(self, data, none_cols=None):
        '''
        Parameters
        ----------
        data : pandas DataFrame
            features on which to train the model later.
        none_cols : list, optional
            strings of column names in data where a missing value
            represents the class None.
        '''
        self.data = data
        # BUG FIX: avoid the shared mutable default argument (was ``[]``).
        self.none_cols = [] if none_cols is None else none_cols
        self.get_imputes()

    ## Get imputes from training data to use with future data
    def get_imputes(self):
        '''
        Generates a dictionary of the means of numerical values and
        modes of categorical data from the training set used to
        initialize this class.
        '''
        data = self.data
        none_cols = self.none_cols
        imputes = {}
        # Skip the columns where a missing value means the literal class None.
        cols = data.columns[~data.columns.isin(none_cols)]
        for col in cols:
            if data[col].dtype == 'object':
                imputes[col] = data[col].mode()[0]
            else:
                imputes[col] = data[col].mean()
        self.imputes = imputes

    ## Update imputes with new data
    def update_imputes(self, new_data):
        '''
        Parameters
        ----------
        new_data : pandas DataFrame
            Previously unseen data with which we can update our
            imputes dictionary. This is used to impute missing values
            in this new dataset.
        '''
        data = new_data
        imputes = self.imputes
        n_0 = self.data.shape[0]
        for col in data.columns:
            if col not in imputes.keys():
                if data[col].dtype == 'object':
                    imputes[col] = data[col].mode()[0]
                else:
                    imputes[col] = data[col].mean()
            elif data[col].dtype != 'object':
                # Weight the stored mean by the original training-set size
                # before averaging in the new values.
                og = np.empty(n_0)
                og.fill(imputes[col])
                # BUG FIX: Series.append was removed in pandas 2.0; build
                # the combined series with pd.concat instead.
                imputes[col] = np.mean(pd.concat([data[col], pd.Series(og)]))
        self.imputes = imputes

    ## Impute missing values function
    def impute_missing(self, data):
        '''
        Parameters
        ----------
        data : pandas DataFrame
            dataframe on which to impute values from this class's
            imputes dictionary object.

        Returns
        -------
        data : pandas DataFrame
            dataframe imputed with values from this class's
            imputes dictionary object. If this is not the data used
            to initialize this class, the method update_imputes should
            be called before this method.
        '''
        imputes = self.imputes
        none_cols = self.none_cols
        for acol in none_cols:
            data[acol] = data[acol].fillna('None')
        for bcol in imputes.keys():
            data[bcol] = data[bcol].fillna(imputes[bcol])
        return data
class Process():
    """Feature-engineering helpers: binning, target-mean encoding, one-hot
    encoding, min-max scaling and column selection."""

    def __init__(self):
        self.bins = {}   # reserved for fitted bin edges
        self.means = {}  # target means learned by mean_encode_train

    def bin_numerics(self, data, column_bins):
        """Add a "<col>_band" categorical column for every entry of
        *column_bins* (mapping column name -> bin edges)."""
        for col in list(column_bins.keys()):
            col_name = col + '_band'
            data[col_name] = pd.cut(data[col], bins=column_bins[col])
        return data

    def mean_encode_train(self, data, column, target='target'):
        '''
        Mean encode variable in training set; the learned means are
        stored in ``self.means`` for reuse on new data.
        '''
        ## Groupby column, take mean
        mean_dic = data.groupby(column)[target].mean().to_dict()
        ## Save means for use later
        self.means[column] = mean_dic
        ## Map mean to categorical
        data[column] = data[column].map(mean_dic)
        return data

    def mean_encode_new(self, data, column):
        '''
        Mean encode variable for new data, using the means learned by
        ``mean_encode_train``.
        '''
        mean_dic = self.means[column]
        data[column] = data[column].map(mean_dic)
        return data

    def one_hot_encode(self, data):
        '''
        One hot encode the object-dtype columns of a pandas DataFrame
        (first level dropped to avoid collinearity).
        '''
        cats = [col for col in data.columns if data[col].dtype == 'object']
        cat_cols = data[cats]
        temp_df = data.drop(cats, axis=1)
        dummies = pd.get_dummies(cat_cols, drop_first=True)
        data = pd.concat([temp_df, dummies], axis=1)
        return data

    def scale_numerics(self, data):
        '''
        Min-max scale the numeric columns of *data*.

        Parameters
        ----------
        data : pandas DataFrame

        Returns
        -------
        data : pandas DataFrame
            Copy of *data* with numeric columns scaled to [0, 1] and
            object-dtype columns dropped.
        '''
        data = data.copy()
        cats = [col for col in data.columns if data[col].dtype == 'object']
        # BUG FIX: the original evaluated ``data[~cats]`` where ``cats`` is
        # a plain list -- ``~`` on a list raises TypeError. Select the
        # numeric columns by name instead.
        nums = [col for col in data.columns if col not in cats]
        data = data.drop(cats, axis=1)
        scaler = MinMaxScaler()
        data[nums] = scaler.fit_transform(data[nums])
        self.scaler = scaler  # keep the fitted scaler for future frames
        return data

    def select(self, data, selected):
        """Return a copy of *data* restricted to the columns of *selected*
        that are actually present."""
        data = data.copy()
        data = data[[col for col in selected if col in data.columns]]
        return data
|
985,929 | 29083ea76681b0c72a17c17e2555c21000e6a99e | #!/usr/bin/env python3
# Reads a VCF or GZipped VCF and reformats it into
# Chromosome position, alleles, allele frequences and sample names
# Output is printed to stdout
# E.g.
#Chrom:Pos Alleles Freq 430-ZLIU 431-ZLIU 432-ZLIU 433-ZLIU 434-ZLIU ...
#1:11232 C,A,T 0.958,0.027,0.015 0/0 0/0 0/0 0/0 0/0 ...
#2:15213 A,C,T 0.977,0.022,0.002 0/0 0/0 0/0 0/0 0/0 ...
#3:15555 A,T,C 0.052,0.404,0.544 2/2 1/2 1/2 1/2 2/2 ...
import vcf
from optparse import OptionParser
def main():
    """Reformat a VCF (plain or gzipped, per the file header above) into a
    tab-separated table of chrom:pos, alleles, recalculated allele
    frequencies, and per-sample genotypes, printed to stdout.
    """
    parser = OptionParser(usage="usage: %prog -v input.vcf / input.vcf.gz")
    parser.add_option("-v", "--vcf", dest="vcf_file", help="Path to VCF to be reformatted.")
    (options, args) = parser.parse_args()
    if not options.vcf_file:
        print("No VCF specified, please specify with the -v flag.")
        return -1
    vcf_file = options.vcf_file
    vcf_reader = vcf.Reader(filename=vcf_file)
    # The header row is emitted once, using the first record's sample list.
    header_printed = 0;
    for record in vcf_reader:
        if (header_printed == 0):
            # Print header chromosome position, alleles, allele frequences and sample names
            print ("Chrom:Pos\tAlleles\tFreq", end="\t")
            # Tab-separate every sample name; newline after the last one.
            for i in range(0,len(record.samples) - 1):
                print (record.samples[i].sample, end='\t')
            print (record.samples[len(record.samples) - 1].sample)
            header_printed = 1
        # Print site data
        ## Print chromosome position
        print (record.CHROM, end=':')
        print (record.POS, end='\t')
        ## Print alleles (starting with reference)
        print (record.REF, end=',')
        for i in range(0,len(record.ALT) - 1):
            print (record.ALT[i], end=',')
        print (record.ALT[len(record.ALT) - 1], end='\t')
        ## Recalculate allele frequencies from INFO: the reference allele's
        ## frequency is whatever AN (total alleles) leaves after all the
        ## alternate counts (AC) are subtracted.
        an = record.INFO['AN']
        ac_ref = (an - sum(record.INFO['AC'])) / an
        print ("{:.3f}".format(ac_ref), end=',')
        for i in range(0,len(record.INFO['AC']) - 1):
            print ("{:.3f}".format(record.INFO['AC'][i] / an), end=',')
        print ("{:.3f}".format(record.INFO['AC'][len(record.INFO['AC']) - 1] / an), end='\t')
        ## Print genotype data
        for i in range(0,len(record.samples) - 1):
            print (record.samples[i]['GT'], end='\t')
        print (record.samples[len(record.samples) - 1]['GT'])
if __name__ == "__main__":
    main()  # NOTE(review): main()'s -1 error code is not passed to sys.exit
|
985,930 | 4184fba622055ffbdf0e4b1abf83be589fc1c9f6 | """Framework for compatible simulations under the /simulation directory"""
import random
import time
import logging
import os
import scipy.io
import pybullet as p
import pybullet_data
import abc
from arm_pytorch_utilities import rand
logger = logging.getLogger(__name__)
class Mode:
    # Rendering mode constants; mapped onto pybullet's connection modes
    # by PyBulletSim._configure_physics_engine below.
    DIRECT = 0
    GUI = 1
class ReturnMeaning:
    # Status codes returned by each stage of a Simulation run.
    SUCCESS = 0
    ERROR = 1
    REJECTED = 2
class _DefaultConfig:
    # Fallback config; any object exposing a DATA_DIR attribute works.
    DATA_DIR = './data'
class Simulation(abc.ABC):
    """Template for one repeatable experiment run.

    ``run()`` drives the pipeline -- configure physics engine, set up the
    experiment, init data, run -- and then optionally plots and saves the
    results as a .mat file under ``save_dir``. Subclasses override the
    underscore-prefixed hooks.
    """

    def __init__(self, save_dir='raw', mode=Mode.GUI, log_video=False, plot=False, save=False,
                 num_frames=300, sim_step_s=1. / 240., config=_DefaultConfig):
        # simulation meta
        self.save_dir = os.path.join(config.DATA_DIR, save_dir)
        self.mode = mode
        self.log_video = log_video
        # simulation config
        self.num_frames = num_frames
        self.sim_step_s = sim_step_s
        # actions to take once the experiment finishes
        self.plot = plot
        self.save = save
        # per-run state variables
        self.randseed = None

    def run(self, randseed=None, run_name=None):
        """Execute the full pipeline; returns a ReturnMeaning status code.

        A missing *randseed* is drawn from a time-seeded RNG; *run_name*
        (defaulting to the seed) names the saved .mat file.
        """
        if randseed is None:
            rand.seed(int(time.time()))
            randseed = random.randint(0, 1000000)
        logger.debug('random seed: %d', randseed)
        self.randseed = randseed
        rand.seed(randseed)
        # Each stage aborts the run by returning a non-SUCCESS code.
        ret = self._configure_physics_engine()
        if ret is not ReturnMeaning.SUCCESS:
            return ret
        ret = self._setup_experiment()
        if ret is not ReturnMeaning.SUCCESS:
            return ret
        ret = self._init_data()
        if ret is not ReturnMeaning.SUCCESS:
            return ret
        ret = self._run_experiment()
        if ret is not ReturnMeaning.SUCCESS:
            return ret
        # plot data
        if self.plot:
            self._plot_data()
        # save experiment
        if self.save:
            if not os.path.exists(self.save_dir):
                os.makedirs(self.save_dir)
            run_name = run_name if run_name is not None else randseed
            save_to = os.path.join(self.save_dir, '{}.mat'.format(run_name))
            # export in matlab/numpy compatible format
            scipy.io.savemat(save_to, mdict=self._export_data_dict())
            logger.info("Finished saving to {}".format(save_to))
        return ReturnMeaning.SUCCESS

    @abc.abstractmethod
    def _configure_physics_engine(self):
        # Required hook: connect/configure the physics backend.
        return ReturnMeaning.SUCCESS

    def _setup_experiment(self):
        # Optional hook: build the scene / load objects.
        return ReturnMeaning.SUCCESS

    def _init_data(self):
        # Optional hook: allocate per-run data buffers.
        return ReturnMeaning.SUCCESS

    @abc.abstractmethod
    def _run_experiment(self):
        # Required hook: execute the experiment itself.
        return ReturnMeaning.SUCCESS

    def _plot_data(self):
        # Optional hook: visualize collected data.
        pass

    def _export_data_dict(self):
        # Optional hook: dict handed to scipy.io.savemat when saving.
        return {}
class PyBulletSim(Simulation):
    """Simulation backed by the PyBullet physics engine."""

    def __init__(self, realtime_simulation=False, **kwargs):
        super(PyBulletSim, self).__init__(**kwargs)
        self.physics_client = None
        # When True, pybullet advances itself in real time rather than by
        # fixed steps of sim_step_s.
        self.realtime = realtime_simulation

    def _configure_physics_engine(self):
        mode_dict = {Mode.GUI: p.GUI, Mode.DIRECT: p.DIRECT}
        # if the mode we gave is in the dict then use it, otherwise use the given mode value as is
        # NOTE(review): the ``or`` fallback assumes the mapped pybullet
        # constants are truthy -- confirm if p.DIRECT/p.GUI could be 0.
        mode = mode_dict.get(self.mode) or self.mode
        self.physics_client = p.connect(mode)  # p.GUI for GUI or p.DIRECT for non-graphical version
        if self.log_video:
            p.startStateLogging(p.STATE_LOGGING_VIDEO_MP4, "{}.mp4".format(self.randseed))
        # use data provided by PyBullet
        p.setAdditionalSearchPath(pybullet_data.getDataPath())  # optionally
        if self.realtime:
            p.setRealTimeSimulation(True)
        else:
            p.setTimeStep(self.sim_step_s)
        return ReturnMeaning.SUCCESS

    def run(self, randseed=None, run_name=None):
        # make sure to always disconnect after running
        ret = super(PyBulletSim, self).run(randseed, run_name)
        p.disconnect(self.physics_client)
        return ret
|
985,931 | df29cfbda8fb3166e9571d810f328b9ee74c27ab | from flask import Blueprint, jsonify
from server.db.models import Person, Organization, Trip, TripSurvey
v1 = Blueprint('v1', __name__, url_prefix='/v1')


@v1.route("/organizations")
def list_organizations():
    """Return every Organization as a JSON array of serialized objects."""
    return jsonify([org.serialize() for org in Organization.objects])
@v1.route("/surveys")
def list_surveys():
    """Return every TripSurvey as a JSON array of serialized objects."""
    return jsonify([survey.serialize() for survey in TripSurvey.objects])
@v1.route("/trips")
def list_trips():
    """Return every Trip as a JSON array of serialized objects."""
    return jsonify([trip.serialize() for trip in Trip.objects])
@v1.route("/people")
def list_people():
    """Return every Person as a JSON array of serialized objects."""
    return jsonify([person.serialize() for person in Person.objects])
|
985,932 | 32ad3d44b5c6e36fb2e010f973b41e9013bd0775 | import torch
import numpy as np
import torch.nn as nn
import matplotlib.pyplot as plt
import SinGAN.customFuncs as customFuncs
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('--input_dir', help='input image dir', default='Input/Images3D/')
parser.add_argument('--input_name', help='input fig', default='simpleforest_80.pt')
parser.add_argument('--output_dir', help='output fig', default='Input/Editing3D/')
opt = parser.parse_args()

pathName = opt.input_name
fileName = pathName.split('.')[0]
inputDir = opt.input_dir
outputDir = opt.output_dir

# Load the saved voxel tensor; presumably -1 marks empty voxels (cf. the
# ``== -1`` test below) -- confirm against the printed unique values.
original = torch.load(inputDir+pathName)
print('Tensor shape:', original.shape, " Unique values:", original.flatten().unique())
edited = original.clone().squeeze()

# Params
mode = 'editing'
'''
# for trees_80.pt
wnd = (slice(30, 70), slice(50, 74), slice(40, 70))
translation = (10, -50, 6) # Translation vector, used only in editing
'''
'''
# for simpleforest_80.pt
wnd = (slice(60, 80), slice(34, 60), slice(0, 40)) # x, y, z
translation = (-60, -34, 0) # Translation vector, used only in editing. Set to (0,0,0) to print the wnd selection
'''
# for simpleforest_80.pt
wnd = (slice(65, 75), slice(35, 50), slice(15, 35)) # x, y, z
translation = (-55, -15, 36) # Translation vector, used only in editing. Set to (0,0,0) to print the wnd selection

if mode == 'editing': # Move voxels to another location
    wnd_to = tuple([slice(max(s.start + d, 0), s.stop + d) for s, d in zip(wnd, translation)]) # Compute target window by applying the translation vector to end
    patch = torch.where(edited[wnd[0], wnd[1], wnd[2]] == -1, 0, 1) # Set to zero to highlight modified volume regions
    edited[wnd_to[0], wnd_to[1], wnd_to[2]] = patch
elif mode == 'recovery': # Remove part of voxels from input
    edited[wnd[0], wnd[1], wnd[2]] = 0 # 1 for empty
    # BUG FIX: the mask computation below reads ``wnd_to`` unconditionally;
    # in recovery mode it was never assigned, raising a NameError. The
    # modified region in this mode is the selection window itself.
    wnd_to = wnd

# Plot
edited = edited.unsqueeze(0).unsqueeze(0)
customFuncs.visualizeVolume(edited, title='Edited', show=False)
edited[edited == 0] = -1

# Compute mask
mask = torch.zeros_like(edited)
mask[0, 0, wnd_to[0], wnd_to[1], wnd_to[2]] = 1
customFuncs.visualizeMask(mask, title="Mask", show=False)
plt.show()

# Save result
torch.save(edited, outputDir+fileName+'.pt')
torch.save(mask, outputDir+fileName+'_mask.pt')
|
985,933 | 1adc7affedd15b30596efa3bfb21fbbde4e529c7 | import sys
from PySide2 import QtWidgets, QtGui
from PySide2 import QtWidgets, QtGui, QtCore
from strukture_dock import StructureDock
from workspace import WorkspaceWidget
# Tab deletion ("Brisanje taba u app") -- BUG FIX: the original left this
# note as a stray, no-op string expression; it is now a real comment.
def delete_tab(index):
    """Remove the tab at *index* from the central tab widget."""
    central_widget.removeTab(index)
# "Open the Structure dock file in the terminal" -- BUG FIX: this note was a
# stray, no-op string expression in the original; kept as a comment instead.
# def open_file(path):
#     with open(path) as infile:
#         txt = infile.read()
#     print(txt)


def read_file(index):
    """Load the file selected in the structure dock into a new workspace tab."""
    path = structure_dock.model.filePath(index)
    with open(path) as f:
        text = f.read()
    new_workspace = WorkspaceWidget(central_widget)
    central_widget.addTab(new_workspace, path.split("/")[-1])
    new_workspace.show_text(text)
# TODO: adjust to the professor's specification ("Promeni u specifikaciju profesora")
if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    main_window.resize(700, 600)
    # Window appearance (title + icon)
    main_window.setWindowTitle("Informacioni resursi prodaje")
    icon = QtGui.QIcon("picture/icons8-edit-file-64.png")
    main_window.setWindowIcon(icon)
    # Menu bar and its top-level menus
    menu_bar = QtWidgets.QMenuBar(main_window)
    file_menu = QtWidgets.QMenu("File",menu_bar)
    edit_menu = QtWidgets.QMenu("Edit", menu_bar)
    view_menu = QtWidgets.QMenu("View", menu_bar)
    help_menu = QtWidgets.QMenu("Help", menu_bar)
    open_menu = QtWidgets.QMenu("Open", menu_bar)
    # Icons for the menu actions
    fileIcon = QtGui.QIcon("picture/icons8-edit-file-64.png")
    file_menu.addAction(fileIcon, "New file")  # menu action
    fileIcon = QtGui.QIcon("picture/print.png")
    file_menu.addAction(fileIcon, "Print")  # menu action
    file_menu.setToolTip("Open")
    editIcon = QtGui.QIcon("picture/textedit.png")
    edit_menu.addAction(editIcon, "Settings")  # menu action
    edit_menu.setToolTip("Open")
    viewIcon = QtGui.QIcon("picture/diplay.png")
    view_menu.addAction(viewIcon, "Window")  # menu action
    menu_bar.addMenu(file_menu)
    menu_bar.addMenu(edit_menu)
    menu_bar.addMenu(view_menu)
    menu_bar.addMenu(help_menu)
    menu_bar.addMenu(open_menu)
    # Toolbar with its quick-access actions
    tool_bar = QtWidgets.QToolBar(main_window)
    t1 = QtGui.QIcon("picture/diplay.png")
    tool_bar.addAction(t1, "Display")
    t2 = QtGui.QIcon("picture/poruka.png")
    tool_bar.addAction(t2, "Inbox")
    t3 = QtGui.QIcon("picture/telefon.png")
    tool_bar.addAction(t3, "Call")
    t4 = QtGui.QIcon("picture/wifi.png")
    tool_bar.addAction(t4, "Wi-fi")
    # Central tabbed workspace
    central_widget = QtWidgets.QTabWidget(main_window)
    # text_edit = QtWidgets.QTextEdit(central_widget)
    # central_widget.addTab(text_edit, QtGui.QIcon("picture/textedit.png"), "Tekstualni editor")
    workspace = WorkspaceWidget(central_widget)
    central_widget.addTab(workspace,QtGui.QIcon("picture/tabela.png"), "Prikaz tabele")
    central_widget.tabCloseRequested.connect(delete_tab)  # tab deletion
    # structure_dock = QtWidgets.QDockWidget("Strukture dokumenata", main_window)
    structure_dock = StructureDock("Strukture dokumenata", main_window)
    structure_dock.tree.clicked.connect(read_file)  # open selected file in a tab
    # structure_dock.kliknut.connect(open_file)
    # Toggle action for showing/hiding the structure dock
    toggle_structure_dock_action = structure_dock.toggleViewAction()
    view_menu.addAction(toggle_structure_dock_action)
    status_bar = QtWidgets.QStatusBar(main_window)
    status_bar.showMessage("Status bar je prikazan...")
    central_widget.setTabsClosable(True)
    # Assemble the main window and start the Qt event loop
    main_window.setMenuBar(menu_bar)
    main_window.addToolBar(tool_bar)
    main_window.addDockWidget(QtCore.Qt.LeftDockWidgetArea, structure_dock)
    main_window.setCentralWidget(central_widget)
    main_window.setStatusBar(status_bar)
    # menu_bar.setParent(main_window)
    main_window.show()
    # menu_bar.setParent(main_window)
    sys.exit(app.exec_())
|
985,934 | 1e8df6fa46d16d0085cd76b57dd7e12dcdc0a61d | import numpy as np
# Smoke test: report the installed NumPy version and build a small array.
print ("Numpy => Numerical Python")
print(np.__version__)
arr = np.array([1, 2, 3, 4, 5])
print(arr)
|
985,935 | 91365f4f759a8cde0412965a137b3407aa83f58b | import numpy as np
class Main:
    """Read an n x m integer matrix from stdin (HackerRank-style input)."""
    def __init__(self):
        # First line: the dimensions; next n lines: m space-separated ints.
        self.n, self.m = map(int, input().split())
        self.arr = np.array([input().split() for i in range(self.n)], int)
    def output(self):
        # Mean along axis 1 (per row), variance along axis 0 (per column),
        # and the standard deviation of the flattened array.
        print(np.mean(self.arr, axis=1), np.var(self.arr, axis=0), np.std(self.arr), sep='\n')
if __name__ == '__main__':
    obj = Main()
    obj.output()
|
985,936 | 6ac8644c24f7d8222f5002cdbf8dcb6e65afbb7c |
def remix(txt, lst):
    """Reorder the characters of *txt* according to the indices in *lst*.

    Each entry of *lst* is zipped with the character of *txt* at the same
    position. Duplicate entries keep their first insertion position but the
    last zipped character (plain dict semantics); the surviving characters
    are joined in insertion order.
    """
    # dict(zip(...)) deduplicates exactly like the original version, but
    # without the throwaway per-character list or the list() around values().
    return ''.join(dict(zip(lst, txt)).values())
|
985,937 | ae961f6d96417e48fc1858ee738cbcf51b9974c2 | import sys
import json
import traceback
import scheduler
def main():
    """Read a JSON scheduling request from stdin and print the schedule as JSON.

    Expected input keys: 'catalog', 'required_courses', 'completed_courses'
    and 'max_courses_per_quarter'. The output object maps quarter labels
    q1..qN to whatever scheduler.create_schedule produced for that quarter.
    """
    std_input = sys.stdin.read()
    input_json = json.loads(std_input)
    course_catalog = input_json['catalog']
    required_courses = input_json['required_courses']
    completed_courses = input_json['completed_courses']
    max_courses_per_quarter = input_json['max_courses_per_quarter']
    schedule = scheduler.create_schedule(
        course_catalog,
        required_courses,
        max_courses_per_quarter=max_courses_per_quarter,
        completed_courses=completed_courses
    )
    schedule_json = {}
    for i, x in enumerate(schedule):
        # Quarters are labelled starting at q1, not q0.
        schedule_json[f'q{i+1}'] = x
    print(json.dumps(schedule_json))
if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        # Print the traceback to stdout (not stderr) so a caller consuming
        # stdout sees the failure instead of silence/malformed JSON.
        traceback.print_exc(file=sys.stdout)
|
985,938 | 2d6c6ca49e75f339a7e94fe497f327305f790e4d | """Restricted Sum
Our new calculator is censored and as such it does not accept certain words.
You should try to trick by writing a program to calculate the sum of numbers.
Given a list of numbers, you should find the sum of these numbers.
Your solution should not contain any of the banned words,
even as a part of another word.
The list of banned words are as follows:
sum
import
for
while
reduce
Input: A list of numbers.
Output: The sum of numbers.
Precondition: The small amount of data. Let's creativity win!
"""
from typing import List
def checkio(data: List[int]) -> int:
    """Add all numbers in *data* recursively (none of the banned words appear).

    Slicing, unlike the previous data.pop(), leaves the caller's list intact.
    """
    return 0 if not data else data[0] + checkio(data[1:])
if __name__ == "__main__":
    # Self-checks from the kata description.
    assert checkio([1, 2, 3]) == 6
    assert checkio([1, 2, 3, 4, 5, 6]) == 21
    assert checkio([2, 2, 2, 2, 2, 2]) == 12
|
985,939 | d29bf45f55f47fcb52b205c9e22a90d86f64bec6 | import numpy as np
np.set_printoptions(suppress=True, precision=4)
import sys
import matplotlib.pyplot as plt
import sklearn.decomposition as decomp
import sklearn.linear_model as linear_model
import sklearn.datasets as sk_data
from sklearn.preprocessing import StandardScaler
import numpy.linalg as nla
import sklearn.svm as svm
import pandas as pd
def iqr_threshold_method(scores, margin):
    """Return (lower, upper) outlier fences for *scores* via Tukey's IQR rule.

    The fences are Q1 - 1.5*IQR - margin and Q3 + 1.5*IQR + margin, with
    quartiles computed using midpoint interpolation.

    Parameters
    ----------
    scores : array-like of float
    margin : float
        Extra slack added symmetrically to both fences.
    """
    # NumPy renamed `interpolation=` to `method=` in 1.22; the old spelling
    # is deprecated and removed in current releases.
    q1 = np.percentile(scores, 25, method='midpoint')
    q3 = np.percentile(scores, 75, method='midpoint')
    iqr = q3 - q1
    lower_range = q1 - (1.5 * iqr) - margin
    upper_range = q3 + (1.5 * iqr) + margin
    return lower_range, upper_range
def get_projected_vectors(X, pca, ssX=None):
    """Map *X* into PCA space and back, returning the reconstructed vectors.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
    pca : fitted transformer exposing transform / inverse_transform
    ssX : fitted StandardScaler, optional
        Scaler used to centre the data; fitted on *X* when omitted.
    """
    # `is None`, not truthiness: an estimator's truth value is not a reliable
    # "was an argument supplied" signal.
    if ssX is None:
        ssX = StandardScaler().fit(X)
    centered_data = ssX.transform(X)
    reduced = pca.transform(centered_data)
    return ssX.inverse_transform(pca.inverse_transform(reduced))
def do_pca_anomaly_scores(obs, n_components):
    """Score each row of *obs* by its PCA reconstruction error (L2 norm).

    Rows that the first *n_components* principal components reconstruct
    poorly get high scores and are anomaly candidates.
    """
    ssX = StandardScaler()
    centered_data = ssX.fit_transform(obs)
    pca = decomp.PCA(n_components=n_components)
    pca.fit(centered_data)
    # Reuse the scaler fitted above instead of letting get_projected_vectors
    # fit a second, redundant StandardScaler on the same data.
    projected_vectors = get_projected_vectors(obs, pca, ssX)
    return nla.norm(obs - projected_vectors, axis=1)
# currently the maximum value of pca score is considered as Anomaly
# hence only one anomaly is returned
# find a better way to get threshold value
# one could be determine outlier in pca score and use it as threshold
# similar issue with testing_svm_based
def PcaOutlier(data, n_components, margin=0):
    """Return the data points whose PCA reconstruction error is an outlier.

    Parameters
    ----------
    data : numpy array like data_points (one row per observation)
    n_components : int, float, None or str
        Number of components to keep.
    margin : int, default=0
        Margin of error added to the IQR fences.

    Returns
    -------
    list of rows of *data* whose anomaly score falls outside the fences.
    """
    pca_score = do_pca_anomaly_scores(data, n_components)
    lower_range, upper_range = iqr_threshold_method(pca_score, margin)
    # Keep the rows whose score falls outside [lower_range, upper_range].
    return [point for point, score in zip(data, pca_score)
            if score < lower_range or score > upper_range]
if __name__=='__main__':
    # Demo: the points lie on a line except one slightly off-line sample
    # (0.5, 0.26), which the 1-component PCA reconstruction should flag.
    pca_example = np.array([[-3, -1.5], [-2.5, -1.25], [-1.5, -0.75],
                            [-1, -0.5], [-0.5, -0.25], [0, 0], [0.5, 0.26],
                            [1, 0.5], [1.5, 0.75], [2.5, 1.25], [3, 1.5]])
    res = PcaOutlier(pca_example, 1)
    print(res)
|
985,940 | 5fd73af7dacf2007621524cf4b02371036c591f3 |
#calss header
class _OFFENDED():
def __init__(self,):
self.name = "OFFENDED"
self.definitions = offend
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['offend']
|
985,941 | a059477ad7869782d79be042871a1d391c80cb06 | import discord
from discord.ext.commands import check
from lifesaver.bot.context import LifesaverContext
def is_nsfw_or_dm():
    """Command check that passes in DMs or in NSFW-flagged guild text channels."""
    def _check(context: LifesaverContext):
        # DMs have no guild and are always allowed.
        if not context.guild:
            return True
        channel = context.channel
        return isinstance(channel, discord.TextChannel) and channel.nsfw
    return check(_check)
|
985,942 | 0a9219c9c68bc767f7048e24a49ed2d57492136b | import os
import csv
import tensorflow as tf
BUFFER_SIZE=10000
def load_dataset(datapath, vocab, dimesion=0,
                 splits=("split_1", "split_2", "split_4", "split_8", "split_16")):
    """Load (token_ids, label) pairs listed in the CSV at *datapath*.

    The CSV has columns filepath, valence, arousal. Only rows whose path
    contains one of *splits* are used; each row's tokens are read from the
    .txt file alongside its midi file and mapped through *vocab*.

    Parameters
    ----------
    datapath : str
        CSV file; paths inside it are relative to its directory.
    vocab : dict
        token string -> integer id.
    dimesion : int
        0 -> [valence], 1 -> [arousal], 2 -> [valence, arousal].
        (Name kept misspelled for keyword-argument compatibility.)
    splits : iterable of split directory names to include
        (tuple default replaces the previous mutable list default).

    Raises
    ------
    ValueError
        If *dimesion* is not 0, 1 or 2 (previously this left `label`
        unbound and raised NameError).
    """
    dataset = []
    # `with` closes the CSV handle, which csv.DictReader(open(...)) leaked.
    with open(datapath, "r") as csv_file:
        for row in csv.DictReader(csv_file):
            filepath, valence, arousal = row["filepath"], int(row["valence"]), int(row["arousal"])
            for split in splits:
                if split + "/" in filepath:
                    # Form midi filepath
                    piece_path = os.path.join(os.path.dirname(datapath), filepath)
                    # Companion token file: same basename, .txt extension
                    txt_file = os.path.splitext(piece_path)[0] + ".txt"
                    if os.path.exists(txt_file):
                        # Read txt file and map tokens through the vocabulary
                        with open(txt_file) as fp:
                            tokens = [vocab[w] for w in fp.read().split(" ")]
                        if dimesion == 0:
                            label = [valence]
                        elif dimesion == 1:
                            label = [arousal]
                        elif dimesion == 2:
                            label = [valence, arousal]
                        else:
                            raise ValueError(f"dimesion must be 0, 1 or 2, got {dimesion!r}")
                        dataset.append((tokens, label))
    return dataset
def build_dataset(dataset, batch_size):
    """Wrap *dataset* (list of (tokens, label) pairs) in a shuffled, padded
    tf.data pipeline.

    Sequences are padded with the value 1 to the longest sequence in each
    batch. NOTE(review): padded_shapes=([None], [1]) assumes length-1 labels;
    dimesion=2 labels from load_dataset have length 2 — confirm this is
    never used together with two-dimensional labels.
    """
    tf_dataset = tf.data.Dataset.from_generator(lambda: dataset, (tf.int32, tf.int32))
    tf_dataset = tf_dataset.shuffle(BUFFER_SIZE)
    tf_dataset = tf_dataset.padded_batch(batch_size, padded_shapes=([None], [1]), padding_values=(1, 1))
    return tf_dataset
|
985,943 | d60c5eeca9ad2ff650f9e1e48ce29d700d426d50 | import numpy as np
from DynamicVariable import DynamicVariable
#TODO override all operation methods to return random element
class UniformDistributedVariable(DynamicVariable):
    """Dynamic variable that redraws uniformly from [min, max) on each tick."""
    def __init__(self, min, max):
        self.min = min
        self.max = max
        # Drawing the initial sample via tick() keeps the sampling logic
        # in a single place.
        self.tick()
    def assign(self, new_value):
        """Overwrite the current sample (until the next tick)."""
        self.value = new_value
    def tick(self):
        """Draw a fresh uniform sample into self.value."""
        self.value = np.random.uniform(low=self.min, high=self.max)
if __name__=='__main__':
    # Smoke test: arithmetic on the variable delegates to DynamicVariable's
    # operator overloads; tick() redraws the underlying sample.
    var = UniformDistributedVariable(0., 0.2)
    print(var + 0.)
    print(var + 1.)
    var.tick()
    print(var.value)
985,944 | d7e91d87e7b66f0eacee7726aef6c9ce96b4ea57 | import numpy as np
from loss import *
class Activation:
    """Base class for activation layers; holds the forward/backward buffers."""
    def __init__(self):
        # All three buffers start unset and are populated by the subclasses'
        # forward() / backward() implementations.
        self.inputs = self.output = self.gradient_inputs = None
class Activation_softmax(Activation):
    """Softmax activation: each output row is a probability distribution."""
    def forward(self, inputs, training):
        """Row-wise softmax with the max-subtraction trick for stability."""
        self.inputs = inputs
        exp_values = np.exp(inputs - np.max(inputs, axis=1, keepdims=True))
        self.output = exp_values / np.sum(exp_values, axis=1, keepdims=True)
    def backward(self, gradient):
        """Backpropagate through softmax using the per-sample Jacobian.

        dS_i/dz_j = S_i * (delta_ij - S_j), i.e. J = diag(S) - S S^T.
        BUG FIX: the previous code built np.dot(single_output,
        single_derivate.T), which is not the softmax Jacobian and is
        shape-invalid for more than one class.
        """
        self.gradient_inputs = np.empty_like(gradient)
        for index, (single_output, single_derivate) in enumerate(zip(self.output, gradient)):
            single_output = single_output.reshape(-1, 1)
            jacobian_matrix = np.diagflat(single_output) - np.dot(single_output, single_output.T)
            self.gradient_inputs[index] = np.dot(jacobian_matrix, single_derivate)
    def prediction(self, outputs):
        """Predicted class = arg max over each row."""
        return np.argmax(outputs, axis=1)
class Activation_ReLU(Activation):
    """Rectified linear unit: element-wise max(0, x)."""
    def forward(self, inputs, training):
        self.inputs = inputs
        self.output = np.maximum(0, inputs)
    def backward(self, gradient):
        # The gradient passes through unchanged where the input was >= 0
        # and is zeroed where the input was negative.
        grad = gradient.copy()
        grad[self.inputs < 0] = 0
        self.gradient_inputs = grad
    def prediction(self, outputs):
        # ReLU outputs are used directly as predictions.
        return outputs
class Activation_Sigmoid(Activation):
    """Logistic sigmoid activation: 1 / (1 + e^-x)."""
    def forward(self, inputs, training):
        self.inputs = inputs
        self.output = 1 / (1 + np.exp(-inputs))
    def backward(self, gradient):
        # d(sigmoid)/dx = sigmoid * (1 - sigmoid), evaluated at the cached output.
        sig = self.output
        self.gradient_inputs = gradient * (1 - sig) * sig
    def prediction(self, outputs):
        # Threshold at 0.5; the `* 1` converts booleans to 0/1 integers.
        return (outputs > 0.5) * 1
class Activation_Linear:
    """Identity activation: values and gradients pass straight through."""
    def forward(self, inputs, training):
        # Output is the input itself; cache both for the backward pass.
        self.inputs = self.output = inputs
    def backward(self, gradient):
        # The derivative of the identity is 1, so the gradient is copied as-is.
        self.gradient_inputs = gradient.copy()
    def prediction(self, outputs):
        return outputs
class Activation_Softmax_Loss_CategoricalCrossentropy():
    """Softmax + categorical cross-entropy fused for a cheap combined gradient."""
    def __init__(self):
        self.output = None
        self.gradient_inputs = None
        self.activation = Activation_softmax()
        self.loss = Loss_CategoricalCrossentropy()
    def forward(self, inputs, y_true):
        """Run softmax on *inputs* and return the cross-entropy loss vs *y_true*."""
        # BUG FIX: Activation_softmax.forward requires a `training` flag; the
        # previous call omitted it and raised TypeError. This fused layer is
        # only used during training, so pass True.
        self.activation.forward(inputs, True)
        # Set the output
        self.output = self.activation.output
        return self.loss.calculate(self.output, y_true)
    def backward(self, gradient, y_true):
        """Combined softmax+CE gradient: softmax_output - one_hot(y_true), / N."""
        samples = len(gradient)
        # Accept one-hot labels by collapsing them to class indices.
        if len(y_true.shape) == 2:
            y_true = np.argmax(y_true, axis=1)
        self.gradient_inputs = gradient.copy()
        self.gradient_inputs[range(samples), y_true] -= 1
        self.gradient_inputs = self.gradient_inputs / samples
class q_gabor():
    """q-Gabor filter: a Gabor-style wavelet whose Gaussian envelope is
    replaced by a q-exponential (q == 1 recovers the Gaussian case).

    NOTE(review): q_gabor_activation below uses `tf` (TensorFlow), which is
    never imported in this module — calling it raises NameError as written.
    """
    def __init__(self, q, alpha=0.3, f=0.08, theta=0, k=1):
        """
        :param q: opening (q-exponential shape parameter)
        :param alpha: scale applied to the carrier argument
        :param f: frequency of the sinusoidal carrier
        :param theta: angle (phase)
        :param k: amplitude
        """
        self.q = q
        self.alpha = alpha
        self.f = f
        self.theta = theta
        self.k = k
    @staticmethod
    def sinusoidal_function(f, x):  # s(X) = e^(2*pi*f*X*i)
        s = pow(np.e, 2 * np.pi * f * x * 1j)
        return s
    @staticmethod
    def q_exponential_function(x, q):  # w(X) = 1/(1+(1-q)*X^2)^(1/(1-q))
        # q == 1 is the Gaussian limit of the q-exponential.
        if q == 1:
            w = pow(np.e, -np.pi * pow(x, 2))
        else:
            w = 1 / pow(1 + (1 - q) * x * x, 1 / (1 - q))
        return w
    def q_gabor_1d(self, x, alpha, q, f, theta, k):  # g(X)=k*e^(theta*i)*w(alpha*X)*s(X)
        # NOTE(review): the envelope is evaluated at x, not alpha*x, although
        # the formula comment above says w(alpha*X) — confirm which is intended.
        sinusoidal = self.sinusoidal_function(f, alpha * x)
        q_exponencial = self.q_exponential_function(x, q)
        g = k * pow(np.e, (theta * 1j)) * sinusoidal * q_exponencial
        return g
    @staticmethod
    def q_gabor_2d(x, y, q, k, u, v, p, a, b):
        """
        :param x: data
        :param y: data
        :param q: opening
        :param k: amplitude
        :param u: X filter frequency
        :param v: Y filter frequency
        :param p: filter phase
        :param a: envelope
        :param b: envelope
        """
        xo = yo = 0
        # NOTE(review): this envelope differs from the 1-D q-exponential
        # above (original comment: "formula diferente!!!!") — confirm.
        w = k * (1 / ((1 + (1 - q) * ((a ** 2 * (x - xo) ** 2 + b ** 2 * (y - yo) ** 2))) ** (1 / (1 - q))))
        s = np.exp((2 * np.pi * (u * x + v * y) + p) * 1j)
        g = w * s
        return g
    def q_gabor_activation(self, x):
        """
        :param x: data

        NOTE(review): requires `import tensorflow as tf`, which this module
        does not perform — this method currently raises NameError.
        """
        x = tf.cast(x, dtype=tf.complex128)
        g = self.q_gabor_1d(x, self.alpha, self.q, self.f, self.theta, self.k)
        return tf.cast(g, dtype=tf.float32)
985,945 | 2594dae6ba45623b9cd9cf4cc18f4c7648bedc7b | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from perm_equivariant_seq2seq.symmetry_groups import LanguageInvariance
class Language:
    """Vocabulary bookkeeping for a language being translated.

    Args:
        name: (string) Name of language being used
    """
    def __init__(self, name):
        self.name = name
        self.word2index = {}
        self.word2count = {}
        self.index2word = {0: "SOS", 1: "EOS"}
        self.n_words = 2  # Count SOS and EOS
    def add_sentence(self, sentence):
        """Split *sentence* on spaces and register every word."""
        for token in sentence.split(' '):
            self.add_word(token)
    def add_word(self, word):
        """Register *word*, assigning it a fresh index on first sight."""
        if word in self.word2index:
            self.word2count[word] += 1
        else:
            self.word2index[word] = self.n_words
            self.word2count[word] = 1
            self.index2word[self.n_words] = word
            self.n_words += 1
class InvariantLanguage(Language):
    """Language whose vocabulary collapses words into invariance classes.

    Args:
        name (string): Name of language being used
        invariances (list): invariance objects applied to every word.
    """
    def __init__(self, name, invariances):
        super(InvariantLanguage, self).__init__(name)
        self.invariances = invariances
    def add_word(self, word):
        """Map *word* to its equivalence class, then register it."""
        super(InvariantLanguage, self).add_word(self.map_word(word))
    def map_word(self, word):
        """Run *word* through every invariance in order."""
        for invariance in self.invariances:
            word = invariance.map_word(word)
        return word
    def map_sentence(self, sentence):
        """Map every space-separated word of *sentence* to its class."""
        mapped = [self.map_word(token) for token in sentence.split(' ')]
        return ' '.join(mapped)
class EquivariantLanguage(Language):
    """Language with a fixed (and known) set of equivariant words.

    Args:
        name (string): Name of language being used
        equivariant_words (list): words that get the first indices after SOS/EOS.
    """
    def __init__(self, name, equivariant_words):
        super(EquivariantLanguage, self).__init__(name)
        self.equivariant_words = equivariant_words
    def rearrange_indices(self):
        """Re-index the vocabulary so the equivariant words directly follow
        SOS/EOS, with the remaining words sorted alphabetically after them.

        Returns:
            None
        """
        num_fixed_words = 2
        remaining = sorted(w for w in self.word2index if w not in self.equivariant_words)
        for offset, word in enumerate(self.equivariant_words):
            idx = num_fixed_words + offset
            self.word2index[word] = idx
            self.index2word[idx] = word
        base = num_fixed_words + self.num_equivariant_words
        for offset, word in enumerate(remaining):
            idx = base + offset
            self.word2index[word] = idx
            self.index2word[idx] = word
    @property
    def num_equivariant_words(self):
        return len(self.equivariant_words)
    @property
    def num_fixed_words(self):
        return 2
    @property
    def num_other_words(self):
        return len([w for w in self.word2index if w not in self.equivariant_words])
# Define SCAN language invariances.
# Each LanguageInvariance collapses the listed words into one equivalence class.
VERB_INVARIANCE = LanguageInvariance(['jump', 'run', 'walk', 'look'], 'verb')
DIRECTION_INVARIANCE = LanguageInvariance(['right', 'left'], 'direction')
CONJUNCTION_INVARIANCE = LanguageInvariance(['and', 'after'], 'conjunction')
ADVERB_INVARIANCE = LanguageInvariance(['once', 'twice', 'thrice'], 'adverb')
OTHER_INVARIANCE = LanguageInvariance(['around', 'opposite'], 'other')
def get_invariances(args):
    """Helper function to store some standard equivariances"""
    # Pair each CLI flag with its invariance; keep the enabled ones in order.
    flag_to_invariance = [
        (args.verb_invariance, VERB_INVARIANCE),
        (args.direction_invariance, DIRECTION_INVARIANCE),
        (args.conjunction_invariance, CONJUNCTION_INVARIANCE),
        (args.adverb_invariance, ADVERB_INVARIANCE),
        (args.other_invariance, OTHER_INVARIANCE),
    ]
    return [inv for enabled, inv in flag_to_invariance if enabled]
|
985,946 | 42a5431ad68da6003324b7994377f256bc1e3124 |
def hardmod(divident, divisor):
    """Compute divident mod divisor by repeated subtraction (no % operator).

    Works for floats as well as ints. For a negative divident the loop never
    runs and the (negative) input is returned unchanged — matching the
    original behaviour, not Python's %.

    Raises:
        ValueError: if divisor <= 0, which previously made the subtraction
            loop spin forever (negative divisor) or behave arbitrarily (zero).
    """
    if divisor <= 0:
        raise ValueError("divisor must be positive")
    quotient = divident
    # replicating division behavior
    while quotient >= divisor:
        quotient -= divisor
    # whatever remains below the divisor is the modulus
    return quotient
# Simple CLI driver: read two numbers and print the remainder.
divident = float(input("Enter a divident: "))
divisor = float(input("Enter a divisor: "))
print(hardmod(divident, divisor))
|
985,947 | deda4af87ee61b2bc4fefc8dbb0fa04a5429111d | import json
import os
class WrkConfig(object):
    """Immutable wrk benchmark settings, normally loaded from config.json."""
    def __init__(self, node_url, seconds, collections, threads, timeout, script_dir):
        self.__node_url = node_url
        self.__seconds = seconds
        self.__collections = collections
        self.__threads = threads
        self.__timeout = timeout
        self.__script_dir = script_dir
    @classmethod
    def read_config(cls, network="testnet"):
        """Build a WrkConfig from the repo-level config.json.

        Raises:
            ValueError: if *network* has no entry in config.json. Previously
                this only printed a message and then crashed with KeyError.
        """
        path = os.path.join(os.path.abspath(os.path.dirname(os.path.dirname(__file__))), "config.json")
        with open(path, "r") as f:
            configs = json.load(f)
        if network not in configs:
            raise ValueError("unsupported network type " + network)
        # `cls(...)` rather than a hard-coded class name keeps subclasses working.
        return cls(**configs[network])
    @property
    def node_url(self) -> str:
        return self.__node_url
    @property
    def seconds(self) -> list:
        return self.__seconds
    @property
    def collections(self) -> list:
        return self.__collections
    @property
    def threads(self) -> int:
        return self.__threads
    @property
    def timeout(self) -> int:
        return self.__timeout
    @property
    def script_dir(self) -> str:
        return self.__script_dir
|
985,948 | 2071c8f689e6b7c854f3700ed04bcd5d20e904bd | filename = "files/tempfile.txt"
try:
with open(filename) as f:
content = f.read()
except FileNotFoundError as error:
print("I am done reading a file",error.filename) |
985,949 | 534ecedbbbf5699e8e50fe6ce6fc1bb565d6f29f | from ext.debugger.elt.database import DatabaseClient
# Smoke test for DatabaseClient: construct without connecting, then
# reconnect() — which presumably opens the connection (verify) — and close.
c = DatabaseClient(connect=False)
c.reconnect()
if c.connected:
    c.close()
985,950 | 841896b1e2e97daa078623f82390cb1ef07ae7cc | #https://www.codewars.com/kata/57d814e4950d8489720008db/train/python
import math
def index(array, n):
if len(array) <= n:
return -1
else:
return math.pow(array[n], n)
def index(array, n):
return array[n]**n if n < len(array) else -1 |
985,951 | a0f741a5cbdc1525694c85f60dfc84498dd5528e | from ShifterFactory import Creator, CircularShifterCreator
from Input import Input
from SorterFactory import SorterCreator, AscendingSortCreator
from Output import Output
def client_code(creator: Creator, entrada, sorter: SorterCreator) -> str:
    """Drive a shifter factory and a sorter factory without depending on
    their concrete classes (factory-method client)."""
    print("Client: I'm not aware of the creator's class, but it still works.")
    shifted = creator.some_operation(entrada)
    ordered = sorter.some_operation(shifted)
    print(ordered)
    return ordered
if __name__ == "__main__":
    print("App: Launched with the ConcreteCreator1.")
    # Read the input text and stop-word list, circular-shift the sentences,
    # sort them ascending and write the result.
    inp = Input().process('ex1.txt', 'stopWords.txt')
    sentences = client_code(CircularShifterCreator(), inp, AscendingSortCreator())
    Output().output(sentences)
    print("\n")
985,952 | b25bef5066a6e6a6784b8262fa34a9d3e765254b | # -*- coding:utf-8 -*-
import sys
"""解説
K = 6
4 3 1 1 2 2 1 1 1 2
という数列を考える。
4 3 1 1 2 2 1 1 1 2
|_|
4
4 3 1 1 2 2 1 1 1 2
|___|
12
Kを超えたので、左端の4で割ったあと、左端の位置を1すすめる
4 3 1 1 2 2 1 1 1 2
|_|
3
4 3 1 1 2 2 1 1 1 2
|_______|
6
4 3 1 1 2 2 1 1 1 2
|_________|
12
Kを超えたので、左端の3で割ったあと、左端の位置を1すすめる
4 3 1 1 2 2 1 1 1 2
|_____________|
4
4 3 1 1 2 2 1 1 1 2
|_______________|
8
Kを超えたので、左端の1で割ったあと、左端の位置を1すすめる
4 3 1 1 2 2 1 1 1 2
|_____________|
8
まだKを超えてるので、左端の1で割ったあと、左端の位置を1すすめる
4 3 1 1 2 2 1 1 1 2
|___________|
8
まだKを超えてるので、左端の2で割ったあと、左端の位置を1すすめる
4 3 1 1 2 2 1 1 1 2
|_________|
4
よって答えは7
[上の入力例]
10 6
4
3
1
1
2
2
1
1
1
2
"""
def solve():
    """Longest contiguous window of Ss whose product is <= K (sliding window).

    Input (stdin): "N K" on the first line, then N values, one per line.
    """
    N, K = list(map(int, sys.stdin.readline().split()))
    Ss = []
    for _ in range(N):
        s = int(input())
        Ss.append(s)
    if K == 0:
        # Corner case: with K == 0 only a window containing a 0 qualifies,
        # and then the whole array qualifies (its product is 0).
        if 0 in Ss:
            print(N)
        else:
            print(0)
        return
    left = 0    # left edge of the window
    ans = 0     # final answer: longest valid window seen so far
    accum = 1   # running product of the current window
    for right in range(N):
        accum *= Ss[right]
        if accum == 0:
            # A zero makes every window containing it valid (product 0 <= K),
            # so the whole array is the answer.
            print(N)
            return
        while accum > K:
            # Shrink from the left until the product fits again.
            accum //= Ss[left]
            left += 1
        ans = max(ans, right-left+1)
    print(ans)
if __name__ == "__main__":
    solve()
985,953 | 00edd67456619c4317a2ccf89cc16cbcf9116c7c | import os
from datetime import datetime, date, time
import re
import json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib import messages
from django.db import transaction
from django.db.models import Count
from django.http import HttpResponse, JsonResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.views import View
from django.forms import formset_factory, modelformset_factory
from django.urls import reverse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.views.decorators.http import require_GET, require_POST
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field, HTML, Submit, Button, Row, Column
from crispy_forms.bootstrap import FormActions, TabHolder, Tab, Div
from .models import Sale, Refund, Basket, FringerType, Fringer, TicketType, Ticket, Donation, PayAsYouWill
from .forms import BuyTicketForm, RenameFringerForm, BuyFringerForm
from program.models import Show, ShowPerformance
# Logging
import logging
logger = logging.getLogger(__name__)
# Stripe interface
import stripe
stripe.api_key = settings.STRIPE_PRIVATE_KEY
class MyAccountView(LoginRequiredMixin, View):
    """Account page: lists the user's tickets (current and past), manages
    eFringer renaming and adds new eFringers to the basket."""

    def _create_fringer_formset(self, user, post_data=None):
        # Only fringers belonging to a completed sale can be renamed.
        FringerFormSet = modelformset_factory(Fringer, form = RenameFringerForm, extra = 0)
        formset = FringerFormSet(post_data, queryset = user.fringers.exclude(sale__completed__isnull = True))
        return formset

    def _create_buy_fringer_form(self, fringer_types, user, post_data=None):
        form = BuyFringerForm(user, fringer_types, post_data)
        return form

    def _get_context(self, request, tab, use_fringer_formset, buy_fringer_form):
        """Build the template context shared by get() and post().

        *tab* selects which tab the template opens on.
        """
        # Get tickets grouped by performance
        performances_current = []
        performances_past = []
        for ticket in request.user.tickets.filter(sale__completed__isnull = False, refund__isnull = True).order_by('performance__date', 'performance__time', 'performance__show__name').values('performance_id').distinct():
            # Get list of performances and then get tickets for each performance
            performance = ShowPerformance.objects.get(pk = ticket['performance_id'])
            tickets = request.user.tickets.filter(performance_id = ticket['performance_id'], sale__completed__isnull = False, refund__isnull = True)
            p = {
                'id': performance.id,
                'uuid': performance.uuid,
                'show': performance.show.name,
                'date' : performance.date,
                'time': performance.time,
                'tickets': [{'id': t.id, 'uuid': t.uuid, 'description': t.description, 'cost': t.cost, 'fringer_name': (t.fringer.name if t.fringer else None)} for t in tickets],
            }
            # Ok to compare naive datetimes since both are local
            if datetime.combine(performance.date, performance.time) >= request.now:
                performances_current.append(p)
            else:
                performances_past.append(p)
        # Volunteer tickets
        volunteer_tickets = request.user.tickets.filter(description='Volunteer').order_by('performance__date', 'performance__time', 'performance__show__name')
        # Get online sales status
        sales_closed = request.festival.online_sales_close and (request.now.date() > request.festival.online_sales_close)
        sales_open = request.festival.online_sales_open and (request.now.date() >= request.festival.online_sales_open) and not sales_closed
        context = {
            'sales_closed': sales_closed,
            'sales_open': sales_open,
            'sales_open_date': request.festival.online_sales_open,
            # BUG FIX: was hard-coded to 'tickets', ignoring the tab argument
            # (post() asks for the 'fringers' tab).
            'tab': tab,
            'performances_current': performances_current,
            'performances_past': performances_past,
            'basket': request.user.basket,
            'use_fringer_formset': use_fringer_formset,
            'buy_fringer_form': buy_fringer_form,
            'volunteer_earned': request.user.volunteer.comps_earned if request.user.is_volunteer else 0,
            'volunteer_available': request.user.volunteer.comps_available if request.user.is_volunteer else 0,
            'volunteer_tickets': volunteer_tickets,
        }
        return context

    def get(self, request):
        # Create fringer formset
        use_fringer_formset = self._create_fringer_formset(request.user)
        # Get fringer types and create buy form
        fringer_types = FringerType.objects.filter(festival=request.festival, is_online=True)
        buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user)
        # Display tickets and fringers
        context = self._get_context(request, 'tickets', use_fringer_formset, buy_fringer_form)
        return render(request, 'tickets/myaccount.html', context)

    @transaction.atomic
    def post(self, request):
        """Handle 'RenameFringers' and 'AddFringers' form submissions."""
        # Get the action and basket
        action = request.POST.get("action")
        basket = request.user.basket
        fringer_types = FringerType.objects.filter(festival=request.festival, is_online=True)
        use_fringer_formset = None
        buy_fringer_form = None
        # Check for rename
        if action == "RenameFringers":
            # Create fringer formset
            use_fringer_formset = self._create_fringer_formset(request.user, request.POST)
            # Check for errors
            if use_fringer_formset.is_valid():
                # Save changes
                for fringer in use_fringer_formset.save():
                    logger.info(f"eFringer renamed to {fringer.name}")
                    messages.success(request, f"eFringer renamed to {fringer.name}")
                # Reset formset
                use_fringer_formset = None
        # Check for buying
        elif action == "AddFringers":
            # Get fringer types and create form
            buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user, request.POST)
            # Check for errors
            if buy_fringer_form.is_valid():
                # Get fringer type
                buy_type = get_object_or_404(FringerType, pk = int(buy_fringer_form.cleaned_data['type']))
                buy_name = buy_fringer_form.cleaned_data['name']
                if not buy_name:
                    fringer_count = Fringer.objects.filter(user = request.user).count()
                    buy_name = f"eFringer{fringer_count + 1}"
                # Create new fringer and add to basket
                fringer = Fringer(
                    user = request.user,
                    name = buy_name if buy_name else buy_type.name,
                    description = buy_type.description,
                    shows = buy_type.shows,
                    cost = buy_type.price,
                    payment = buy_type.payment,
                    basket = basket,
                )
                fringer.save()
                logger.info(f"eFringer {fringer.name} ({fringer.description}) added to basket")
                messages.success(request, f"Fringer {fringer.name} ({fringer.description}) added to basket")
                # Confirm purchase
                return redirect(reverse('tickets:myaccount_confirm_fringers'))
        # BUG FIX: a call to self._get_performances(request.user) stood here;
        # that method does not exist on this class (it raised AttributeError)
        # and its results were unused — _get_context() regroups the tickets
        # itself.
        # Create formset and buy form if not already done (explicit `is None`
        # so an invalid-but-empty formset keeps its validation errors).
        if use_fringer_formset is None:
            use_fringer_formset = self._create_fringer_formset(request.user)
        if buy_fringer_form is None:
            buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user)
        # Redisplay with confirmation
        context = self._get_context(request, 'fringers', use_fringer_formset, buy_fringer_form)
        return render(request, 'tickets/myaccount.html', context)
class MyAccountConfirmFringersView(View):
    """Confirmation page shown after eFringers are added to the basket."""
    def get(self, request):
        # Render confirmation
        return render(request, 'tickets/myaccount_confirm_fringers.html')
class BuyView(LoginRequiredMixin, View):
    """Ticket purchase page for a single performance.

    Supports adding tickets to the basket ('AddTickets'), spending eFringer
    credits ('UseFringers'), buying new eFringers ('AddFringers') and using
    volunteer complimentary tickets ('UseVolunteer').
    """

    def get_ticket_formset(self, ticket_types, post_data=None):
        # One BuyTicketForm per ticket type, pre-filled with quantity 0.
        TicketFormset = formset_factory(BuyTicketForm, extra = 0)
        initial_data = [{'id': t.id, 'name': t.name, 'price': t.price, 'quantity': 0} for t in ticket_types]
        return TicketFormset(post_data, initial = initial_data)

    def _create_buy_fringer_form(self, fringer_types, user, post_data=None):
        form = BuyFringerForm(user, fringer_types, post_data)
        return form

    def get(self, request, performance_uuid):
        """Show the purchase page (or the 'sales closed' page)."""
        # Get basket and performance
        basket = request.user.basket
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        ticket_types = TicketType.objects.filter(festival=request.festival, is_online = True)
        fringer_types = FringerType.objects.filter(festival=request.festival, is_online=True)
        # Check if ticket sales are still open
        if performance.has_close_checkpoint:
            return redirect(reverse('tickets:buy_closed', args = [performance.uuid]))
        # Create buy ticket formset
        ticket_formset = self.get_ticket_formset(ticket_types)
        # Get fringers available for this performance
        fringers = Fringer.get_available(request.user, performance)
        # Create buy fringer form
        buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user)
        # Volunteer tickets
        if request.user.is_volunteer:
            volunteer_available = request.user.volunteer.comps_available
            volunteer_used = request.user.tickets.filter(performance=performance, description='Volunteer')
        else:
            volunteer_available = 0
            volunteer_used = True
        # Display buy page
        context = {
            'tab': 'tickets',
            'basket': basket,
            'performance': performance,
            'ticket_formset': ticket_formset,
            'fringers': fringers,
            'buy_fringer_form': buy_fringer_form,
            'volunteer_available': volunteer_available,
            'volunteer_used': volunteer_used,
        }
        return render(request, "tickets/buy.html", context)

    @transaction.atomic
    def post(self, request, performance_uuid):
        """Dispatch on the 'action' POST field; see class docstring."""
        # Get basket, performance and ticket/fringer types
        basket = request.user.basket
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        ticket_types = TicketType.objects.filter(festival=request.festival, is_online = True)
        fringer_types = FringerType.objects.filter(festival=request.festival, is_online=True)
        # Get the requested action
        action = request.POST.get("action")
        tab = 'tickets'
        # Add tickets to basket
        if action == "AddTickets":
            # Create ticket type formset
            ticket_formset = self.get_ticket_formset(ticket_types, request.POST)
            # Check for errors
            if ticket_formset.is_valid():
                # Get total number of tickets being purchased
                tickets_requested = sum([f.cleaned_data['quantity'] for f in ticket_formset])
                if (tickets_requested > 0) and (tickets_requested <= performance.tickets_available):
                    # Process ticket types
                    for form in ticket_formset:
                        # Get ticket type and quantity
                        ticket_type = get_object_or_404(TicketType, pk = form.cleaned_data['id'])
                        quantity = form.cleaned_data['quantity']
                        # Create tickets and add to basket
                        if quantity > 0:
                            for i in range(0, quantity):
                                ticket = Ticket(
                                    performance = performance,
                                    description = ticket_type.name,
                                    cost = ticket_type.price,
                                    user = request.user,
                                    basket = basket,
                                    payment = ticket_type.payment,
                                )
                                ticket.save()
                            # Confirm purchase
                            logger.info(f"{quantity} x {ticket_type.name} tickets for {performance.show.name} on {performance.date} at {performance.time} added to basket")
                            messages.success(request, f"{quantity} x {ticket_type.name} tickets added to basket.")
                    # Confirm purchase
                    return redirect(reverse('tickets:buy_confirm_tickets', args = [performance.uuid]))
                # Insufficient tickets available
                else:
                    logger.info(f"Insufficient tickets ({tickets_requested} requested, {performance.tickets_available} available) for {performance.show.name} on {performance.date} at {performance.time}")
                    # BUG FIX: user-facing message previously misspelled "perfromance".
                    messages.error(request, f"There are only {performance.tickets_available} tickets available for this performance.")
            # Reset buy fringer form
            buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user)
            tab = 'tickets'
        # Use fringer credits
        elif action == "UseFringers":
            # Check if there are still enough tickets available
            tickets_requested = len(request.POST.getlist('fringer_id'))
            if (tickets_requested > 0) and (tickets_requested <= performance.tickets_available):
                # Create a sale
                sale = Sale(
                    festival = request.festival,
                    user = request.user,
                    customer = request.user.email,
                    completed = timezone.now(),
                )
                sale.save()
                # Process each checked fringer
                for fringer_id in request.POST.getlist('fringer_id'):
                    # Get fringer and double check that it has not been used for this performance
                    fringer = Fringer.objects.get(pk = int(fringer_id))
                    if fringer.is_available(performance):
                        # Create ticket and add to sale
                        ticket = Ticket(
                            user = request.user,
                            performance = performance,
                            description = 'eFringer',
                            cost = 0,
                            fringer = fringer,
                            sale = sale,
                            payment = fringer.payment,
                        )
                        ticket.save()
                        # Confirm purchase
                        logger.info(f"Ticket for {performance.show.name} on {performance.date} at {performance.time} purchased with eFringer {fringer.name}")
                        messages.success(request, f"Ticket purchased with eFringer {fringer.name}")
                    else:
                        # Fringer already used for this performance.
                        # BUG FIX: logger.warn was removed in Python 3.13; use warning().
                        logger.warning(f"eFringer {fringer.name} already used for this performance")
                # Confirm purchase
                return redirect(reverse('tickets:buy_confirm_fringer_tickets', args = [performance.uuid]))
            # Insufficient tickets available
            else:
                logger.info(f"Insufficient tickets ({tickets_requested} requested, {performance.tickets_available} available) for {performance.show.name} on {performance.date} at {performance.time}")
                messages.error(request, f"There are only {performance.tickets_available} tickets available for this performance.")
            # Reset ticket formset and buy fringer form
            ticket_formset = self.get_ticket_formset(ticket_types)
            buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user)
            tab = 'fringers'
        # Add fringer vouchers to basket
        elif action == "AddFringers":
            # Create buy fringer form
            buy_fringer_form = self._create_buy_fringer_form(fringer_types, request.user, request.POST)
            # Check for errors
            if buy_fringer_form.is_valid():
                # Get fringer type and name
                fringer_type = get_object_or_404(FringerType, pk = int(buy_fringer_form.cleaned_data['type']))
                fringer_name = buy_fringer_form.cleaned_data['name']
                if not fringer_name:
                    fringer_count = Fringer.objects.filter(user = request.user).count()
                    fringer_name = f"eFringer{fringer_count + 1}"
                # Create new fringer and add to basket.
                # BUG FIX: payment was not set here (it is in MyAccountView's
                # equivalent branch, and fringer.payment is read when the
                # fringer is later spent in UseFringers).
                fringer = Fringer(
                    user = request.user,
                    name = fringer_name,
                    description = fringer_type.description,
                    shows = fringer_type.shows,
                    cost = fringer_type.price,
                    payment = fringer_type.payment,
                    basket = basket,
                )
                fringer.save()
                logger.info(f"eFringer {fringer.name} ({fringer.description}) added to basket")
                messages.success(request, f"eFringer {fringer.name} ({fringer.description}) added to basket")
                # Confirm purchase
                return redirect(reverse('tickets:buy_confirm_fringers', args = [performance.uuid]))
            # Reset ticket formset
            ticket_formset = self.get_ticket_formset(ticket_types, None)
            tab = 'fringers'
        # Use a volunteer complimentary ticket
        elif action == "UseVolunteer":
            # Check if there are still enough tickets available
            if (performance.tickets_available > 0):
                # Create a sale
                sale = Sale(
                    festival = request.festival,
                    user = request.user,
                    customer = request.user.email,
                    completed = timezone.now(),
                )
                sale.save()
                # Add volunteer ticket to sale
                ticket = Ticket(
                    user = request.user,
                    performance = performance,
                    description = 'Volunteer',
                    cost = 0,
                    payment = 0,
                    sale = sale,
                )
                ticket.save()
                # Confirm purchase
                logger.info(f"Ticket for {performance.show.name} on {performance.date} at {performance.time} purchased using volunteer credit.")
                messages.success(request, f"Volunteer ticket used")
                # Confirm purchase
                return redirect(reverse('tickets:buy_confirm_volunteer_ticket', args = [performance.uuid]))
            # Insufficient tickets available
            else:
                logger.info(f"Insufficient tickets (1 requested, {performance.tickets_available} available) for {performance.show.name} on {performance.date} at {performance.time}")
                messages.error(request, f"There are no tickets available for this performance.")
            # Redisplay
            tab = 'volunteers'
        # Get fringers available for this performance
        fringers = Fringer.get_available(request.user, performance)
        # Volunteer tickets
        if request.user.is_volunteer:
            volunteer_available = request.user.volunteer.comps_available
            volunteer_used = request.user.tickets.filter(performance=performance, description='Volunteer')
        else:
            volunteer_available = 0
            volunteer_used = True
        # Display buy page
        context = {
            'tab': tab,
            'basket': basket,
            'performance': performance,
            'ticket_formset': ticket_formset,
            'fringers': fringers,
            'buy_fringer_form': buy_fringer_form,
            'volunteer_available': volunteer_available,
            'volunteer_used': volunteer_used,
        }
        return render(request, "tickets/buy.html", context)
class BuyClosedView(LoginRequiredMixin, View):
    """Inform the customer that online sales for a performance are closed."""

    def get(self, request, performance_uuid):
        # Resolve the performance from the URL uuid (404 on unknown)
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        return render(request, "tickets/buy_closed.html", {
            'basket': request.user.basket,
            'performance': performance,
        })
class BuyConfirmTicketsView(LoginRequiredMixin, View):
    """Confirmation page shown after tickets are added to the basket."""

    def get(self, request, performance_uuid):
        # Unknown performance uuids return a 404
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        context = {
            'basket': request.user.basket,
            'performance': performance,
        }
        return render(request, "tickets/buy_confirm_tickets.html", context)
class BuyConfirmFringerTicketsView(LoginRequiredMixin, View):
    """Confirmation page for tickets bought with eFringer credits."""

    def get(self, request, performance_uuid):
        # Resolve the performance first; the basket is read off the user
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        return render(request, "tickets/buy_confirm_fringer_tickets.html", {
            'basket': request.user.basket,
            'performance': performance,
        })
class BuyConfirmFringersView(LoginRequiredMixin, View):
    """Confirmation page shown after eFringer vouchers are purchased."""

    def get(self, request, performance_uuid):
        # 404 on an unknown performance uuid
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        context = {
            'basket': request.user.basket,
            'performance': performance,
        }
        return render(request, "tickets/buy_confirm_fringers.html", context)
class BuyConfirmVolunteerTicketView(LoginRequiredMixin, View):
    """Confirmation page shown after a volunteer-credit ticket is used."""

    def get(self, request, performance_uuid):
        # Resolve performance (404 on bad uuid) and render the confirmation
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        return render(request, "tickets/buy_confirm_volunteer_ticket.html", {
            'basket': request.user.basket,
            'performance': performance,
        })
class PAYWView(LoginRequiredMixin, View):
    """Pay-what-you-will donations to a show, funded from eFringer credits."""

    def get(self, request, show_uuid):
        """Display the PAYW page for a show with the user's available fringers."""
        # Get show details
        show = get_object_or_404(Show, uuid = show_uuid)
        # Get fringers available
        fringers = Fringer.get_available(request.user)
        # Display PAYW page
        context = {
            'show': show,
            'fringers': fringers,
        }
        return render(request, "tickets/payw.html", context)

    @transaction.atomic
    def post(self, request, show_uuid):
        """Donate the selected eFringer credits to the show.

        Creates a Sale and one PayAsYouWill record per selected fringer;
        redisplays the page with a warning when nothing was selected.
        """
        # Get show details
        show = get_object_or_404(Show, uuid = show_uuid)
        # Get the requested action
        action = request.POST.get("action")
        # Donate eFringer credits
        if action == "UseFringers":
            # Check if any fringers are selected
            fringer_ids = request.POST.getlist('fringer_id')
            if fringer_ids:
                # Create a sale
                sale = Sale(
                    festival = request.festival,
                    user = request.user,
                    customer = request.user.email,
                    completed = timezone.now(),
                )
                sale.save()
                logger.info(f"Sale { sale.id } created for eFringer PAYW donation to { show.name }")
                # Create donations for each fringer selected
                for fringer_id in fringer_ids:
                    # Get fringer and donate to this show
                    fringer = Fringer.objects.get(pk = int(fringer_id))
                    payw = PayAsYouWill(
                        sale = sale,
                        show = show,
                        fringer = fringer,
                        amount = fringer.payment,
                    )
                    payw.save()
                    # Confirm donation
                    logger.info(f"eFringer {fringer.name} PAYW donation added to sale { sale.id }")
                    messages.success(request, f"eFringer {fringer.name} credit donated to { show.name }")
                # Return to show page
                return redirect(reverse("program:show", args=[show.uuid]))
            # No fringers selected so redisplay with a warning
            messages.warning(request, "No fringers selected for donation")
            fringers = Fringer.get_available(request.user)
            context = {
                'show': show,
                'fringers': fringers,
            }
            return render(request, "tickets/payw.html", context)
class CheckoutView(LoginRequiredMixin, View):
    """Checkout page: shows the basket contents and offers Stripe payment."""
    @method_decorator(never_cache)
    def get(self, request):
        """Render the checkout page, first recovering items from any abandoned sale."""
        # Get basket
        basket = request.user.basket
        # Cancel incomplete sales (can happen if user uses browser back button to return to checkout
        # from Stripe payment page)
        # NOTE(review): the unwinding below is not wrapped in a transaction,
        # so a failure part-way leaves a sale half-unwound - confirm whether
        # atomicity is needed here.
        incomplete = request.user.sales.filter(boxoffice__isnull = True, venue__isnull = True, completed__isnull = True)
        if incomplete:
            for sale in incomplete:
                # Return the sale's tickets to the basket
                for ticket in sale.tickets.all():
                    ticket.basket = basket
                    ticket.sale = None
                    ticket.save()
                    logger.info(f"{ticket.description} ticket for {ticket.performance.show.name} on {ticket.performance.date} at {ticket.performance.time} returned to basket {basket.user.id}")
                # Return the sale's eFringers to the basket
                for fringer in sale.fringers.all():
                    fringer.basket = basket
                    fringer.sale = None
                    fringer.save()
                    logger.info(f"eFringer {fringer.name} returned to basket {basket.user.id}")
                # Mark the sale itself as cancelled
                sale.cancelled = timezone.now()
                sale.save()
                logger.info(f"Incomplete sale {sale.id} auto-cancelled")
            messages.error(request, f"Payment cancelled. Your card has not been charged.")
        # Display basket
        context = {
            'basket': basket,
            'stripe_key': settings.STRIPE_PUBLIC_KEY,
        }
        return render(request, "tickets/checkout.html", context)
class CheckoutRemoveFringerView(LoginRequiredMixin, View):
    """Remove an eFringer from the current user's basket."""

    @transaction.atomic
    def get(self, request, fringer_uuid):
        """Delete the fringer and return to the checkout page.

        The lookup is scoped to the user's own basket so one user cannot
        delete another user's fringer by guessing its uuid (previously any
        authenticated user could delete any fringer).
        """
        # Get basket and fringer to be removed (404 unless it is in this basket)
        basket = request.user.basket
        fringer = get_object_or_404(Fringer, uuid = fringer_uuid, basket = basket)
        # Delete fringer
        logger.info(f"eFringer {fringer.name} removed from basket {basket.user.id}")
        messages.success(request, f"Fringer {fringer.name} removed from basket")
        fringer.delete()
        # Redisplay checkout
        return redirect(reverse("tickets:checkout"))
class CheckoutRemovePerformanceView(LoginRequiredMixin, View):
    """Remove all of a performance's tickets from the user's basket."""

    @transaction.atomic
    def get(self, request, performance_uuid):
        """Delete every basket ticket for the performance, then redisplay checkout."""
        # Get basket and performance
        basket = request.user.basket
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        # Delete all tickets for the performance
        for ticket in basket.tickets.filter(performance = performance):
            logger.info(f"{ticket.description} ticket for {performance.show.name} on {performance.date} at {performance.time} removed from basket {basket.user.id}")
            ticket.delete()
        # User-facing message no longer leaks the internal basket/user id
        # (previously: "... removed from basket {basket.user.id}")
        messages.success(request, f"{performance.show.name} removed from basket")
        # Redisplay checkout
        return redirect(reverse("tickets:checkout"))
class CheckoutRemoveTicketView(LoginRequiredMixin, View):
    """Remove a single ticket from the current user's basket."""

    @transaction.atomic
    def get(self, request, ticket_uuid):
        """Delete the ticket and return to the checkout page.

        The lookup is scoped to the user's own basket so a ticket belonging
        to another user (or already sold) cannot be deleted by uuid.
        """
        # Get basket and ticket to be removed (404 unless it is in this basket)
        basket = request.user.basket
        ticket = get_object_or_404(Ticket, uuid = ticket_uuid, basket = basket)
        # Delete ticket
        logger.info(f"{ticket.description} ticket for {ticket.performance.show.name} on {ticket.performance.date} at {ticket.performance.time} removed from basket {basket.user.id}")
        messages.success(request, f"{ticket.description} ticket for {ticket.performance.show.name} removed from basket")
        ticket.delete()
        # Redisplay checkout
        return redirect(reverse("tickets:checkout"))
class CheckoutConfirmView(View):
    """Show the confirmation page for a completed sale."""

    def get(self, request, sale_uuid):
        # Unknown sale uuids return a 404
        sale = get_object_or_404(Sale, uuid = sale_uuid)
        return render(request, 'tickets/checkout_confirm.html', {'sale': sale})
@login_required
@require_POST
def checkout_stripe(request):
    """Convert the basket into a sale and redirect to Stripe Checkout.

    Re-checks ticket availability first; on success the user is sent to the
    hosted Stripe payment page and returns via checkout_success or
    checkout_cancel.
    """
    # Get basket
    basket = request.user.basket
    # Check that tickets are still available
    tickets_available = True
    for p in basket.tickets.values('performance').annotate(count = Count('performance')):
        performance = ShowPerformance.objects.get(pk = p["performance"])
        if p["count"] > performance.tickets_available:
            messages.error(request, f"Your basket contains {p['count']} tickets for {performance.show.name} but there are only {performance.tickets_available} tickets available.")
            logger.info(f"Basket contains {p['count']} tickets for {performance.show.name} but there are only {performance.tickets_available} available")
            tickets_available = False
    # If tickets no longer available redisplay checkout with notifications
    if not tickets_available:
        messages.error(request, "Your card has not been charged.")
        context = {
            'basket': basket
        }
        return render(request, "tickets/checkout.html", context)
    # Use a transaction to protect the conversion of basket to sale and creation of Stripe session
    with transaction.atomic():
        # Move tickets and fringers from basket to sale
        # (sale.amount is captured from the basket total before the items move)
        sale = Sale(
            festival = request.festival,
            user = request.user,
            customer = request.user.email,
            amount = basket.total_cost,
            transaction_type = Sale.TRANSACTION_TYPE_STRIPE,
            transaction_fee = 0,
        )
        sale.save()
        logger.info(f"Sale {sale.id} created")
        for ticket in basket.tickets.all():
            ticket.basket = None
            ticket.sale = sale
            ticket.save()
            logger.info(f"{ticket.description} ticket for {ticket.performance.show.name} on {ticket.performance.date} at {ticket.performance.time} added to sale {sale.id}")
        for fringer in basket.fringers.all():
            fringer.basket = None
            fringer.sale = sale
            fringer.save()
            logger.info(f"eFringer {fringer.name} added to sale {sale.id}")
        # Create Stripe session
        stripe.api_key = settings.STRIPE_PRIVATE_KEY
        session = stripe.checkout.Session.create(
            client_reference_id = str(sale.id),
            customer_email = basket.user.email,
            payment_method_types = ['card'],
            mode = 'payment',
            line_items = [{
                'price_data': {
                    'currency': 'GBP',
                    # Stripe expects the amount in the smallest currency unit (pence)
                    'unit_amount': int(sale.total_cost * 100),
                    'product_data': {
                        'name': 'Theatrefest',
                        'description': 'Tickets and eFringers',
                    },
                },
                'quantity': 1,
            }],
            success_url = request.build_absolute_uri(reverse('tickets:checkout_success', args=[sale.uuid])),
            cancel_url = request.build_absolute_uri(reverse('tickets:checkout_cancel', args=[sale.uuid])),
        )
        # Remember the payment intent so the sale can be reconciled with Stripe
        sale.stripe_pi = session.payment_intent
        sale.save()
        logger.info(f"Stripe PI {session.payment_intent} created for sale {sale.id}")
    return redirect(session.url, code=303)
@login_required
@require_GET
def checkout_success(request, sale_uuid):
    """Complete a sale after a successful Stripe payment.

    Marks the sale as completed, e-mails the customer their tickets (if
    any) and renders the confirmation page.
    """
    # Get sale and mark as complete
    sale = get_object_or_404(Sale, uuid = sale_uuid)
    sale.completed = timezone.now()
    sale.save()
    logger.info(f"Stripe payment for sale {sale.id} succeeded")
    # ":.2f" (was ":2f", i.e. width 2) so amounts log with two decimal places
    logger.info(f"Credit card charged £{sale.total_cost:.2f}")
    logger.info(f"Sale {sale.id} completed")
    # Send e-mail to confirm tickets; a related manager is always truthy,
    # so test exists() rather than the manager itself
    if sale.tickets.exists():
        context = {
            'festival': request.festival,
            'tickets': sale.tickets.order_by('performance__date', 'performance__time', 'performance__show__name')
        }
        body = render_to_string('tickets/sale_email.txt', context)
        send_mail('Tickets for ' + request.festival.title, body, settings.DEFAULT_FROM_EMAIL, [request.user.email])
    # Display confirmation
    context = {
        'sale': sale,
    }
    return render(request, 'tickets/checkout_confirm.html', context)
@login_required
@require_GET
def checkout_cancel(request, sale_uuid):
    """Unwind a sale whose Stripe payment was cancelled.

    Returns all tickets and eFringers to the user's basket, marks the sale
    as cancelled and redisplays the checkout page.

    NOTE(review): unlike checkout_stripe, this unwinding is not wrapped in
    a transaction - confirm whether partial failure matters here.
    """
    # Get basket and sale
    basket = request.user.basket
    sale = get_object_or_404(Sale, uuid = sale_uuid)
    logger.info(f"Stripe payment for sale {sale.id} cancelled")
    # Move sale items back into basket and delete sale
    for ticket in sale.tickets.all():
        ticket.basket = basket
        ticket.sale = None
        ticket.save()
        logger.info(f"{ticket.description} ticket for {ticket.performance.show.name} on {ticket.performance.date} at {ticket.performance.time} returned to basket {basket.user.id}")
    for fringer in sale.fringers.all():
        fringer.basket = basket
        fringer.sale = None
        fringer.save()
        logger.info(f"eFringer {fringer.name} returned to basket {basket.user.id}")
    # The sale row is kept but stamped as cancelled
    sale.cancelled = timezone.now()
    sale.save()
    logger.info(f"Sale {sale.id} cancelled")
    # Display checkout with notification
    messages.error(request, f"Payment cancelled. Your card has not been charged.")
    context = {
        'basket': basket
    }
    return render(request, "tickets/checkout.html", context)
@transaction.atomic
@login_required
def ticket_cancel(request, ticket_uuid):
    """Cancel a ticket by attaching it to a freshly created refund."""
    # Locate the ticket (404 on an unknown uuid)
    ticket = get_object_or_404(Ticket, uuid = ticket_uuid)
    # Record the refund this cancellation belongs to
    new_refund = Refund(
        festival = request.festival,
        user = request.user,
        customer = request.user.email,
        completed = timezone.now(),
    )
    new_refund.save()
    # Attach the ticket to the refund
    ticket.refund = new_refund
    ticket.save()
    logger.info(f"{ticket.description} ticket for {ticket.performance.show.name} on {ticket.performance.date} at {ticket.performance.time} cancelled")
    messages.success(request, f"{ticket.description} ticket for {ticket.performance.show.name} cancelled")
    # Back to the user's account page
    return redirect(reverse("tickets:myaccount"))
@require_GET
def donations(request):
    """Render the donations page with the public Stripe key."""
    return render(
        request,
        'tickets/donations.html',
        {'stripe_key': settings.STRIPE_PUBLIC_KEY},
    )
@require_POST
@csrf_exempt
def donation_stripe(request):
    """Create a Stripe checkout session for a one-off donation.

    Redirects to the hosted Stripe page on success; renders a plain
    'Error' response on any failure (including malformed POST data).
    """
    try:
        # Parse inside the try so a missing/non-numeric amount returns the
        # error response rather than an unhandled 500
        amount = int(request.POST['donationAmount'])
        email = request.POST['donationEmail']
        session = stripe.checkout.Session.create(
            customer_email = email,
            payment_method_types = ['card'],
            line_items = [
                {
                    'name': 'Theatrefest',
                    'description': 'Donation',
                    'amount': amount * 100,
                    'currency': 'GBP',
                    'quantity': 1,
                },
            ],
            mode = 'payment',
            success_url = request.build_absolute_uri(reverse('tickets:donation_success') + f"?amount={amount}&email={email}"),
            cancel_url = request.build_absolute_uri(reverse('tickets:donation_cancel')),
        )
        return redirect(session.url, code=303)
    except Exception:
        # Log the failure instead of swallowing it silently
        logger.exception("Stripe donation session creation failed")
        return HttpResponse('Error')
@require_GET
def donation_success(request):
    """Record a completed donation and confirm it to the donor.

    NOTE(review): amount and email are read back from the query string of
    the Stripe success redirect, so they are client-supplied and spoofable;
    confirm against a Stripe webhook/event if accuracy matters.
    """
    # Save donation details
    donation = Donation(
        festival = request.festival,
        amount = request.GET["amount"],
        email = request.GET['email'],
    )
    donation.save()
    logger.info("Donation of £%s received from %s", donation.amount, donation.email)
    # Confirm donation received (fixes "Donaction" typo shown to the user)
    messages.success(request, "Donation completed")
    return render(request, 'tickets/donation_success.html')
@require_GET
def donation_cancel(request):
    """Notify the user the donation was cancelled and return to the donations page."""
    # Fixes "Donaction" typo shown to the user
    messages.info(request, "Donation cancelled")
    return redirect(reverse("tickets:donations"))
# PDF generation
import os
from django.conf import settings
from reportlab.pdfgen.canvas import Canvas
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph, Spacer, Image
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.pagesizes import A4, portrait
from reportlab.lib.units import cm
from reportlab.lib import colors
class PrintSaleView(LoginRequiredMixin, View):
    """Generate a PDF receipt for a sale using ReportLab Platypus."""
    def get(self, request, sale_uuid):
        """Build and return the receipt PDF.

        Layout: festival banner (if any), customer/sale-number header, one
        row per eFringer, a table per performance listing its tickets, and
        the sale total.
        """
        # Get sale to be printed
        sale = get_object_or_404(Sale, uuid = sale_uuid)
        # Create receipt as a Platypus story, writing directly into the response
        response = HttpResponse(content_type = "application/pdf")
        response["Content-Disposition"] = f"filename=sale{sale.id}.pdf"
        doc = SimpleDocTemplate(
            response,
            pagesize = portrait(A4),
            leftMargin = 2.5*cm,
            rightMargin = 2.5*cm,
            topMargin = 2.5*cm,
            bottomMargin = 2.5*cm,
        )
        styles = getSampleStyleSheet()
        story = []
        # Festival banner
        if request.festival.banner:
            banner = Image(request.festival.banner.get_absolute_path(), width = 18*cm, height = 4*cm)
            banner.hAlign = 'CENTER'
            story.append(banner)
            story.append(Spacer(1, 1*cm))
        # Customer and sale number
        table = Table(
            (
                (Paragraph("<para><b>Customer:</b></para>", styles['Normal']), sale.customer),
                (Paragraph("<para><b>Sale no:</b></para>", styles['Normal']), sale.id),
            ),
            colWidths = (4*cm, 12*cm),
            hAlign = 'LEFT'
        )
        story.append(table)
        story.append(Spacer(1, 0.5*cm))
        # Fringers
        if sale.fringers.count():
            tableData = []
            for fringer in sale.fringers.all():
                tableData.append(("eFringer", fringer.name, fringer.description, f"£{fringer.cost}"))
            table = Table(
                tableData,
                colWidths = (4*cm, 4*cm, 4*cm, 4*cm),
                hAlign = 'LEFT',
                style = (
                    # Right-align the cost column
                    ('ALIGN', (3, 0), (3, -1), 'RIGHT'),
                )
            )
            story.append(table)
            story.append(Spacer(1, 0.5*cm))
        # Tickets
        # NOTE(review): `if sale.tickets:` is always true for a related
        # manager; presumably sale.ticket_performances is empty when there
        # are no tickets so the loop just does nothing - confirm.
        if sale.tickets:
            is_first = True
            for performance in sale.ticket_performances:
                if not is_first:
                    story.append(Spacer(1, 0.3*cm))
                is_first = False
                tableData = []
                # Header row spans all four columns (see the SPAN style below)
                tableData.append((Paragraph(f"<para>{performance['date']:%a, %e %b} at {performance['time']:%I:%M %p} - <b>{performance['show']}</b></para>", styles['Normal']), "", "", ""))
                for ticket in performance['tickets']:
                    tableData.append((f"{ticket['id']}", "", ticket['description'], f"£{ticket['cost']}"))
                table = Table(
                    tableData,
                    colWidths = (4*cm, 4*cm, 4*cm, 4*cm),
                    hAlign = 'LEFT',
                    style = (
                        ('SPAN', (0, 0), (3, 0)),
                        ('ALIGN', (0, 1), (0, -1), 'RIGHT'),
                        ('ALIGN', (3, 1), (3, -1), 'RIGHT'),
                    )
                )
                story.append(table)
            story.append(Spacer(1, 0.5*cm))
        # Total
        table = Table(
            (
                ("", Paragraph("<para><b>Total:</b></para>", styles['Normal']), f"£{sale.amount}"),
            ),
            colWidths = (8*cm, 4*cm, 4*cm),
            hAlign = 'LEFT',
            style = (
                ('ALIGN', (2, 0), (2, 0), 'RIGHT'),
            )
        )
        story.append(table)
        # Create PDF document and return it
        doc.build(story)
        return response
class PrintPerformanceView(LoginRequiredMixin, View):
    """Generate a PDF listing the requesting user's tickets for one performance."""
    def get(self, request, performance_uuid):
        """Build and return the ticket-list PDF for the performance."""
        # Get performance to be printed
        performance = get_object_or_404(ShowPerformance, uuid = performance_uuid)
        # Create a Platypus story, writing directly into the response
        response = HttpResponse(content_type = "application/pdf")
        response["Content-Disposition"] = f"filename=performance{performance.id}.pdf"
        doc = SimpleDocTemplate(
            response,
            pagesize = portrait(A4),
            leftMargin = 2.5*cm,
            rightMargin = 2.5*cm,
            topMargin = 2.5*cm,
            bottomMargin = 2.5*cm,
        )
        styles = getSampleStyleSheet()
        story = []
        # Festival banner
        if request.festival.banner:
            banner = Image(request.festival.banner.get_absolute_path(), width = 18*cm, height = 4*cm)
            banner.hAlign = 'CENTER'
            story.append(banner)
            story.append(Spacer(1, 1*cm))
        # Tickets: show-name row, date/time row, then one row per ticket
        # owned by the requesting user for this performance
        tableData = []
        tableData.append((Paragraph(f"<para><b>{performance.show.name}</b></para>", styles['Normal']), "", "", ""))
        tableData.append((f"{performance.date:%a, %e %b} at {performance.time:%I:%M %p}", "", "", ""))
        for ticket in request.user.tickets.filter(performance_id = performance.id):
            tableData.append((f"{ticket.id}", "", ticket.description, f"£{ticket.cost}"))
        table = Table(
            tableData,
            colWidths = (4*cm, 4*cm, 4*cm, 4*cm),
            hAlign = 'LEFT',
            style = (
                # The two header rows each span all four columns
                ('SPAN', (0, 0), (3, 0)),
                ('SPAN', (0, 1), (3, 1)),
                ('ALIGN', (0, 2), (0, -1), 'RIGHT'),
                ('ALIGN', (3, 2), (3, -1), 'RIGHT'),
            )
        )
        story.append(table)
        # Create PDF document and return it
        doc.build(story)
        return response
|
985,954 | 9562095d03196eeee4fa0e810bc45cd25079e2b6 | from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
import time
# from selenium.webdriver.support.ui import Select
# End-to-end check of the GreenKart demo: search, add to cart, verify the
# checkout list matches what was added, verify prices and apply a promo code.
list1, list2 = [], []
driver = webdriver.Chrome()
driver.get("https://www.rahulshettyacademy.com/seleniumPractise/")
key = 'ca'
driver.find_element_by_css_selector('.search-keyword').send_keys(key)
time.sleep(4)
count = len(driver.find_elements_by_css_selector('.products div.product'))
assert count == 4
buttons = driver.find_elements_by_css_selector('.product-action button')
# Traverse from each ADD button up to its product card to read the name,
# then add the product to the cart
for btn in buttons:
    list1.append(btn.find_element_by_xpath('parent::div/parent::div/h4').text)
    btn.click()
driver.find_element_by_css_selector('img[alt="Cart"]').click()
driver.find_element_by_xpath('//button[text()="PROCEED TO CHECKOUT"]').click()
time.sleep(4)
checkedout_products = driver.find_elements_by_xpath('//p[@class="product-name"]')
for chkp in checkedout_products:
    list2.append(chkp.text)
# Every product added must appear in the checkout, in the same order
assert list1 == list2
# "total" (was "sum") so the builtin sum() is not shadowed
total = 0
price1 = driver.find_elements_by_xpath('//table[@class="cartTable"]/tr/td[5]/p')
for p in price1:
    total += int(p.text)
print(total)
total_amt = int(driver.find_element_by_css_selector('.totAmt').text)
print(total_amt)
coupon_code = "rahulshettyacademy"
# Explicit wait for the promo-code box and then for the discount confirmation
# NOTE(review): the find_element_by_* API is removed in Selenium 4 - migrate
# to driver.find_element(By..., ...) when upgrading.
wait = WebDriverWait(driver, 7)
wait.until(expected_conditions.presence_of_element_located((By.CSS_SELECTOR, 'input[class="promoCode"]')))
driver.find_element_by_css_selector('input[class="promoCode"]').send_keys(coupon_code)
driver.find_element_by_class_name("promoBtn").click()
wait.until(expected_conditions.presence_of_element_located((By.CLASS_NAME, 'promoInfo')))
print(driver.find_element_by_class_name("promoInfo").text)
|
985,955 | 52906429b379cb3fcfefc578746f21784f8bfcd5 | import time
import paho.mqtt.client as client
import ssl
import random
IoT_protocol = "x-amzn-mqtt-ca"
aws_end_point = "amwmnb948lrc8-ats.iot.ap-south-1.amazonaws.com"
ca = "./AmazonRootCA1.pem"
cert = "./69311293f0-certificate.pem.crt"
private = "./69311293f0-private.pem.key"
# TLS context with ALPN so AWS IoT accepts MQTT over port 443
ssl_context = ssl.create_default_context()
ssl_context.set_alpn_protocols([IoT_protocol])
ssl_context.load_verify_locations(cafile=ca)
ssl_context.load_cert_chain(certfile=cert, keyfile=private)
mqttc = client.Client()
mqttc.tls_set_context(context=ssl_context)
mqttc.connect(aws_end_point, port=443)
mqttc.loop_start()
# Publish five random temperature readings, two seconds apart
for i in range(5):
    t = random.randint(20, 45)
    print("Message Publishing")
    mqttc.publish("mytopic/iot", f"Temperature:{t}")
    time.sleep(2)
# Clean shutdown (previously missing): stop the background network thread
# and disconnect instead of exiting with the loop thread still running
mqttc.loop_stop()
mqttc.disconnect()
|
985,956 | c1e870c28d88f99f448127ee8bd94b751021d876 | # Expresiones regulares
# Secuencia especial de caracteres que ayuda a encontrar otras cadenas o conjuntos de cadenas utlizando una sintaxis mantenida en un patron.
# Modulo en python para regex: re
# Funciones en re
# match(patron, cadena, banderas), si hace match regresa un objeto de tipo match y si no regresa None
# search()
# findall()
# group()
# groups()
# sub()
import re

# Regular expressions: special character sequences used to find strings or
# sets of strings via a pattern, handled by Python's re module.
# Key functions: match(pattern, string, flags) anchors at the start and
# returns a match object or None; search() matches anywhere; findall(),
# group()/groups() and sub() cover extraction and substitution.
#
# Character classes: [0-9], [a-z], [A-Z], combinations like [0-9a-zA-Z] or
# [a-f5-8]; [^...] negates a class.
# Quantifiers: + (one or more), * (zero or more), ? (optional), {n}/{m,n}.
# Anchors: ^ start, $ end, \A start of string.
# Shorthands: \d=[0-9], \D=[^0-9], \w=[a-zA-Z_], \W its negation,
# \s whitespace, \S non-whitespace.

# Each triple is (pattern, test string, flags); every hit prints the same
# confirmation line the original step-by-step examples printed.
_examples = [
    ("hola", "hola", 0),                            # literal match
    (".ola", "tola", 0),                            # . matches any character
    (r"\.ola", ".ola", 0),                          # escaped dot is a literal dot
    ("python|jython|cython", "cython", 0),          # alternation
    ("(p|j|c)ython", "python", 0),                  # grouped alternation
    ("[pjc]ython", "cython", 0),                    # character class
    ("niñ(o|a)s", "niños", 0),                      # group inside a word
    ("cadena[0-9]", "cadena1", 0),                  # digit class
    ("python[^0-9a-z]", "pythonZ", re.IGNORECASE),  # negated class: no hit here
    ("python+", "pythonnn", 0),                     # one or more
    ("python*", "pytho", 0),                        # zero or more
    ("python?", "pytho", 0),                        # optional
    ("python{3,8}", "pythonnnn", 0),                # bounded repetition
    ("^http", "http://google.com", 0),              # anchored at the start
    ("http$", "://google.comhttp", 0),              # match() fails: not at start
    (r"\Aa[0-9].*(end|fin)$", "a8sdfsdfsdfin", 0),  # combined anchors
]
for _pattern, _text, _flags in _examples:
    if re.match(_pattern, _text, _flags):
        print("Hizo match")

# search() finds the pattern anywhere in the string (match() only at the start)
if re.search(r"\Aa[0-9].*(end|fin)$", "a2 dfsdfs fsdf fin"):
    print("Hizo match")

# Pre-compiled patterns can be reused via match/search/findall
patron = re.compile("a[3-5]+")
print(patron.match("a333"))
print(patron.search("ba544"))
print(patron.findall("ba544 a333 a768 a355"))
|
985,957 | 8f120f82a6c45c839f6403e735f9b15f4d363bd8 | import rx
# Demo: emit each greeting in the list as an RxPY observable stream.
l = ['Hola', 'Adiós']
# from_ wraps the list; on_next fires once per element and
# on_completed fires after the last element.
rx.from_(l).subscribe(
    on_next=lambda v: print(f'Recibido: {v}'),
    on_completed=lambda: print('Done!')
)
|
985,958 | 302e9c6fd69258555653790b7fa2d01c571dc634 | from ffindex.content import FFIndexContent
import mmap
try:
isinstance("", basestring)
def _is_string(s):
return isinstance(s, basestring)
except NameError:
def _is_string(s):
return isinstance(s, str)
def _to_file(fn, mode="rb"):
    """Open *fn* when it is a path string; pass it through when it is already a file object."""
    return open(fn, mode) if _is_string(fn) else fn
def read(ffindex_data, ffindex_db=None, encoding=None):
    """Generator to parse FFindex entries.

    Args:
        ffindex_data: Path to the packed data file, or an open file object
            (must have a real fileno(), since the data is mmap'd).
        ffindex_db: Path to (or open handle of) the index file; defaults to
            ffindex_data + ".index" when ffindex_data is a path.
        encoding: Optional encoding passed through to FFIndexContent.

    Yields:
        FFIndexContent views over the mmap'd data, one per index line.

    NOTE(review): the mmap and any files opened here are only closed when
    the generator is fully exhausted; abandoning it early leaks them.
    """
    if ffindex_db is None:
        if _is_string(ffindex_data):
            ffindex_db = ffindex_data + ".index"
        else:
            raise Exception("When ffindex_data is passed as a file-like object, ffindex_db is required")
    f_db = _to_file(ffindex_db, "r")
    # "r+b" opens the data file read-write for the mmap below -
    # TODO confirm whether read-only access would suffice
    f_data = _to_file(ffindex_data, "r+b")
    m_data = mmap.mmap(f_data.fileno(), 0)
    for l_db in f_db:
        # Index line format: name<TAB>start<TAB>length
        filename, start, length = l_db.strip().split("\t")
        # length - 1 presumably drops a trailing terminator byte per entry - confirm
        yield FFIndexContent(m_data, int(start), int(length) - 1, filename, encoding)
    m_data.close()
    # Only close handles this function opened itself (i.e. when given paths)
    if _is_string(ffindex_db):
        f_db.close()
    if _is_string(ffindex_data):
        f_data.close()
|
985,959 | 19ffb3a60c03f13456564a65ba73625e3173c405 | from abc import ABC, abstractmethod
from numpy.linalg import inv
import numpy as np
class MAB(ABC):
    """Abstract base for multi-armed bandit policies.

    Subclasses implement play() (choose an arm) and update() (learn from an
    observed reward); the bodies here provide a default greedy behaviour
    that concrete subclasses may call via super() or override entirely.
    """
    @abstractmethod
    def play(self, tround, context):
        # Current round of t (for my implementations average mean reward array
        # at round t is passed to this function instead of tround itself)
        self.tround = tround
        # Context: features of contextual bandits
        self.context = context
        # choose an arm which yields maximum value of average mean reward, tie breaking randomly
        # NOTE(review): this maximises over self.tround, so it relies on an
        # array (not an int round counter) being passed in - confirm at call sites
        chosen_arm = np.random.choice(np.where(self.tround==max(self.tround))[0])
        return chosen_arm
        pass
    @abstractmethod
    def update(self, arm, reward, context):
        # get the chosen arm
        self.arm = arm
        # get the context (may be None)
        self.context = context
        # update the overall step of the model
        self.step_n += 1
        # update the step of individual arms
        self.step_arm[self.arm] += 1
        # update average mean reward of each arm (incremental-mean formula;
        # relies on step_arm/AM_reward being initialised by the subclass)
        self.AM_reward[self.arm] = ((self.step_arm[self.arm] - 1) / float(self.step_arm[self.arm])
        * self.AM_reward[self.arm] + (1 / float(self.step_arm[self.arm])) * reward)
        return
        pass
def offlineEvaluate(mab, rewards, contexts, tau, nrounds=None, u_cmab=1):
    """Offline (replay) evaluation of a multi-armed bandit policy.

    Args:
        mab: Bandit exposing play(tround, context) and update(arm, reward, context).
        rewards: 2-D array of per-round rewards, indexed [round][arm].
        contexts: 2-D array, one context vector per round.
        tau: Penalty factor; the bandit sees reward - arm * tau * u_cmab.
        nrounds: Maximum number of rounds to evaluate. Defaults to the
            number of context rows (previously the None default crashed in
            np.zeros(None)); passing an explicit value behaves as before.
        u_cmab: Scale factor applied to the tau penalty.

    Returns:
        reward_arms: Penalised reward obtained at each round.
        chosen_arms: Arm chosen at each round.
        cumulative_reward: Running total of the raw (unpenalised) rewards.
    """
    # Default to evaluating over every available context row.
    if nrounds is None:
        nrounds = np.shape(contexts)[0]
    # array to contain chosen arms in offline mode
    chosen_arms = np.zeros(nrounds)
    # rewards of each chosen arm
    reward_arms = np.zeros(nrounds)
    # cumulative reward at each iteration
    cumulative_reward = np.zeros(nrounds)
    # round counter and overall cumulative reward
    T = 0
    G = 0
    # History or memory of offline evaluator
    history = []
    # Warm-up call kept for backward compatibility: stateful bandits may
    # initialise internal state on the first play()
    action = mab.play(T, contexts[0, :])
    #===============================
    # MAIN LOOP ...
    #===============================
    for i in range(np.shape(contexts)[0]):
        action = mab.play(T, contexts[i, :])
        if T < nrounds:
            # remember the context presented at this round
            history.append(contexts[i, :])
            # penalised reward of the chosen arm at round T
            reward_arms[T] = rewards[i][action] - action * tau * u_cmab
            mab.update(action, rewards[i][action] - action * tau * u_cmab, contexts[i, :])
            # raw cumulative reward (penalty not applied here)
            G += rewards[i][action]
            cumulative_reward[T] = G
            chosen_arms[T] = action
            T += 1
        else:
            # desired number of rounds reached
            break
    return reward_arms, chosen_arms, cumulative_reward
class LinUCB(MAB):
    """Contextual bandit following the LinUCB algorithm.

    Keeps per-arm ridge-regression statistics A (ndims x ndims) and
    b (ndims x 1) and plays the arm with the highest upper confidence bound.
    """
    def __init__(self, narms, ndims, alpha):
        # Set number of arms
        self.narms = narms
        # Number of context features per arm
        self.ndims = ndims
        # explore-exploit parameter (width of the confidence bound)
        self.alpha = alpha
        # Instantiate A as a ndims×ndims matrix for each arm
        self.A = np.zeros((self.narms, self.ndims, self.ndims))
        # Instantiate b as a 0 vector of length ndims.
        self.b = np.zeros((narms, self.ndims, 1))
        # set each A per arm as identity matrix of size ndims (ridge prior)
        for arm in range(self.narms):
            self.A[arm] = np.eye(self.ndims)
        super().__init__()
        return
    def play(self, tround, context):
        """Return the 0-based index of the arm with the highest UCB score.

        context is a flattened vector of narms * ndims features; it is
        reshaped so row i holds the features of arm i.
        """
        # UCB score per arm. BUG FIX: sized by narms (it is indexed by arm
        # below), not ndims as before - the old code broke when narms != ndims.
        p_t = np.zeros(self.narms)
        context = np.reshape(context, (self.narms, self.ndims))
        for i in range(self.narms):
            # theta hat = A^-1 b, the ridge-regression estimate for arm i
            self.theta = inv(self.A[i]).dot(self.b[i])
            # get context of each arm from the flattened vector
            cntx = context[i]
            # mean estimate plus alpha-scaled confidence bound
            p_t[i] = self.theta.T.dot(cntx
            ) + self.alpha * np.sqrt(
                cntx.dot(inv(self.A[i]).dot(cntx)))
        # tie-break randomly among the best arms; the returned index is
        # 0-based, matching how update() indexes A and b
        action = np.random.choice(np.where(p_t == max(p_t))[0])
        return action
    def update(self, arm, reward, context):
        """Update the chosen arm's statistics with the observed reward."""
        context = np.reshape(context, (self.narms, self.ndims))
        self.A[arm] = self.A[arm] + np.outer(context[arm], context[arm])
        self.b[arm] = np.add(self.b[arm].T, context[arm] * reward).reshape(self.ndims, 1)
        return
985,960 | d23b8a957c66f2944566bdb9759f97585b1a783d | f = open("a.in")
# Skip the first line (number of test cases) - TODO confirm input format
f.readline()
for case, line in enumerate(f):
    # Only the second field (the audience digit string) is used;
    # digit at position i = number of people with shyness level i
    people = line.split()[1]
    # People currently standing (and clapping)
    standing = 0
    # Extra friends we must invite so the ovation never stalls
    res = 0
    for shyness, count in enumerate(people):
        count = int(count)
        if (shyness <= standing):
            # Enough people already standing: this whole group stands too
            standing += count
        else:
            # Shortfall: invite just enough extras to reach this shyness level
            res += shyness - standing
            standing += shyness - standing + count
    # NOTE(review): enumerate starts at 0, so this prints "Case #0: ..." -
    # Code Jam output conventionally starts at 1; confirm intended
    print "Case #" + str(case) + ": " + str(res)
|
985,961 | 4ae14c184a3fc5069cd5d8647997272059cd9519 | from .enums import Instance_options
from .model import Model, Pair
"""Functions to deal with File input."""
def _get_simple_pref_list_and_ranks(pref_list):
"""Creates integer list preference list and rank list.
For the preference list "4 5 (1 2) 3", the output will be:
simp_pref_list = [4 5 1 2 3], simp_ranks = [1 2 3 3 5]
Args:
pref_list: The preference list to import.
Returns:
The simplified preference list.
The ranks of each preference list agent.
"""
simp_pref_list = []
simp_ranks = []
rank = 1
in_tie = False
for i in range(len(pref_list)):
if '(' in pref_list[i]:
elem_num = int(pref_list[i].replace('(', ''))
simp_pref_list.append(elem_num)
simp_ranks.append(rank)
in_tie = True
elif ')' in pref_list[i]:
elem_num = int(pref_list[i].replace(')', ''))
simp_pref_list.append(elem_num)
simp_ranks.append(rank)
rank+=1
in_tie = False
else:
simp_pref_list.append(int(pref_list[i]))
simp_ranks.append(rank)
if not in_tie:
rank+=1
return simp_pref_list, simp_ranks
def _create_pairs_row(model, st_prefs, st_num):
    """Creates a list of Pairs mirroring a student's preference list.

    Args:
        model: The model representation of the instance (unused here;
            kept for signature compatibility with the other builders).
        st_prefs: Student preference list.
        st_num: The student ID.

    Returns:
        The Pair list for the given student.
    """
    simp_st_prefs, simp_st_ranks = _get_simple_pref_list_and_ranks(st_prefs)
    # (the dead "in_tie = True" leftover from an earlier version is removed)
    return [Pair(st_num, pref, rank)
            for pref, rank in zip(simp_st_prefs, simp_st_ranks)]
def _create_student_ranks(model, lec_prefs, lec_num):
    """Creates a list of the rank of each student for each lecturer.

    Args:
        model: The model representation of the instance.
        lec_prefs: Lecturer preference list.
        lec_num: The lecturer ID.

    Returns:
        Dict mapping (lecturer, student) to the student's rank.
    """
    prefs, ranks = _get_simple_pref_list_and_ranks(lec_prefs)
    return {(lec_num, pref): rank for pref, rank in zip(prefs, ranks)}
def _set_lecturers(model, project_lecturers):
"""Sets the lecturer number for each Pair in the model.
Args:
model: The model representation of the instance.
project_lecturers: Which lecturer supervises each project.
"""
for st_pairs in model.pairs:
for st_pr_pair in st_pairs:
st_pr_pair.set_lecturer(project_lecturers[st_pr_pair.project_index])
def _set_lecturer_ranks(model, lec_st_ranks):
"""Sets the lecturer ranks for each Pair in the model.
Args:
model: The model representation of the instance.
lec_st_ranks: The rank of each student for each lecturer.
"""
for st_pairs in model.pairs:
for st_pr_pair in st_pairs:
rank = lec_st_ranks[(st_pr_pair.lecturerID, st_pr_pair.studentID)]
st_pr_pair.set_lecturer_rank(rank)
def _import_from_file(filename, instance_options):
    """Imports a matching instance from file, returning an SPA-STL model.

    The file format is positional: line 0 holds the instance sizes, the next
    num_students lines hold student preference lists, then num_projects lines
    of project (HR: hospital) information and, for three-agent SPA instances,
    num_lecturers lines of lecturer information.

    Args:
      filename: The file name of the file to import from.
      instance_options: User chosen instance options.
    Returns:
      The model representation of the instance.
    """
    model = Model()
    project_lecturers = []
    lecturer_student_ranks = {}
    with open(filename) as f:
        for index, line in enumerate(f):
            # Strip ':' separators so quota/preference fields split cleanly.
            line_split = line.replace(':', '').split()
            #first line: "<num_students> <num_projects> [<num_lecturers>]"
            if index == 0:
                first_line = line.split()
                model.num_students = int(first_line[0])
                model.num_projects = int(first_line[1])
                # Two-agent (HR) instances have no separate lecturer count:
                # each hospital doubles as its own lecturer.
                if instance_options[Instance_options.NUMAGENTS] == 2:
                    model.num_lecturers = int(first_line[1])
                if instance_options[Instance_options.NUMAGENTS] == 3:
                    model.num_lecturers = int(first_line[2])
            # student preference lists (lines 1 .. num_students)
            elif index < model.num_students + 1:
                st_prefs = line_split[1:]
                st_num = index
                pairs_row = _create_pairs_row(model, st_prefs, st_num)
                model.pairs.append(pairs_row)
            # projects information
            elif index < model.num_students + model.num_projects + 1:
                # SPA: "<id> <lower> <upper> <lecturer>"
                if instance_options[Instance_options.NUMAGENTS] == 3:
                    model.proj_lower_quotas.append(int(line_split[1]))
                    model.proj_upper_quotas.append(int(line_split[2]))
                    project_lecturers.append(int(line_split[3]))
                # HR: hospital quotas are mirrored into the lecturer lists
                # because each hospital acts as its own lecturer.
                if instance_options[Instance_options.NUMAGENTS] == 2:
                    model.proj_lower_quotas.append(int(line_split[1]))
                    model.proj_upper_quotas.append(int(line_split[2]))
                    proj_num = index - (model.num_students)
                    project_lecturers.append(proj_num)
                    model.lec_lower_quotas.append(int(line_split[1]))
                    model.lec_targets.append(int(line_split[2]))
                    model.lec_upper_quotas.append(int(line_split[2]))
                    proj_num = len(model.proj_lower_quotas)
                    # if hospital preference lists are present, add them to the
                    # model
                    if instance_options[Instance_options.TWOPL]:
                        additional_hosp_ranks = _create_student_ranks(
                            model, line_split[3:], proj_num)
                        lecturer_student_ranks.update(additional_hosp_ranks)
            # SPA only - lecturer information
            elif (index < model.num_students + model.num_projects +
                  model.num_lecturers + 1 and
                  instance_options[Instance_options.NUMAGENTS] == 3):
                model.lec_lower_quotas.append(int(line_split[1]))
                model.lec_targets.append(int(line_split[2]))
                model.lec_upper_quotas.append(int(line_split[3]))
                # NOTE(review): `rank` is assigned but never read below —
                # looks vestigial; confirm before removing.
                rank = 1
                lec_num = index - (model.num_students + model.num_projects)
                # if lecturer preference lists are present, add them to the
                # model
                if instance_options[Instance_options.TWOPL]:
                    additional_lec_st_ranks = _create_student_ranks(
                        model, line_split[4:], lec_num)
                    lecturer_student_ranks.update(additional_lec_st_ranks)
    model.proj_lecturers = project_lecturers
    _set_lecturers(model, project_lecturers)
    if instance_options[Instance_options.TWOPL]:
        _set_lecturer_ranks(model, lecturer_student_ranks)
    return model
def import_model(filename, instance_options):
    """Returns a completely initialised model.

    Args:
        filename: The file name of the file to import from.
        instance_options: User chosen instance options.

    Returns:
        The model representation of the instance, with its derived project,
        lecturer and rank lists populated.
    """
    model = _import_from_file(filename, instance_options)
    # Build the derived lookup structures after the raw import.
    for initialise in (model.set_project_lists,
                       model.set_lecturer_lists,
                       model.set_rank_lists):
        initialise()
    return model
|
985,962 | 796cfb0f8f85a9737f07598bfad1b348807af030 | S = list(input())
# print(S)
ListS = []
# print(S)
for i in range(len(S)):
ListS.append(S)
tem = S
S = S[1:len(S)]
S.append(tem[0])
ListS.sort()
# print(ListS)
for s in ListS[0]:
print(s, end="")
print("")
for s in ListS[-1]:
print(s, end="")
print("")
|
985,963 | e8de9a71c0b0dbad56043418677979ae02c59d96 | import numpy as np
# import matlab.engine
#
# eng = matlab.engine.start_matlab()
def loss_cp(m, s):
    """Partial port of the PILCO cart-pole loss (loss_cp.m) from MATLAB.

    Builds the augmented mean/covariance and the static penalty matrix Q, but
    the actual loss computation (steps 3-4) is still commented-out MATLAB and
    the function currently returns None.

    Args:
        m: column state-mean vector, shape (state_dim, 1).
        s: state covariance matrix, shape (state_dim, state_dim).
    """
    cw = 0.25  # cost function width
    b = 0.0  # exploration parameter
    state_dim = m.shape[0]
    D0 = state_dim  # state dimension
    D1 = state_dim + 2  # state dimension with cos/sin
    # Augmented mean: original state in the first D0 rows, the two extra
    # (cos/sin) rows left at zero — the augmentation step below is unfinished.
    M = np.zeros([D1, 1])
    M[0:D0, 0:1] = m
    S = np.zeros([D1, D1])
    S[0:D0, 0:D0] = s
    # Derivatives of the augmented moments w.r.t. the input moments.
    Mdm = np.concatenate([np.eye(D0), np.zeros([D1 - D0, D0])])
    Sdm = np.zeros([D1 * D1, D0])
    Mds = np.zeros([D1, D0 * D0])
    Sds = np.kron(Mdm, Mdm)
    # 2.Define static penalty as distance from target setpoint
    ell = 0.6  # pendulum length
    Q = np.zeros([D1, D1])
    # Fills the 2x2 block at rows/cols {0, D0} with [[1, ell], [ell, ell^2]].
    Q[[[0, 0], [D0, D0]], [[0, D0], [0, D0]]] = np.array(
        [[1], [ell]]) @ np.array([[1, ell]])
    Q[D0 + 1, D0 + 1] = ell**2
    # 3. Augment angles????
    # # 4. Calculate loss!
    # L = 0
    # dLdm = np.zeros([1, D0])
    # dLds = np.zeros([1, D0 * D0])
    # S2 = 0
    #
    # for i in range(1, len(cw)): # scale mixture of immediate costs
    #     cost.z = target;
    #     cost.W = Q / cw(i) ^ 2;
    #
    #
    #     [r rdM rdS s2 s2dM s2dS] = lossSat(cost, M, S);
    #
    #     L = L + r;
    #     S2 = S2 + s2;
    #     dLdm = dLdm + rdM(:)'*Mdm + rdS(:)' * Sdm;
    #     dLds = dLds + rdM(:)'*Mds + rdS(:)' * Sds;
    #
    #     if (b~=0 | | ~isempty(b)) & & abs(s2) > 1e-12
    #         L = L + b * sqrt(s2);
    #         dLdm = dLdm + b / sqrt(s2) * (s2dM(:)
    #         '*Mdm + s2dS(:)' * Sdm ) / 2;
    #         dLds = dLds + b / sqrt(s2) * (s2dM(:)
    #         '*Mds + s2dS(:)' * Sds ) / 2
    return
# Smoke-test driver: a 4-state mean vector and a uniform covariance.
m = np.array([[1, 2, 3, 4]]).T
s = 0.1 * np.ones([4, 4])
loss_cp(m, s)
|
985,964 | e6433cc0f85c14fa6875fd32aa53c0d7e72f5b13 | from data import DataAPI
from backtesting import Backtesting
from Visualization import Visualize
import pandas as pd
import matplotlib.pyplot as plt
user1 = DataAPI("IEX", IEX_token)
# path_aapl2y = r'D:\K\HU\HU - Courses\CISC 695 Research Methodology and Writing\Assignments\sample_data\aapl2y.csv'
# df = pd.read_csv(path_aapl2y)
# df = pd.read_csv(path_aapl2y)
df = user1.IEX_daily_pricing_data('AAPL', '2y')
bt1 = Backtesting(df)
df1 = bt1.RSI_single_stock(notification=False)
v1 = Visualize(df1)
v1.sma_plot_result()
# df2 = bt1.sma_backteting_bulk()
# print(df2) |
985,965 | 11052f7e56c5aa71f0ca5294ffa740a99db3e464 | from clarity_ext.unit_conversion import UnitConversion
from clarity_ext.clarity import ClaritySession
|
985,966 | 86889d06dc52126f6eb5642f8aa54d209a091f88 | import sounddevice as sd
import soundfile as sf
import tkinter
def recording():
    """Record ten seconds of stereo audio and save it as 'My_recording.flac'."""
    sample_rate = 48000
    seconds = 10
    # Blocks until the capture buffer is full, then writes it to disk.
    frames = sd.rec(frames=int(sample_rate * seconds), samplerate=sample_rate, channels=2)
    sd.wait()
    return sf.write('My_recording.flac', frames, samplerate=sample_rate)
# Minimal Tk window: a label plus a button wired to recording(); the button
# blocks the UI for the full 10-second capture (sd.wait runs on the Tk thread).
master = tkinter.Tk()
tkinter.Label(master=master, text='Voice Recorder :').grid(row=0, column=10, rowspan=10, columnspan=10)
button = tkinter.Button(master=master, text='Record', command=recording)
button.grid(row= 0, column=25, rowspan=15, columnspan=15)
tkinter.mainloop()
985,967 | 31b0ee49957871af3f6181e99c99190e3667cbb4 | # coding: utf-8
"""
Relias API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
from test.helpers.vcr import vcr
import relias_api_client
from relias_api_client.api.assessments_api import AssessmentsApi # noqa: E501
from relias_api_client.rest import ApiException
class TestAssessmentsApi(unittest.TestCase):
    """AssessmentsApi unit test stubs (HTTP traffic replayed via VCR cassettes)."""
    def setUp(self):
        # Unused by the test below, which builds its own client; kept as the
        # generated stub.
        self.api = relias_api_client.api.assessments_api.AssessmentsApi()  # noqa: E501
    def tearDown(self):
        pass
    @vcr.use_cassette
    def test_get_user_assignable_assessments(self):
        """Test case for get_user_assignable_assessments
        Retrieves a paginated list of assignable assessments for the user corresponding to the username provided # noqa: E501
        """
        api_instance = relias_api_client.AssessmentsApi(relias_api_client.ApiClient())
        result = api_instance.get_user_assignable_assessments("test@dka.im")
        # 255 matches the recorded cassette response, not live data.
        self.assertEqual(result.total_count, 255)
if __name__ == '__main__':
unittest.main()
|
985,968 | 0364525b2ffb7c88ad3c06f9f6f4ec37e0629b77 | # coding=utf-8
"""
Since 2020 Apr, BOCHK revised its cash report format, holding report not affected.
This module handles the new format here. For each currency, then
1) use cash activity report balance (day end balance);
2) if not, use cash report (real time balance)
"""
from functools import partial, reduce
from itertools import takewhile, filterfalse
from toolz.functoolz import compose
from toolz.dicttoolz import valmap
from toolz.itertoolz import groupby as groupbyToolz
from utils.iter import pop
from clamc_datafeed.feeder import mergeDictionary
import logging
logger = logging.getLogger(__name__)
def lognContinue(msg, x):
    """Log *msg* at debug level and pass *x* through unchanged.

    Used inside compose() pipelines to trace execution without altering data.
    """
    logger.debug(msg)
    return x
def getCashFromBalance(lines):
    """
    [Iterable] lines
    => [Dictionary] currency -> cash entry
    There can be multiple lines for a currency. We need to add up balances from
    all those lines to form the final balances for the currency.
    """
    def consolidate(group):
        """
        [List] group => [Dictionary] consolidated position
        group is a list of cash entries of the same currency, here we add up
        their amount
        """
        # Use the first entry as the template, then overwrite its balance with
        # the group total; strip any BOM from string fields at the same time.
        p = group[0].copy()
        p['balance'] = sum(map(lambda p: p['balance'], group))
        return valmap(lambda v: removeBOM(v) if isinstance(v, str) else v, p)
    # Pipeline (bottom-up): parse raw positions, group them by currency,
    # then consolidate each currency group into a single entry.
    return compose(
        partial(valmap, consolidate)
      , partial(groupbyToolz, lambda d: d['currency'])
      , getRawPositions
      , lambda lines: lognContinue('getCashFromBalance(): start', lines)
    )(lines)
def getCashFromActivity(lines):
    """
    [Iterable] lines (from the cash avtivity file)
    => [Dictionary] currency -> cash entry
    There may be multiple activity lines for a currency. The latest activity
    has the most updated balance and it is the first activity line for that
    currency. The outcome is a series of cash entries, each entry is a dictioanry
    containing the keys (portfolio, currency, balance)
    """
    """
    Only keep the first entry for a currency. Remove the leading '\ufeff'
    character in its account name value, because sometimes due to utf-8
    encoding this character is there.
    """
    def combinePositions(acc, el):
        # First occurrence wins: the file lists the newest activity first,
        # so a currency already in acc is skipped.
        return acc if el['currency'] in acc else \
            mergeDictionary( acc
                           , {el['currency']: valmap(lambda v: removeBOM(v) if isinstance(v, str) else v, el)}
                           )
    return compose(
        lambda positions: reduce(combinePositions, positions, {})
      , getRawPositions
      , lambda lines: lognContinue('getCashFromActivity(): start', lines)
    )(lines)
"""
[Iterable] lines => [Iterable] positions (each position is a dictionary)
Get the headers from the first line, then use that header and each subsequent
line to form a position (Dictionary).
"""
def getRawPositions(lines):
    """Turn raw spreadsheet rows into position dictionaries.

    The first line supplies the headers (renamed via headerMap); each
    following non-empty line is zipped with those headers into a dict.
    Iteration stops at the first empty row.
    """
    nonEmptyLine = lambda line: len(line) > 0 and line[0] != ''
    # Map the bank's column titles to the field names used downstream; both
    # the per-currency and the total-equivalent variants collapse to the
    # same field.
    headerMap = {
        'Account Name': 'portfolio'
      , 'Currency': 'currency'
      , 'Currency(or Equiv.)': 'currency'
      , 'Ledger Balance': 'balance'
      , 'Ledger Balance(Total Equiv.)': 'balance'
    }
    """
    [List] line => [List] Headers
    Only a few fields (headers) will be useful in the output csv, therefore
    we map those headers to field names in the output csv.
    """
    # Headers may span multiple lines inside one cell; keep only the last
    # line of each cell, then apply the rename map. Stop at the first empty
    # header cell.
    getHeadersFromLine = compose(
        list
      , partial(map, lambda s: headerMap[s] if s in headerMap else s)
      , partial(map, lambda s: s.split('\n')[-1])
      , partial(takewhile, lambda s: s != '')
    )
    # pop() consumes the header row; the remaining rows are zipped with it.
    return \
    compose(
        partial(map, dict)
      , lambda t: map(partial(zip, getHeadersFromLine(t[0])), t[1])
      , lambda lines: (pop(lines), lines)
      , partial(takewhile, nonEmptyLine)
    )(lines)
"""
[String] s => [String] s
Sometimes the values from the excel cells contains a leading '\ufeff' character,
because of utf-8 encoding. We want to remove that character if it is there.
"""
removeBOM = lambda s: s[1:] if len(s) > 0 and s[0] == '\ufeff' else s
|
985,969 | efdfe13b4d8eafedae98c46031bea17c4806c9a4 |
from datetime import datetime as dt
import xml.etree.ElementTree as XML
import json
import system
# Summary: Class object for the current state of all FW
# Mostly, it's a container for all the systems
# EVE Online faction-warfare faction IDs, keyed by faction name.
_facIDs = {
	"Caldari State" : 500001,
	"Minmatar Republic" : 500002,
	"Amarr Empire" : 500003,
	"Gallente Federation" : 500004,
	}
# Default faction-ID list used when callers don't narrow the set.
_factions = [500001, 500002, 500003, 500004]
class Warzone :
	"""Snapshot of the faction-warfare map at one timestamp.

	Wraps one ESI response dict and builds a name-keyed map of System
	objects from it (system names resolved via the local names.json file).
	"""
	def __init__ (self, Data) :
		self.data = Data
		self.timestamp = Data['timestamp']
		self.expires = Data['expires']
		self.systems = {}
		# names.json maps solar_system_id (as a string) -> system name.
		with open("names.json", 'r') as f :
			names = json.load(f)
		for sysdata in self.data['body'] :
			name = names[str(sysdata['solar_system_id'])]
			self.systems[name] = system.System(sysdata)
	def Save (self, folder="history/") :
		# Persist the raw ESI payload, named by its timestamp.
		savename = folder + self.timestamp + '.json'
		with open(savename, 'w') as f :
			json.dump(self.data, f, indent='\t')
	def CountSystems (self, facs = _factions) : # returns [{facID : num_systems}, total]
		# NOTE(review): mutable default `facs` is shared across calls; it is
		# only read here, so this is safe, but don't mutate it.
		countFacs = {}
		countAll = 0
		for ID in facs :
			countFacs[ID] = 0
		for name, sys in self.systems.items() :
			if sys.ownerID in facs :
				countFacs[sys.ownerID] += 1
				countAll += 1
		return [countFacs, countAll]
class WarzoneDiff :
	# aggregates two warzones, then stores in a single container
	def __init__ (self, wzNew, wzOld) :
		"""Compute per-faction plexing deltas between two Warzone snapshots.

		FacDeltas maps facID -> [deplex, oplex, total]; FacSysCounts maps
		facID -> number of systems owned in the new snapshot.
		"""
		self.TimeOld = wzOld.timestamp
		self.TimeNew = wzNew.timestamp
		self.NextExpiry = wzNew.expires
		self.FacDeltas = { # facID : [dplex, oplex, total]
			500001 : [0,0,0], # Caldari
			500002 : [0,0,0], # Minmatar
			500003 : [0,0,0], # Amarr
			500004 : [0,0,0] # Gallente
			}
		self.FacSysCounts = {
			500001 : 0,
			500002 : 0,
			500003 : 0,
			500004 : 0
			}
		self.systems = wzNew.systems # reuse dictionary
		for name, sys in self.systems.items() :
			sys.old = wzOld.systems[name]
			facID = sys.ownerID
			# Maps 500001<->500004 and 500002<->500003 (warzone enemy pairs).
			enemyID = abs((facID%500000)-5)+500000
			sys.delta = sys.Plexes() - sys.old.Plexes()
			# Ownership flipped between snapshots: the plex counters reset,
			# so the raw difference is meaningless — zero it out.
			if facID != sys.old.ownerID :
				sys.delta = 0
			if (sys.delta > 0) : # oplexing happened; credit the enemy
				self.FacDeltas[enemyID][1] += sys.delta
				self.FacDeltas[enemyID][2] += sys.delta
			else : # either deplexing or nothing happened; credit the owner
				self.FacDeltas[facID][0] -= sys.delta
				self.FacDeltas[facID][2] -= sys.delta
			self.FacSysCounts[facID] += 1
# quick WZD generator
def GetWZD (new_ESI_data, old_ESI_data) :
	"""Convenience wrapper: wrap both ESI payloads in Warzone snapshots and diff them."""
	return WarzoneDiff(Warzone(new_ESI_data), Warzone(old_ESI_data))
|
985,970 | f78c1e9e61482029355ba3cc00e91204f709ef5f | from pyspark.sql import DataFrame, functions as F
def lowercase_all_column_names(df:DataFrame)->DataFrame:
    """
    Convert all column names to lower case.
    """
    renamed = df
    # Each rename returns a new DataFrame; thread it through the loop.
    for name in renamed.columns:
        renamed = renamed.withColumnRenamed(name, name.lower())
    return renamed
def uppercase_all_column_names(df:DataFrame)->DataFrame:
    """
    Convert all column names to upper case.
    """
    renamed = df
    # Each rename returns a new DataFrame; thread it through the loop.
    for name in renamed.columns:
        renamed = renamed.withColumnRenamed(name, name.upper())
    return renamed
def add_metadata(df:DataFrame, field_dict:dict)->DataFrame:
    """Append each key/value in *field_dict* to *df* as a literal column."""
    result = df
    for name, value in field_dict.items():
        result = result.withColumn(name, F.lit(value))
    return result
985,971 | 42cd488011debaf5f251d464b5e5f4db2ec33661 | __author__ = 'hzliyong'
__metalass__ = type
class Rectangele:
    """Rectangle exposing a virtual 'size' attribute backed by width/height.

    (The class name keeps the original's misspelling of "Rectangle" so that
    existing callers are not broken.)
    """
    def __init__(self):
        self.width = 0
        self.height = 0

    def __setattr__(self, name, value):
        # 'size' is virtual: unpack it into width/height instead of storing it.
        if name == 'size':
            self.width, self.height = value
        else:
            self.__dict__[name] = value

    def __getattr__(self, name):
        """Resolve the virtual 'size' attribute; raise AttributeError otherwise.

        BUG FIX: the original *returned* the AttributeError class object
        instead of raising it (and never performed the __dict__ lookup shown
        in its commented-out line), so missing attributes silently evaluated
        to a class object instead of failing.
        """
        if name == 'size':
            return self.width, self.height
        try:
            # Supports explicit __getattr__(name) calls for stored attributes;
            # the implicit protocol only reaches here when the lookup failed.
            return self.__dict__[name]
        except KeyError:
            raise AttributeError(name) from None
# Demo: calls the dunder hooks explicitly (bypassing normal attribute syntax)
# to show __setattr__/__getattr__ in action.
r = Rectangele()
r.__setattr__('test', (12, 33))
print(r.__getattr__('test'))
|
985,972 | b7a311e63659b5fa986b2833ae147ee14c2ae93a | import os
from flask import Flask, request
import requests
import requests_cache
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv())
OPENROUTESERVICE_API_KEY = os.environ.get("ORS_API_KEY")
requests_cache.install_cache(allowable_methods=["GET", "POST"])
app = Flask(__name__, static_url_path="/static")
@app.route("/")
def hello_world():
return "Hello, World!"
@app.route("/directions", methods=["POST"])
def directions():
content = request.json
headers = {
"Accept": "application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8",
"Authorization": OPENROUTESERVICE_API_KEY,
"Content-Type": "application/json; charset=utf-8",
}
call = requests.post(
"https://api.openrouteservice.org/v2/directions/cycling-regular/geojson",
json=content,
headers=headers,
)
print(call.status_code, call.reason)
print(call.from_cache)
return call.text
|
985,973 | 9618149da53c1cb13ab45c140a1ff3b40040568f |
def search_reference(dic_values, dic_mask, row, col, band_name):
    """
    Search and return the most recent cloud-free value of the time series and its date before the current date.

    Walks the dates of the masked dictionary from most recent to oldest and
    returns the first one whose mask marks this pixel as cloud-free (mask
    value == 1), together with the pixel value for that date.

    Improvements over the original: the pixel values are looked up directly by
    date instead of building parallel index-aligned lists, which avoids a
    potential misalignment if ``dic_values[band_name]`` and ``dic_mask``
    iterate in different orders, and stops evaluating as soon as a cloud-free
    date is found.

    :param object dic_values: The dictionary with the dates and the pixel values of the image saved as arrays.
    :param object dic_mask: The dictionary with the dates and the generated cloud mask for the already analysed images.
    :param int row: The row of the image for a pixel.
    :param int col: The column of the image for a pixel.
    :param str band_name: The band from which the pixel values are extracted.
    :return: A tuple of the most recent cloud free date and pixel value.
    :raises IndexError: if no cloud-free observation exists for this pixel
        (the original raised the same type via a failing ``[-1]`` index).
    """
    for date in reversed(list(dic_mask)):
        if dic_mask[date][row, col] == 1:
            return date, dic_values[band_name][date][row, col]
    raise IndexError(
        "no cloud-free observation found for pixel ({}, {})".format(row, col))
|
985,974 | b2406a408a2c49a73e24172c29cb69cb2948dc56 | from litex.gen import *
from litex.soc.interconnect import dfi, lasmi_bus
from litex.soc.cores.sdram.lasmicon.refresher import *
from litex.soc.cores.sdram.lasmicon.bankmachine import *
from litex.soc.cores.sdram.lasmicon.multiplexer import *
class ControllerSettings:
    """Tuning knobs for the LASMI DRAM controller."""
    def __init__(self, req_queue_size=8, read_time=32, write_time=16,
                 with_bandwidth=False):
        # Depth of each bank's request queue.
        self.req_queue_size = req_queue_size
        # Multiplexer time slots granted to reads and writes respectively.
        self.read_time = read_time
        self.write_time = write_time
        # Whether to expose bandwidth measurement CSRs.
        self.with_bandwidth = with_bandwidth
class LASMIcon(Module):
    """LASMI DRAM controller: wires a refresher, one bank machine per bank,
    and a command multiplexer between the DFI PHY interface and the LASMI
    user-side interface."""
    def __init__(self, phy_settings, geom_settings, timing_settings,
                 controller_settings=None):
        if controller_settings is None:
            controller_settings = ControllerSettings()
        # Burst length = phases * data rate (1 for SDR, 2 for DDR families);
        # the low address bits covered by a burst are dropped from user addresses.
        if phy_settings.memtype in ["SDR"]:
            burst_length = phy_settings.nphases*1 # command multiplication*SDR
        elif phy_settings.memtype in ["DDR", "LPDDR", "DDR2", "DDR3"]:
            burst_length = phy_settings.nphases*2 # command multiplication*DDR
        address_align = log2_int(burst_length)
        self.dfi = dfi.Interface(geom_settings.addressbits,
            geom_settings.bankbits,
            phy_settings.dfi_databits,
            phy_settings.nphases)
        # +1 on latencies accounts for the controller's own pipeline stage.
        self.lasmic = lasmi_bus.Interface(
            aw=geom_settings.rowbits + geom_settings.colbits - address_align,
            dw=phy_settings.dfi_databits*phy_settings.nphases,
            nbanks=2**geom_settings.bankbits,
            req_queue_size=controller_settings.req_queue_size,
            read_latency=phy_settings.read_latency+1,
            write_latency=phy_settings.write_latency+1)
        # NOTE(review): computed from colbits, not rowbits, despite the name —
        # confirm whether this is intentional.
        self.nrowbits = geom_settings.colbits - address_align
        ###
        self.submodules.refresher = Refresher(geom_settings.addressbits, geom_settings.bankbits,
            timing_settings.tRP, timing_settings.tREFI, timing_settings.tRFC)
        # One bank machine per DRAM bank, each bound to its LASMI bank port.
        self.submodules.bank_machines = [BankMachine(geom_settings, timing_settings, controller_settings, address_align, i,
                getattr(self.lasmic, "bank"+str(i)))
            for i in range(2**geom_settings.bankbits)]
        self.submodules.multiplexer = Multiplexer(phy_settings, geom_settings, timing_settings, controller_settings,
            self.bank_machines, self.refresher,
            self.dfi, self.lasmic)
    def get_csrs(self):
        # CSRs are owned by the multiplexer (e.g. bandwidth measurement).
        return self.multiplexer.get_csrs()
|
985,975 | 1d22ed010fd258884c8a2895530ac390e2e69782 | from django.db import models
from django.db import models
class Empleado(models.Model):
    """Employee record; referenced by ``ticket`` via a ForeignKey (one-to-many)."""
    # Field for the one-to-many relation
    nombre = models.CharField(max_length=40)
    apellido = models.CharField(max_length=50, null=True)
    dni = models.CharField(max_length=40, null=True)
    telefono = models.IntegerField(null=True)
    email = models.EmailField(null=True)
    # A default value can be given with 'default'.
    # To allow null properties, add the options null=True, blank=True.
    def __str__(self):
        return f"{self.nombre} {self.apellido}, Id: {self.id}"
# Urgency levels for the dropdown in the ticket model.
TICKET_URGENCIA_CHIOCES = (
    ('urgente','URGENTE'),
    ('alta','ALTA'),
    ('media','MEDIA'),
    ('baja','BAJA')
)
# Ticket types for the dropdown in the ticket model.
TICKET_TIPO_CHOICES = (
    ('avería','AVERÍA'),
    ('mejora','MEJORA'),
    ('mantenimiento','MANTENIMIENTO')
)
# Ticket states for the dropdown in the ticket model.
TICKET_ESTADO_CHOICES = (
    ('abierto','ABIERTO'),
    ('cerrado','CERRADO')
)
# NOTE(review): 'CHIOCES' above is a typo of 'CHOICES'; the ticket model below
# references it by this spelling, so renaming requires updating both places.
# Free-text description model.
class Descripcion(models.Model):
    texto = models.CharField(max_length=200, null=True)
class ticket(models.Model):
    """Support ticket raised by an employee.

    NOTE(review): the lower-case class name breaks PEP 8 naming, but renaming
    it would require a Django migration and caller updates — left as-is.
    """
    # No explicit primary-key field is needed; Django adds an auto id.
    numeroref = models.CharField(max_length=50, null=True)
    titulo = models.CharField(max_length=50, null=True)
    descripcion = models.CharField(max_length=50, null=True)
    fecha_apertura = models.DateField(null=True)
    fecha_resolucion = models.DateField(null=True)
    urgencia = models.CharField(max_length=50,choices = TICKET_URGENCIA_CHIOCES,default='urgente', null=True)
    tipo = models.CharField(max_length=50, choices= TICKET_TIPO_CHOICES, default= 'avería' ,null=True)
    estado = models.CharField(max_length=50, choices= TICKET_ESTADO_CHOICES, default='abierto' ,null=True)
    empleado = models.ForeignKey(Empleado, on_delete=models.CASCADE)
    comentarios = models.CharField(max_length=50, null=True)
    def __str__(self):
        # NOTE(review): 'numeroref=' interpolates self.titulo, not
        # self.numeroref — looks like a copy-paste slip; confirm before fixing.
        return f"id={self.id},numeroref={self.titulo},titulo={self.titulo},descripcion={self.descripcion},fechaaper={self.fecha_apertura},fechares{self.fecha_resolucion}," \
               f"Urgencia={self.urgencia},tipo={self.tipo},estado={self.estado},empleado={self.empleado}comentarios={self.comentarios}"
class Equipo(models.Model):
    """Piece of equipment tracked for maintenance."""
    # No explicit primary-key field is needed; Django adds an auto id.
    modelo = models.CharField(max_length=50)
    numeroserie = models.CharField(max_length=50)
    marca = models.CharField(max_length=50)
    tipo = models.CharField(max_length=50)
    fecha_adquisicion = models.DateField()
    fecha_puesta_marcha = models.DateField()
    proveedor = models.CharField(max_length=100)
    planta = models.CharField(max_length=200)
    def __str__(self):
        return f"id={self.id},modelo={self.modelo},marca={self.marca},tipo={self.tipo},fecha_adquisicion={self.fecha_adquisicion},fecha_puesta_marcha={self.fecha_puesta_marcha}," \
               f"proveedor={self.proveedor},planta={self.planta}"
|
985,976 | bed2b989d422b0b899beda1c10958f230498447c | # Dependencies
import praw
import sys
import time
from datetime import datetime, timedelta
import datahandler
from config import config
from bot_login import bot_login
from log_it import log_it, getFileName
from timeout import timeout
import matchthread
import flair
import mentions
import curl
# Get logfile for this session
logfile=getFileName()
# Get the posts and stuff
def get_posts(r):
try:
for submission in r.subreddit(config["sub_name"]).new(limit=30):
if submission.link_flair_text is None and config["moderate_flair"] is True:
comment = submission.reply("**Please flair your post!**\n\n" + config["flair_submission_reply"] + "\n\n" + config["bot_name"] + " will checkup on your post in a minute or two and will approve your post if it's been flaired.")
comment.mod.distinguish(sticky=True)
submission.mod.remove()
log_it(logfile, "\tAlerted no-flair on https://reddit.com/r/" + config["sub_name"] + "/comments/" + submission.id)
except Exception as e:
log_it(logfile, e)
# Check messages for updated posts
def check_messages(r):
    """Process unread inbox messages and (optionally) username mentions,
    dispatching each to its feature handler based on config flags."""
    # messages
    try:
        for message in r.inbox.unread(limit=None):
            subject = message.subject.split()
            # if config["moderate_flair"] is True and subject[0] == "Flaired:":
            #     flair.messages_flair(r, message, logfile)
            if config["moderate_mentions"] is True and subject[0] == "ResetTheCounter:" :
                mentions.message_resetthecounter(r, message, logfile)
            if config["reply_curl"] is True:
                curl.messages_respond(r, message, logfile)
    except Exception as e:
        # Best-effort: log and fall through to mention handling.
        log_it(logfile, str(e))
    # mentions
    if config["moderate_mentions"] is True:
        try:
            mods = r.subreddit(config["sub_name"]).moderator()
            history = datahandler.get("mentions")
            for message in r.inbox.mentions(limit=30):
                mentions.messages_mentions(r, message, logfile, mods, history)
        except Exception as e:
            log_it(logfile, str(e))
# Check old messages for posts that have since been flaired correctly
def check_comments(r):
    """Re-inspect the bot's recent comments that moderators have neither
    removed nor approved, so posts flaired after the warning get approved."""
    try:
        for comment in r.redditor(config["bot_username"]).comments.new(limit=100):
            if comment.removed is not True and comment.approved is not True:
                if config["moderate_flair"] is True :
                    flair.messages_comment(r, comment, logfile)
    except Exception as e:
        log_it(logfile, str(e))
# Do it every 60 seconds
if __name__ == "__main__":
while True:
try:
r = bot_login(logfile)
if r is not False :
log_it(logfile, "Checking submissions...")
get_posts(r)
log_it(logfile, "Checking inbox/mentions...")
check_messages(r)
log_it(logfile, "Checking comments...")
check_comments(r)
log_it(logfile, "Checking for match threads...")
matchthread.check_for_match_thread(r, logfile)
log_it(logfile, "Finished task!\n---------------------------\n")
except Exception as e:
log_it(logfile, str(e))
time.sleep(60)
sys.exit()
|
985,977 | 82ffedde1a0204a29f677cdcfab636b0bf1506ca | def countingSort(arr):
n = len(arr)
count_arr = [0] * n
for i in range(n):
count_arr[arr[i]] += 1
return count_arr
if __name__ == '__main__':
    # Embedded sample input (100 values in [0, 99]); prints the frequency table.
    arr = list(map(int, '63 25 73 1 98 73 56 84 86 57 16 83 8 25 81 56 9 53 98 67 99 12 83 89 80 91 39 86 76 85 74 39 25 90 59 10 94 32 44 3 89 30 27 79 46 96 27 32 18 21 92 69 81 40 40 34 68 78 24 87 42 69 23 41 78 22 6 90 99 89 50 30 20 1 43 3 70 95 33 46 44 9 69 48 33 60 65 16 82 67 61 32 21 79 75 75 13 87 70 33'.rstrip().split()))
    result = countingSort(arr)
    print(*result)
985,978 | ff56f5855845fb8333387e461f2f1b165786dd48 |
# coding: utf-8
# In[1]:
def preco_aluguel(km, dias):
    """Return the car-rental price: R$0.15 per km plus R$60.00 per day."""
    return (0.15 * km) + (60 * dias)


if __name__ == '__main__':
    # Guarding the I/O lets the pricing rule be imported and tested on its own.
    km = int(input("digite a quantidade de km percorridos: "))
    dias = int(input("digite a quantidade de dias em que o veículo ficou alugado: "))
    preco = preco_aluguel(km, dias)
    print("o valor a ser pago é %.2f" % preco)
|
985,979 | 018a905ae5468df00b2f12d93974f33cccc046ea | """An openScope airspace object."""
import json
from .utilities.converters import fromPolygon, toPolygon
class AirspaceModel:
    """An openScope airspace object."""
    # Class-level defaults; every one of them is shadowed per-instance in
    # __init__. NOTE(review): `poly = []` is a shared mutable class attribute —
    # harmless while __init__ always reassigns it, but fragile.
    airspaceClass = None
    ceiling = None
    floor = None
    name = None
    poly = []
    def __init__(self, value):
        # `value` is a dict in openScope airspace JSON shape.
        self.airspaceClass = value['airspace_class']
        self.ceiling = value['ceiling']
        self.floor = value['floor']
        # Name intentionally discarded (was value['name']) — confirm why.
        self.name = None # value['name']
        self.poly = toPolygon(value['poly'])
    @staticmethod
    def export(layer):
        """Export the specified QgsMapLayer features to JSON"""
        lines = []
        # Sort in order of area, largest first
        for f in sorted(layer.getFeatures(), key=lambda x: -x.geometry().area()):
            # The formatted list of lines
            poly = fromPolygon(f)
            pointLines = list(map(lambda x: '        ' + json.dumps(x), poly))
            template = """{
    "floor": %(floor)d,
    "ceiling": %(ceiling)d,
    "airspace_class": %(airspace_class)s,
    "poly": [
%(poly)s
    ]
}"""
            lines.append(template % {
                'floor': f['floor'],
                'ceiling': f['ceiling'],
                'airspace_class': json.dumps(f['airspace_class']),
                'poly': ',\n'.join(pointLines)
            })
        return ',\n'.join(lines)
|
985,980 | 3c2d50378759baf0bb9c24d97bb1297736cc55d8 | # TIME ESTAMTES GENERATING
config_elapsed_time_b60 = 42817.5 # seconds
time_per_config_generation = config_elapsed_time_b60 / 1000.0
# TIME ESTIMATES FLOWING
flow_elapsed_time_b60 = (1*24*60*60 + 13*60*60 + 14*60)
NFlows_B60 = 1000
NFlows_B61 = 500
NFlows_B62 = 500
NFlows_B645 = 250
flow_cfg_time_b60 = float(flow_elapsed_time_b60) / float(NFlows_B60)
lattice_size_b60 = 24**3*48.
lattice_size_b61 = 28**3*56.
lattice_size_b62 = 32**3*64.
lattice_size_b645 = 48**3*96.
scaling_b60_b61 = lattice_size_b61/lattice_size_b60
scaling_b60_b62 = lattice_size_b62/lattice_size_b60
scaling_b60_b645 = lattice_size_b645/lattice_size_b60
# print "BETA=6.0:\n %d seconds/%.2f minutes/%.2f hours\n Time per flow: %.4f minutes" % (flow_elapsed_time_b60,flow_elapsed_time_b60/60.,flow_elapsed_time_b60/3600.,flow_cfg_time_b60/60.0)
# print "BETA=6.1:\n %d hours\n Time per flow: %.4f minutes" % (flow_elapsed_time_b60/3600. * scaling_b60_b61 * 0.5,flow_cfg_time_b60/60.0 * scaling_b60_b61)
# print "BETA=6.2:\n %d hours\n Time per flow: %.4f minutes" % (flow_elapsed_time_b60/3600. * scaling_b60_b62 * 0.5,flow_cfg_time_b60/60.0 * scaling_b60_b62)
# print "BETA=6.45:\n %d hours\n Time per flow: %.4f minutes" % (flow_elapsed_time_b60/3600. * scaling_b60_b645 * 0.25,flow_cfg_time_b60/60.0 * scaling_b60_b645)
def print_flow_time_estimates(beta, tot_gen_time, time_per_generation, tot_flow_time, time_per_flow_time, scaling=1.0, config_number_scaling_factor=1.0):
    """Print generation/flow time estimates for a given beta.

    Totals are multiplied by both the per-configuration cost scaling and the
    configuration-count factor; per-item times by the cost scaling only.

    Args:
        beta: lattice coupling, used only for the report header.
        tot_gen_time: total generation time at the reference beta [seconds].
        time_per_generation: per-configuration generation time [seconds].
        tot_flow_time: total flow time at the reference beta [seconds].
        time_per_flow_time: per-configuration flow time [seconds].
        scaling: lattice-volume cost ratio relative to the reference beta.
        config_number_scaling_factor: ratio of configuration counts.
    """
    msg = """BETA={0:<g}:
    Total config generation time:  {1:<.0f} seconds/{2:<.1f} minutes/{3:<.2f} hours
    Time per configuration:        {4:<.4f} minutes
    Total flow time:               {5:<.0f} seconds/{6:<.1f} minutes/{7:<.2f} hours
    Time per flow:                 {8:<.4f} minutes""".format(
        beta,
        tot_gen_time * scaling * config_number_scaling_factor,
        tot_gen_time/60.0 * scaling * config_number_scaling_factor,
        tot_gen_time/3600.0 * scaling * config_number_scaling_factor,
        time_per_generation/60.0 * scaling,
        tot_flow_time * scaling * config_number_scaling_factor,
        tot_flow_time/60.0 * scaling * config_number_scaling_factor,
        tot_flow_time/3600.0 * scaling * config_number_scaling_factor,
        time_per_flow_time/60.0 * scaling)
    # BUG FIX: was the Python 2-only statement `print msg`; the call form
    # below is valid in both Python 2 and Python 3.
    print(msg)
# Report estimates for each beta: 6.0 is the measured reference; the others
# scale by lattice volume and use fewer configurations (0.5x / 0.25x).
print_flow_time_estimates(6.0, config_elapsed_time_b60, time_per_config_generation, flow_elapsed_time_b60, flow_cfg_time_b60)
print_flow_time_estimates(6.1, config_elapsed_time_b60, time_per_config_generation, flow_elapsed_time_b60, flow_cfg_time_b60,scaling=scaling_b60_b61,config_number_scaling_factor=0.5)
print_flow_time_estimates(6.2, config_elapsed_time_b60, time_per_config_generation, flow_elapsed_time_b60, flow_cfg_time_b60,scaling=scaling_b60_b62,config_number_scaling_factor=0.5)
print_flow_time_estimates(6.45, config_elapsed_time_b60, time_per_config_generation, flow_elapsed_time_b60, flow_cfg_time_b60,scaling=scaling_b60_b645,config_number_scaling_factor=0.25)
# import os
# # bf = "/work/users/hmvege/"
# # ipf = "/work/users/hmvege/output/prodRunBeta6_1/field_configurations/"
# bf = "/work/users/hmvege/"
# ipf = "//work/users/hmvege/output/prodRunBeta6_1/field_configurations"
# ipf = ipf.replace("//","/")
# print os.path.normpath(ipf)
# print os.path.relpath(ipf,bf)
985,981 | ee7e02ac9e26dbd216e1ab053f785a94a560710b | # -*- coding: utf-8 -*-
from blog.models.book import Book
from blog.models.article import Article
|
985,982 | d2fe0d60d189b6241605c6edcef085fead1d1e1b | import copy
import sys
import os.path
sys.path.insert( 0, os.path.normpath(os.path.join( os.path.dirname( __file__ ), '..') ))
from aql_tests import skip, AqlTestCase, runLocalTests
from aql.options import OptionType, BoolOptionType, EnumOptionType, RangeOptionType, ListOptionType, \
OptionValue, ConditionalValue, Condition, SimpleOperation, SimpleInplaceOperation, \
SetValue, iAddValue, iSubValue, ErrorOptionTypeUnableConvertValue
from aql.util_types import Dict
#//===========================================================================//
def _condition( options, context, flag, opt_value = None ):
  # Test stub condition: ignores options/context and just returns the
  # pre-bound `flag`, so tests can force a Condition to be true or false.
  return flag
def _convertValue( options, context, value ):
  # Test evaluator: if the value is itself an OptionValue, resolve it
  # (recursively, by passing this same function as the converter).
  if isinstance( value, OptionValue ):
    value = value.get( options, context, _convertValue )
  return value
class TestOptionValue( AqlTestCase ):
#//---------------------------------------------------------------------------//
  def test_option_value(self):
    # Three conditional +2/+3/+3 increments under an always-true condition
    # sum to 8; the expected result is 5, presumably because the
    # RangeOptionType(0, 5) clamps to its maximum — confirm against aql docs.
    opt_type1 = RangeOptionType( min_value = 0, max_value = 5 )
    opt_value = OptionValue( opt_type1 )
    cond = Condition( None, _condition, flag = True, opt_value = opt_value )
    cond_value = ConditionalValue( iAddValue( 2 ), cond )
    cond_value2 = ConditionalValue( iAddValue( 3 ), cond )
    cond_value3 = ConditionalValue( iAddValue( 3 ), cond )
    opt_value.appendValue( cond_value )
    opt_value.appendValue( cond_value2 )
    opt_value.appendValue( cond_value3 )
    self.assertEqual( opt_value.get( options = {}, context = None ), 5 )
def test_option_value2(self):
opt_value = OptionValue( OptionType( int ) )
cond_true = Condition( None, _condition, flag = True )
cond_false = Condition( cond_true, _condition, flag = False )
cond_false = Condition( cond_false, _condition, flag = True )
opt_value.appendValue( ConditionalValue( iAddValue( 2 ), cond_false ) )
self.assertEqual( opt_value.get( {}, None ), 0 )
opt_value.appendValue( ConditionalValue( iAddValue( 3 ), cond_true ) )
self.assertEqual( opt_value.get( {}, None ), 3 )
opt_value.appendValue( ConditionalValue( iAddValue( 1 ), cond_true ) )
self.assertEqual( opt_value.get( {}, None ), 4 )
opt_value.appendValue( ConditionalValue( iAddValue( 1 ), cond_false ) )
self.assertEqual( opt_value.get( {}, None ), 4 )
opt_value2 = OptionValue( OptionType( int ) )
opt_value.appendValue( ConditionalValue( SetValue( opt_value2 ), cond_true ) )
opt_value2.appendValue( ConditionalValue( SetValue( 7 ), cond_true ) )
self.assertEqual( opt_value.get( {}, None, _convertValue ), 7 )
self.assertEqual( opt_value2.get( {}, None, _convertValue ), 7 )
opt_value2.appendValue( ConditionalValue( SetValue( 8 ), cond_true ) )
self.assertEqual( opt_value.get( {}, None, _convertValue ), 8 )
self.assertEqual( opt_value2.get( {}, None, _convertValue ), 8 )
opt_value.appendValue( ConditionalValue( iSubValue( 0 ), cond_true ) )
self.assertEqual( opt_value.get( {}, None, _convertValue ), 8 )
tmp_opt_value = opt_value.copy()
self.assertEqual( tmp_opt_value.get( {}, None, _convertValue ), 8 )
tmp_opt_value.appendValue( ConditionalValue( iAddValue( 2 ), cond_true ) )
self.assertEqual( tmp_opt_value.get( {}, None, _convertValue ), 10 )
#//---------------------------------------------------------------------------//
def test_option_value3(self):
opt_value = OptionValue( OptionType( int ) )
opt_value.appendValue( ConditionalValue( SetValue( 1 ) ) )
self.assertEqual( opt_value.get( {}, None ), 1 )
opt_value.appendValue( ConditionalValue( SetValue( 0 ) ) )
self.assertEqual( opt_value.get( {}, None ), 0 )
opt_value_list = OptionValue( ListOptionType( value_type = int ) )
opt_value_list.appendValue( ConditionalValue( SetValue( 1 ) ) )
self.assertEqual( opt_value_list.get( {}, None ), 1 )
opt_value_list.appendValue( ConditionalValue( iAddValue( 0 ) ) )
self.assertEqual( opt_value_list.get( {}, None ), "1, 0" )
#//---------------------------------------------------------------------------//
def test_option_value4(self):
opt_value = OptionValue( OptionType( int ) )
def _incValue( value ):
return value + 1
opt_value = OptionValue( OptionType( int ) )
opt_value.appendValue( ConditionalValue( SetValue( 2 ) ) )
opt_value.appendValue( ConditionalValue( SimpleInplaceOperation( _incValue ) ) )
self.assertEqual( opt_value.get( {}, None ), 3 )
#//---------------------------------------------------------------------------//
def test_option_value_enum(self):
value_type = EnumOptionType( values = ( ('off', 0), ('size', 1), ('speed', 2) ) )
opt_value = OptionValue( value_type )
opt_value.appendValue( ConditionalValue( SetValue( 'size' ) ) )
self.assertEqual( opt_value.get( {}, None ), value_type(1) )
opt_value.appendValue( ConditionalValue( SetValue( 'ultra' ) ) )
self.assertRaises( ErrorOptionTypeUnableConvertValue, opt_value.get, {}, None )
#//---------------------------------------------------------------------------//
def test_option_value_cyclic(self):
opt_value1 = OptionValue( OptionType( value_type = int ) )
opt_value2 = OptionValue( RangeOptionType( min_value = 0, max_value = 5 ) )
opt_value1.appendValue( ConditionalValue( SetValue( 1 ) ) )
self.assertEqual( opt_value1.get( {}, None, _convertValue ), 1 )
opt_value2.appendValue( ConditionalValue( SetValue( 2 ) ) )
self.assertEqual( opt_value2.get( {}, None, _convertValue ), 2 )
opt_value1.appendValue( ConditionalValue( iAddValue( opt_value2 ) ) )
self.assertEqual( opt_value1.get( {}, None, _convertValue ), 3 )
opt_value2.appendValue( ConditionalValue( iAddValue( opt_value1 ) ) )
self.assertEqual( opt_value2.get( {}, None, _convertValue ), 5 )
opt_value1.appendValue( ConditionalValue( iAddValue( opt_value2 ) ) )
self.assertEqual( opt_value2.get( {}, None, _convertValue ), opt_value2.option_type(7) )
self.assertEqual( opt_value1.get( {}, None, _convertValue ), 7 )
# opt1: 1 + opt2 + opt2 = 1 + 3 + 3
# opt2: 2 + opt1 = 2 + 1 + 2 + 2
#//---------------------------------------------------------------------------//
def test_option_value_list(self):
opt_type1 = ListOptionType( value_type = EnumOptionType( values = ( ('off', 0), ('size', 1), ('speed', 2) ) ) )
opt_value = OptionValue( opt_type1 )
cond = Condition( None, _condition, flag = True, opt_value = opt_value )
cond2 = Condition( cond, _condition, flag = False, opt_value = opt_value )
cond_value = ConditionalValue( iAddValue( 1 ), cond )
cond_value2 = ConditionalValue( iAddValue( 0 ), cond2 )
cond_value3 = ConditionalValue( iAddValue( 2 ), cond )
cond_value4 = ConditionalValue( iAddValue( 1 ), cond2 )
opt_value.appendValue( cond_value )
opt_value.appendValue( cond_value2 )
opt_value.appendValue( cond_value3 )
opt_value.appendValue( cond_value4 )
self.assertEqual( opt_value.get( {}, None ), [1,2] )
opt_value.prependValue( cond_value3 )
self.assertEqual( opt_value.get( {}, None ), [2,1,2] )
opt_value = copy.copy( opt_value )
self.assertEqual( opt_value.get( {}, None ), [2,1,2] )
self.assertIs( opt_value.option_type, opt_type1 )
#//---------------------------------------------------------------------------//
def test_option_value_dict(self):
opt_type1 = OptionType( value_type = dict )
opt_value = OptionValue( opt_type1 )
cond_value = ConditionalValue( SetValue( {1:2} ) )
cond_value = ConditionalValue( SetValue( {3:4} ) )
opt_value.appendValue( cond_value )
self.assertEqual( opt_value.get( {}, None ), {3:4} )
opt_type1 = OptionType( value_type = Dict )
#//===========================================================================//
# Run this module's test cases when executed directly.
if __name__ == "__main__":
    runLocalTests()
|
985,983 | 98a3dfc2963330600ded8ca7d338ff285c981872 | from django.shortcuts import render
# Create your views here.
from django.shortcuts import render
def loadPage(request):
    """Render the web-client page with the temperature listing header."""
    context = {
        'title': "Temperature Sensor",
        'message': "The list of temperatures is shown below in Degree Celsius.<p>Date Time | Temperature",
    }
    return render(request, 'webclient_app.html', context)
985,984 | 36cfc78585be00e85bb53e0100324c85b336f69c | version https://git-lfs.github.com/spec/v1
oid sha256:e094636ca77a7e06ff19a86e005ff1af939a962bf8330053c7f76fc49ac45ff4
size 159430
|
985,985 | d7c2f2b09a1dc01467f16f5e2fa82e3d1c26a8f6 | # -*- coding:utf-8 -*-
import xlrd
import xlwt
import os
import json
import re
from xlutils.copy import copy
'''
批量修改格式为:item_id:is_bind:num 物品/物品列表的物品id
'''
# 获取path路径下的后缀为postfix文件名列表
def getPostfixFileList(path, postfix):
    """Return names of regular files directly under *path* whose extension
    equals *postfix* (e.g. '.xls'); subdirectories are skipped."""
    matches = []
    for name in os.listdir(path):
        full = path + '/' + name
        if os.path.isfile(full) and os.path.splitext(name)[1] == postfix:
            matches.append(name)
    return matches
# 读取一个xls文件,返回xls的sheet名列表
def getSheetNameList(xls_name):
    """Open the workbook at *xls_name* and return its sheet-name list."""
    return xlrd.open_workbook(xls_name).sheet_names()
# 读取一个xls文件,返回xls的sheet字典
def getSheetDict(path, xls_name):
    """Open path/xls_name and return a {sheet_name: sheet_object} mapping
    covering every sheet in the workbook."""
    workbook = xlrd.open_workbook(path + '/' + xls_name)
    return {name: workbook.sheet_by_name(name) for name in workbook.sheet_names()}
# 返回物品和物品列表单元格行列元组(row, col)列表。整型会被转成字符串
def getItemAndItemlistCellList(sheet_obj, original_item_id, target_item_id):
    """Scan every cell of *sheet_obj* for item/item-list entries containing
    ``original_item_id + ':'`` and return a list of ``{(row, col): new_text}``
    dicts where the id has been replaced by *target_item_id*."""
    matches = []
    marker = original_item_id + ':'
    for row_idx in range(sheet_obj.nrows):
        row_values = sheet_obj.row_values(row_idx)
        for col_idx in range(len(row_values)):
            raw = row_values[col_idx]
            # Numeric cells can never hold an "id:bind:num" item string.
            if isinstance(raw, (float, int)):
                continue
            text = str(raw)
            # Empty cells are not recorded.
            if not text:
                continue
            # Only cells that actually contain the id in item format qualify.
            if marker not in text:
                continue
            matches.append({(row_idx, col_idx): text.replace(original_item_id, target_item_id)})
    return matches
def main():
    """Batch-replace item ids in all .xls files under *xls_path* according to
    the {original_id: target_id} mapping in item_id_cfg.json, logging the
    planned edits to readlog.txt and saving modified copies to *save_path*."""
    xls_path = 'C:/work/ug04_cn/xlsdata'
    save_path = 'C:/work/ug04_cn/xlsdata/update_excle_item_id/xls'
    item_list = []
    # Load the id-replacement mapping from the JSON config.
    with open("./item_id_cfg.json", 'r') as load_f:
        load_dict = json.load(load_f)
        print(load_dict)
        item_list.append(load_dict)
    xls_list = getPostfixFileList(xls_path, '.xls')
    # Collect every cell that needs editing into xls_dict:
    # {xls_name: {sheet_name: [{(row, col): new_value}, ...]}}
    xls_dict = {}
    for item_dict in item_list:
        original_item_id = ''
        target_item_id = ''
        for key, value in item_dict.items():
            original_item_id = key
            target_item_id = value
            # Walk every .xls file in the folder.
            for xls_name in xls_list:
                sheet_dict = {}
                sheet_obj_dict = getSheetDict(xls_path, xls_name)
                for sheet_name in sheet_obj_dict:
                    cell_list = getItemAndItemlistCellList(sheet_obj_dict[sheet_name], original_item_id, target_item_id)
                    # Sheets with no matching cells are not recorded.
                    if len(cell_list) == 0:
                        continue
                    sheet_dict[sheet_name] = cell_list
                    print(xls_name, " ", sheet_name, " ", sheet_dict[sheet_name])
                xls_dict[xls_name] = sheet_dict
    # Write a log of the pending modifications.
    with open("./readlog.txt", "w", encoding='utf-8') as f:
        for xls_name in xls_dict:
            print(xls_name, " ", xls_dict[xls_name], file=f)
    # Apply the collected edits cell by cell (via xlutils.copy, since xlrd
    # workbooks are read-only) and save each modified workbook.
    for w_xls_name, w_sheet_dict in xls_dict.items():
        wbk = xlrd.open_workbook(xls_path + '/' + w_xls_name)
        workbooknew = copy(wbk)
        for w_sheet_name, w_cell_list in w_sheet_dict.items():
            w_sheet = workbooknew.get_sheet(w_sheet_name)
            for w_cell_dict in w_cell_list:
                for w_col_rol_tuple, w_cell_val in w_cell_dict.items():
                    w_sheet.write(w_col_rol_tuple[0], w_col_rol_tuple[1], str(w_cell_val))
        workbooknew.save(save_path + '/' + w_xls_name)
|
985,986 | 24ea8e3a223d9718d414ceb3608b1fc05b4b733d | #!/usr/bin/env python
import re
def isvalid(email):
    """Check an email address for validity.

    Accepts ``user@site.ext`` where user is alphanumeric plus ``_``/``-``,
    site is alphanumeric, and the extension is 1-3 word characters.

    Fix: the original pattern used ``\\w{,3}`` (i.e. ``{0,3}``), which
    accepted an empty extension such as ``'user@site.'``.
    """
    pattern = re.compile(r"^([a-zA-Z0-9_\-]+)@([a-zA-Z0-9]+)\.\w{1,3}$")
    return bool(pattern.match(email))
if __name__ == '__main__':
    # Read N addresses from stdin, keep only the valid ones, print them sorted.
    n = int(input())
    emails = (input().strip() for _ in range(n))
    valid_emails = filter(isvalid, emails)
    print(sorted(valid_emails))
985,987 | 94de3e04603fe434c6482da361e7077281a9387e | from tortoise.exceptions import NoValuesFetched
from tortoise.models import Model
from tortoise import fields
from typing import List
from schemas.student import PStudentBase
from schemas.course import PCourseBase
class Student(Model):
    """Student record; linked to courses through the 'student_course' table."""
    id = fields.IntField(pk=True, generated=True, index=True)
    first_name = fields.CharField(max_length=45, index=True)
    last_name = fields.CharField(max_length=45, index=True)
    phone = fields.CharField(max_length=20, index=True)
    address = fields.CharField(max_length=255, index=True)
    # Reverse accessor created by Course.students_list (related_name='courses_list').
    courses_list: fields.ManyToManyRelation["Course"]
    async def courses(self) -> List[PCourseBase]:
        """Return all courses of this student, or -1 when the relation was
        not prefetched.

        NOTE(review): returning -1 contradicts the List[...] annotation;
        an empty list (or re-raising) would be safer — confirm callers.
        """
        try:
            return await self.courses_list.all()
        except NoValuesFetched:
            return -1
    class PydanticMeta:
        # NOTE(review): 'computed' usually names callables; the computed
        # method here is 'courses', not the relation 'courses_list' — verify.
        computed = ("courses_list",)
class Course(Model):
    """Course record; owns the many-to-many link to Student."""
    id = fields.IntField(pk=True, generated=True, index=True)
    name = fields.CharField(max_length=25, index=True)
    description = fields.CharField(max_length=255, index=True)
    # Forward side of the M2M; creates 'courses_list' on Student.
    students_list: fields.ManyToManyRelation[Student] = fields.ManyToManyField('models.Student',
                                                                              related_name='courses_list',
                                                                              through='student_course')
    async def students(self) -> List[PStudentBase]:
        """Return all students of this course, or -1 when the relation was
        not prefetched.

        NOTE(review): -1 contradicts the List[...] annotation — see Student.courses.
        """
        try:
            return await self.students_list.all()
        except NoValuesFetched:
            return -1
    class PydanticMeta:
        # NOTE(review): the computed method is named 'students', not
        # 'students_list' — verify this is what pydantic_model_creator expects.
        computed = ("students_list",)
985,988 | 236948b483754fb45e5761d4074502b48945ffca | from django.core.urlresolvers import reverse
from django.test import TestCase
from model_mommy import mommy
from chemicals import models, admin
from chemicals.tests.utils import TestUsers
class ChemicalAdminTest(TestCase):
    """Test the custom archive/unarchive admin actions.

    Fixes over the original version:
    - ``SetUp`` -> ``setUp``: with the wrong capitalisation unittest never
      called it, so the superadmin login was silently skipped.
    - ``assertTrue(response.status_code, 302)`` -> ``assertEqual``: the
      second argument of assertTrue is a failure *message*, so the original
      assertion could never fail.
    - ``test_unarchive_chemicals`` selected ``archived[0]`` three times but
      asserted that three chemicals changed state; it now selects three
      distinct ones.
    """
    def setUp(self):
        users = TestUsers()
        self.assertTrue(users.login(users.superadmin))
    def count_archived(self):
        """Number of Chemical rows currently archived."""
        return models.Chemical.objects.filter(archive=True).count()
    def count_active(self):
        """Number of Chemical rows currently active (not archived)."""
        return models.Chemical.objects.filter(archive=False).count()
    def test_archive_chemicals(self):
        active = mommy.make(models.Chemical, archive=False, _quantity=5)
        archived = mommy.make(models.Chemical, archive=True, _quantity=4)
        change_url = reverse('admin:chemicals_chemical_changelist')
        self.assertEqual(self.count_active(), 5)
        self.assertEqual(self.count_archived(), 4)
        sel = [active[0].pk, active[1].pk]
        data = {'action': 'archive_chemicals', '_selected_action': sel}
        response = self.client.post(change_url, data)
        # Successful admin actions redirect back to the changelist.
        self.assertEqual(response.status_code, 302)
        self.assertEqual(self.count_active(), 3)
        self.assertEqual(self.count_archived(), 6)
    def test_unarchive_chemicals(self):
        active = mommy.make(models.Chemical, archive=False, _quantity=5)
        archived = mommy.make(models.Chemical, archive=True, _quantity=4)
        change_url = reverse('admin:chemicals_chemical_changelist')
        self.assertEqual(self.count_active(), 5)
        self.assertEqual(self.count_archived(), 4)
        # Three distinct chemicals, matching the expected 4 -> 1 transition.
        sel = [archived[0].pk, archived[1].pk, archived[2].pk]
        data = {'action': 'unarchive_chemicals', '_selected_action': sel}
        response = self.client.post(change_url, data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(self.count_active(), 8)
        self.assertEqual(self.count_archived(), 1)
|
985,989 | ba5d8bf4ba2bc61e1c22973d13b1d45766c63ef5 | import os
import base64
from datetime import datetime
from google.cloud import storage
def uploadPost(b64image, user_id):
    """Decode *b64image* and upload it as a PNG to the 'yorimichi_posts' bucket.

    Returns {'message': 'succeeded', 'cloud_storage_filename': <blob name>}.

    Fix: the original left the scratch file open (no close on error) and
    leaked it on disk when the upload raised; file handling now uses ``with``
    and removal is guaranteed by ``finally``.
    """
    storage_client = storage.Client()
    bucket = storage_client.get_bucket('yorimichi_posts')
    destination_blob_name = make_dest_name(user_id)
    blob = bucket.blob(destination_blob_name)
    # Write the decoded image to a scratch file next to this module.
    source_file_name = make_tmp_filename()
    with open(source_file_name, 'wb') as f:
        f.write(base64.b64decode(b64image))
    try:
        blob.upload_from_filename(source_file_name)
    finally:
        # Always remove the scratch file, even if the upload raises.
        os.remove(source_file_name)
    return {'message': 'succeeded', 'cloud_storage_filename': destination_blob_name}
def downloadPost(bucket_name, stored_filename):
    """Download *stored_filename* from *bucket_name* and return its contents
    base64-encoded (bytes).

    Fix: file handles are now closed via ``with`` and the scratch file is
    removed in ``finally`` even when the download fails.
    """
    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucket_name)
    blob = bucket.blob(stored_filename)
    tmp_filename = make_tmp_filename()
    try:
        with open(tmp_filename, 'wb') as f:
            blob.download_to_file(f)
        with open(tmp_filename, 'rb') as f:
            encoded = base64.b64encode(f.read())
    finally:
        # Always clean up the scratch file.
        os.remove(tmp_filename)
    return encoded
def make_tmp_filename():
    """Build a scratch PNG path next to this module, suffixed with the
    current microsecond count to keep concurrent calls from colliding."""
    here = os.path.dirname(os.path.abspath(__file__))
    stamp = datetime.now().strftime('%f')
    return os.path.join(here, 'tmp' + stamp + '.png')
def make_dest_name(user_id):
    """Build the bucket object name '<user_id>-<YYYYmmddHHMMSS + microseconds>.png'."""
    stamp = datetime.now().strftime('%Y%m%d%H%M%S%f')
    return '{0}-{1}.png'.format(user_id, stamp)
985,990 | b1e8865a88fa2296c467c5b6b68656ffce64da1e | import matplotlib.pyplot as plt
from binaryclassifier.statistics.quantiles import (
get_bins, get_quantiles_from_bins)
def plot_quantiles(
        y_true, scores, q=10, figsize=(8,5),
        title='Quantile Bar Graph', color=None):
    """Plot the positive-event rate within each score quantile as a bar chart.

    Returns the (closed) matplotlib Figure so the caller can save or embed it.
    """
    # Axis label: special-case the two common splits.
    quant_type = {10: 'Decile', 20: 'Ventile'}.get(q, 'Quantile')
    # Bin the scores and map every score onto its quantile index.
    bins = get_bins(scores, q=q, adjust_endpoints=True)
    quantiles = get_quantiles_from_bins(scores, bins, one_high=True)
    # Event rate (fraction of positives) per quantile, low to high.
    labels = sorted(set(quantiles))
    rates = []
    for label in labels:
        members = y_true[quantiles == label]
        rates.append(members.sum() / float(len(members)))
    fig, ax = plt.subplots(figsize=figsize)
    ax.bar(labels, rates, align='center', color=color)
    ax.set_xticks(labels)
    plt.title(title)
    plt.ylabel('Event Rate')
    plt.xlabel(quant_type)
    plt.close(fig)
    return fig
985,991 | 96939baaf57b464064d5572192706d9e2fe6d4e3 | """
训练LR分类器
"""
import sys ;sys.path.append('../')
from sklearn import metrics
import F_TrainModel.GenDataTool as GDTool
import DataLinkSet as DLSet
import joblib
import time
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
import numpy as np
# 训练模型
def TrainHyBrid(gd, npRatio, nt, modelLink_gbdt, modelLink_enc, modelLink_lr):
    """Train the GBDT -> one-hot -> LR hybrid classifier and persist all parts.

    gd          -- GenDataTool instance that produces the training data
    npRatio     -- negative/positive sampling ratio for Gen_TrainData
    nt          -- number of boosting trees (n_estimators) for the GBDT
    modelLink_* -- joblib dump paths for the GBDT, encoder and LR stages
    """
    t1 = time.time()
    # Build the training set.
    train_X, train_y = gd.Gen_TrainData(npRatio=npRatio, subRatio=1)
    # Split into two equal halves: one fits the GBDT, the other the LR, so
    # the LR is not trained on leaf indices the GBDT has already memorised.
    train_X_gbdt, train_X_lr, train_y_gbdt, train_y_lr = train_test_split(train_X, train_y, test_size=0.5)
    # S1. Train the GBDT classifier.
    GBDT_clf = GradientBoostingClassifier(learning_rate=0.05,
                                          n_estimators=nt,
                                          max_depth=7,
                                          subsample=0.65,
                                          max_features="sqrt")
    GBDT_clf.fit(train_X_gbdt, train_y_gbdt)
    print('gbdt has trained')
    # S2. Fit the one-hot encoder on the GBDT leaf indices.
    enc = OneHotEncoder(categories='auto')
    enc.fit(GBDT_clf.apply(train_X_gbdt)[:, :, 0])
    print('enc has trained')
    # S3. Train the LR classifier on the encoded leaf indices.
    LR_clf = LogisticRegression(solver='saga', max_iter=2000, penalty='l1', n_jobs=-1)
    LR_clf.fit(enc.transform(GBDT_clf.apply(train_X_lr)[:, :, 0]), train_y_lr)
    print('lr has trained')
    t2 = time.time()
    print('train time used %d s' % (t2 - t1))
    # Persist the three model stages.
    joblib.dump(GBDT_clf, modelLink_gbdt)
    joblib.dump(enc, modelLink_enc)
    joblib.dump(LR_clf, modelLink_lr)
# 进行预测
def Predict(gd, modelLink_gbdt, modelLink_enc, modelLink_lr, thre=0.5):
    """Load the persisted hybrid model, score the judge set and return boolean
    predictions at the F1-maximising probability cut-off.

    NOTE(review): the *thre* parameter is unused — the cut-off is chosen by
    scanning co over [0.01, 0.96] in 0.05 steps and keeping the argmax-F1 one.
    """
    # Load the three model stages.
    GBDT_clf = joblib.load(modelLink_gbdt)
    enc = joblib.load(modelLink_enc)
    LR_clf = joblib.load(modelLink_lr)
    # Build the evaluation ("judge") set.
    judge_X, judge_y = gd.Gen_JudgeData(subRatio=1)
    # Positive-class probabilities from the GBDT-leaf -> one-hot -> LR pipeline.
    temp = LR_clf.predict_proba(enc.transform(GBDT_clf.apply(judge_X)[:, :, 0]))
    # Scan candidate cut-offs and report F1 / precision / recall for each.
    f1Set = []
    coSet = []
    for co in np.arange(0.01, 1, 0.05):
        judge_y_pred = (temp[:, 1] > co)
        f1 = metrics.f1_score(judge_y, judge_y_pred)
        p = metrics.precision_score(judge_y, judge_y_pred)
        r = metrics.recall_score(judge_y, judge_y_pred)
        print('co = %.2f' % co, ' f1 = %.4f' % f1, ' Precision = %.6f' % p, ' Recall = %.4f' % r)
        f1Set.append(f1)
        coSet.append(co)
    # ShowPic(coSet, f1Set, "penalty='l1'", 'hybrid: co -> f1', 'co')
    print('argmax co = %f' % (0.01 + 0.05 * np.argmax(f1Set)))
    # Binary predictions at the best cut-off found above.
    return temp[:, 1] > (0.01 + 0.05 * np.argmax(f1Set))
if __name__ == '__main__':
    # Assemble the data tool from the configured train/judge feature links.
    gd = GDTool.GenDataTool(
        DLSet.new_trainData_cluster_link,
        DLSet.feature_U_train_link,
        DLSet.feature_I_train_link,
        DLSet.feature_UI_train_link,
        DLSet.trainData_scaler_link,
        DLSet.saleInfo_Judge_R_UILabel_link,
        DLSet.feature_U_judge_link,
        DLSet.feature_I_judge_link,
        DLSet.feature_UI_judge_link,
    )
    # Training is disabled by default; uncomment to refit the hybrid model.
    # TrainHyBrid(gd, 19, 160,
    #             DLSet.hybrid_classifier_GBDT_link,
    #             DLSet.hybrid_classifier_ENC_link,
    #             DLSet.hybrid_classifier_LR_link)
    # Score the judge set with the persisted model and write the result file.
    res = Predict(gd,
                  DLSet.hybrid_classifier_GBDT_link,
                  DLSet.hybrid_classifier_ENC_link,
                  DLSet.hybrid_classifier_LR_link)
    gd.Gen_Res(res, DLSet.resLRGBDT_link)
|
985,992 | 21aa5ed20ba78cbd6b3e10699a7bdbeb9c4ce145 | from segmentor import make_cuter
from utils import *
import tqdm
tqdm.tqdm.pandas(ncols=75)
import pandas as pd
def cut_word(df, cuter, cleaner, par=True):
    """Segment the 'title' and 'context' columns of *df* with *cuter*, filter
    tokens with *cleaner*, and return both as a two-column DataFrame.

    par=True runs the context column through the parallel helper para_apply.

    Fix: the inner worker used a bare ``except:`` which also swallowed
    KeyboardInterrupt/SystemExit; it now catches ``Exception`` only.
    """
    title_words = df.title.progress_apply(lambda j: cleaner(cuter(j)))
    # Module-level counter shared with the worker so para_apply can report
    # approximate progress across threads.
    global co
    co = 0
    def f(j):
        global co
        co += 1
        if co % 100 == 0:
            print('\r 大概进行到了', round(co*4*100/df.shape[0], 2), '%', end='')
        try:
            return cleaner(cuter(j.context))
        except Exception:
            # Best effort: rows whose context cannot be segmented yield [].
            return []
    if par:
        print(' 多线程分词:')
        context_words = para_apply(df, f)
        print(' 多线程分词完成。')
    else:
        context_words = df.context.progress_apply(lambda j: cleaner(cuter(j)))
    return pd.concat([title_words, context_words], axis=1)
import sys
# Allow "python <script> <n> <par>" invocation from the command line.
if len(sys.argv) > 1:
    n = int(sys.argv[1])
    par = int(sys.argv[2])
else:
    # Defaults used when running interactively.
    n = 4
    par = False
#print(n,par)
if __name__ == '__main__':
    cuter = make_cuter(n)
    #print(cuter)
    train_text = load('train_text')
    test_text = load('test_text', )
    train_list = load('train_list', )
    # Load the stop-word dictionary.
    with open('../stopwords.txt', encoding='utf-8') as f:
        stop_words = set(f.read().split('\n'))
    def clean(l):
        # Drop short tokens, stop words, and tokens containing a comma.
        return [w for w in l if not (len(w.strip()) < 2 or w.lower() in stop_words or ',' in w)]
    # Segment and clean the train/test corpora, then persist them.
    train_word = cut_word(train_text, cuter, clean, par)
    train_word.columns = ['title', 'context']
    #raise
    test_word = cut_word(test_text, cuter, clean, par)
    test_word.columns = ['title', 'context']
    split_save(test_word, 'test_word' + str(n))
    save('train_word' + str(n), train_word)
    #save('test_word' + str(n), test_word)
985,993 | 74ac540fd27474b68de051455e63d52758a05688 | ## Python implementation of MBMA (Van den Bosch & Daelemans 1999)
## Copyright (C) 2011 Institute for Dutch Lexicology (INL)
## Author: Folgert Karsdorp, INL
## E-mail: <servicedesk@inl.nl>
## URL: <http://www.inl.nl/>
## For licence information, see LICENCE.TXT
"""
Commandline interface to mbmp.
"""
import argparse
import codecs
import os
import sys
import time
import mbmp.config as config
from mbmp.classifiers import MBMA, MBMS, MBMC, MBLEM, MBPT
from mbmp.parse import MbmaParser, MbmaCKYParser
def demo(draw_parses=None, print_parses=None):
    """
    A simple demo showing some basic functionality: parse two Dutch compounds
    with MBMA and optionally draw and/or print the resulting parse trees
    (prompting on stdin when the flags are not given).  Python 2 code.
    """
    demos = ['aandeelhoudersvergadering', 'hardloopwedstrijd']
    trees = []
    # The with-statement guarantees the timbl server is shut down afterwards.
    with MBMA() as program:
        for word in demos:
            print 'Parsing: %s' % word
            results = program.classify(word)
            trees.extend(program.trees(results))
    if draw_parses is None:
        print
        print 'Draw parses (y/n)?',
        draw_parses = sys.stdin.readline().strip().lower().startswith('y')
    if draw_parses:
        from nltk.draw.tree import draw_trees
        print ' please wait...'
        draw_trees(*trees)
    if print_parses is None:
        print
        print 'Print parses (y/n)?',
        print_parses = sys.stdin.readline().strip().lower().startswith('y')
    if print_parses:
        for parse in trees:
            print parse
class CommandLine(argparse.ArgumentParser):
    """Commandline options for mbmp (argparse subclass; options are declared
    in __init__ and read via the standard parse_args())."""
    def __init__(self):
        argparse.ArgumentParser.__init__(self, prog = 'mbmp', description = '''
        Memory-Based Morphological Parsing (MBMP), an implementation of
        MBMA with extended functionality in Python based on
        Van den Bosch & Daelemans (1999).
        For more options, see the config.py file.''')
        # Training data / instance base are mutually informative: main()
        # prefers -f over -i when both are supplied.
        self.add_argument(
            '-f', dest = 'trainingfile', type = str, required = False,
            help = 'the path pointing to the trainingfile.')
        self.add_argument(
            '-i', dest = 'instancebase', type = str, required = False,
            help = 'the path pointing to the instance-base.')
        self.add_argument(
            '-t', dest = 'testfile', type = str,
            required = True, help = '''The path pointing to the testfile.
            File must consist of one word per line''')
        self.add_argument(
            '-o', dest = 'output', type = argparse.FileType('w'),
            required = False, default = sys.stdout,
            help = 'the path pointing to the output file.')
        self.add_argument(
            '-p', dest = 'process', type = str, required = False,
            default='parse',
            choices = [
                'parse', 'segmentize', 'lemmatize', 'pos-tagging', 'chunk'],
            help = 'Choose what classification to perform.')
        self.add_argument(
            '--parser', dest = 'parser', type=str, required = False,
            default = 'pcfg', choices = ['cfg', 'pcfg'],
            help = 'Choose what parser to use.')
        self.add_argument(
            '--lemmatize', dest = 'morph_repr', type = str, required = False,
            default = 'token',
            choices = ['tokens', 'lemmas', 'tokens-and-lemmas'],
            help = '''Choose how te represent the morphemes in the printed
            trees. "lemmas" returns a lemmatized representation of the
            morphemes, "tokens" returns the original segmentation of the
            morphemes and "tokens-and-lemmas" returns a representation like
            token=lemma.''')
        self.add_argument(
            '--pprint', dest = 'print_tree', action = 'store_const',
            const=True, default = False,
            help = '''Return a (pretty) hierarchical tree representation of
            the parse (only works with option 'parse' and 'chunk').''')
        self.add_argument(
            '--port', dest = "port", type = int, required = False,
            default = False, help = 'The tcp port for timblserver')
        self.add_argument(
            '--version', action='version', version='%(prog)s 0.3')
def main():
    """Command-line entry point: pick a classifier from -p, configure it from
    the CLI flags, then classify every word of the test file and write the
    formatted results to the output stream.  Python 2 code."""
    args = CommandLine().parse_args()
    # setup the chosen classifier and load the appropriate configuration
    if args.process == 'parse':
        classifier, settings = MBMA, config.MBMA_CONFIG
    elif args.process == 'chunk':
        classifier, settings = MBMC, config.MBMC_CONFIG
    elif args.process == 'segmentize':
        classifier, settings = MBMS, config.MBMS_CONFIG
    elif args.process == 'lemmatize':
        classifier, settings = MBLEM, config.MBLEM_CONFIG
    elif args.process == 'pos-tagging':
        classifier, settings = MBPT, config.MBPT_CONFIG
    # if another PORT is chosen, set it in CONFIG
    if args.port:
        config.PORT = args.port
    # check if trainingfile or instancebase is an existing file and
    # add this to the configuration. If no file is given we stick
    # to the default file that comes with a particular classifier
    if args.trainingfile:
        if not os.path.isfile(args.trainingfile):
            raise IOError('Trainingfile not found')
        settings['f'] = args.trainingfile
        del settings['i']
    elif args.instancebase:
        if not os.path.isfile(args.instancebase):
            raise IOError('Instancebase not found')
        settings['i'] = args.instancebase
    # if hierarchical parsing is chosen, initialize the parser
    if args.process == 'parse' and args.print_tree:
        if args.parser == 'cfg':
            parser = MbmaParser
        else:
            sys.stderr.write('Loading PCFG...\n')
            parser = MbmaCKYParser
    else:
        parser = None
    # initialize the classifier (best to do this in a with-statement
    # so that in case of any unexpected errors, the timbl server is killed.)
    with classifier(config.HOST, config.PORT,
                    settings, parser=parser) as program:
        counter = 0
        count_limit = 100
        args.output.write(codecs.BOM_UTF8)
        # process all words each at a time
        for i, word in enumerate(codecs.open(args.testfile,
                                             encoding=config.ENCODING)):
            counter += 1
            word = word.strip()
            # Input words may not contain spaces; skip (with warning).
            if ' ' in word:
                sys.stderr.write(
                    'No spaces allowed within words! skipping %r\n' % word)
                continue
            results = program.classify(word)
            if args.print_tree and args.process in ('parse', 'chunk'):
                if args.process == 'parse':
                    # One header line per word, then its numbered parse trees
                    # (or the flat parse when no tree could be built).
                    args.output.write(
                        u'# {0} {1}:\n'.format(i, word).encode('utf-8'))
                    trees = list(program.trees(results, mrepr=args.morph_repr))
                    if not trees:
                        args.output.write(
                            u' {0} {1}\n'.format(
                                1, program.pprint_parse(results)).encode('utf-8'))
                    else:
                        for j, tree in enumerate(trees):
                            args.output.write(
                                u' {0} {1}\n'.format(
                                    j+1, tree.pprint(indent=5)).encode('utf-8'))
                else:
                    args.output.write(
                        u'# {0} {1}:\n'.format(i, word).encode('utf-8'))
                    trees = program.trees(results)
                    args.output.write(
                        u'# {0}\n'.format(
                            trees.pprint(indent=2)).encode('utf-8'))
            elif args.process == 'pos-tagging':
                args.output.write(
                    u'{0}\t{1}\n'.format(word, results).encode('utf-8'))
            else:
                args.output.write(
                    u'# {0} {1}\t{2}\n'.format(
                        i, word, program.pprint_parse(results)).encode('utf-8'))
            # Progress report at exponentially growing intervals.
            if counter == count_limit:
                sys.stderr.write(
                    'Processed: {0} words @ {1}\n'.format(
                        counter, time.ctime()))
                count_limit *= 2
# Script entry point.
if __name__ == '__main__':
    main()
|
985,994 | 21c75ee5eb440cad21729437ce9be95ddb089e35 | import numpy as np
import cv2
import functions as fcns
# Sample frame from the driving log used to demonstrate the augmentations.
path = 'center_2016_12_01_13_30_48_287.jpg'
img = cv2.imread(path)
# Show the original frame (window closes on any key press).
cv2.imshow('image',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
# Horizontal mirror augmentation.
img_flipped = np.fliplr(img)
cv2.imshow('image',img_flipped)
cv2.waitKey(0)
cv2.destroyAllWindows()
path_save = 'center_2016_12_01_13_30_48_287_flipped.jpg'
cv2.imwrite(path_save, img_flipped)
# Translation augmentation (shift by 3 px in each axis) via the project helper.
img_trans = fcns.translate(img, 3, 3)
path_save2 = 'center_2016_12_01_13_30_48_287_translated.jpg'
cv2.imwrite(path_save2, img_trans)
985,995 | cc05a5e3b24e5648c375fbb56450990b9cecc114 | from plotting import OutputModule
from plotting.PlotStyle import setupAxes
from ROOT import Double,TGraphErrors,TLegend,vector,TMath,gPad,TPad
from plotting.RootFileHandler import commandLine
from array import array
import math
import math
from numpy import nan, dtype
import numpy as np
# Module-wide command-line logger, prefixing output with '[Utils.py] '.
commandLine = OutputModule.CommandLineHandler('[Utils.py] ')
# L1-trigger binning granularities: phi bins of pi/72 rad, eta bins of 0.1.
L1_PHI_BIN = math.pi/72.
L1_ETA_BIN = 0.1
def average2DHistogramBinwise(histWeights,histCounter):
    """Divide each bin of *histWeights* by the matching bin of *histCounter*
    (skipping empty counter bins), turning a weight sum into a bin-wise mean.
    Modifies and returns *histWeights*.

    NOTE(review): bins are iterated from index 0, which in ROOT addresses the
    underflow bin and stops one short of the last regular bin — confirm this
    range is intended.
    """
    for i in range(0,histWeights.GetNbinsX()):
        for j in range(0,histWeights.GetNbinsY()):
            if histCounter.GetBinContent(histCounter.GetBin(i,j)) != 0:
                histWeights.SetBinContent(histWeights.GetBin(i,j),histWeights.GetBinContent(histWeights.GetBin(i,j))
                                          /histCounter.GetBinContent(histCounter.GetBin(i,j)))
    return histWeights
#Set axis range and labels for the 2D histograms showing E Average around L1 direction
def setupEAvplot(histE,histC = None,xmin = -0.4, xmax = 0.4, ymin = -0.4, ymax = 0.4,same = False, limitForAll = None):
    """Set axis ranges and labels on a delta-eta/delta-phi energy histogram.

    If *histC* is given, *histE* is first averaged bin-wise by that counter
    histogram.  With same=True all four limits become +-limitForAll (falling
    back to the defaults, with a warning, if limitForAll is missing).
    Returns the styled histogram.
    """
    if histC != None:
        histE = average2DHistogramBinwise(histE,histC)
    if same:
        if limitForAll == None:
            commandLine.output('WARNING: Requested same histogram borders for all ranges but '
                               'did not give limitForAll parameter. Using default values instead!')
        else:
            xmin = ymin = -limitForAll
            xmax = ymax = limitForAll
    histE.GetXaxis().SetRangeUser(xmin,xmax)
    histE.GetYaxis().SetRangeUser(ymin,ymax)
    histE.SetStats(0)
    histE.GetXaxis().SetTitle('#Delta#eta')
    histE.GetYaxis().SetTitle('#Delta#phi')
    histE.GetZaxis().SetTitle('Reconstructed Energy / GeV')
    setupAxes(histE)
    return histE
def fillGraphIn2DHist(graph,hist):
    """Fill every (x, y) point of a TGraph into the 2D histogram *hist*,
    printing progress every 10k points.  Returns *hist*.  Python 2 code."""
    # ROOT's GetPoint writes into these Double references.
    x = Double(0)
    y = Double(0)
    commandLine.output('Filling graph in 2D histogram:')
    nTotal = graph.GetN()
    for i in range(0,nTotal):
        graph.GetPoint(i,x,y)
        hist.Fill(x,y)
        if(not i%10000):
            commandLine.printProgress(i,nTotal)
        if(i == nTotal - 1):
            commandLine.printProgress(nTotal, nTotal)
    # Terminate the progress line.
    print
    return hist
#Returns a 2D hisotgram containing the binwise difference of both objects
#Python histogram means the entries matrix coming from numpy histogram 2d
def comparePythonAndRoot2DHist(pythonHist, rootHist):
    """Return a 2D histogram of the bin-wise difference between a numpy
    histogram2d entries matrix and a ROOT 2D histogram (python minus root).

    The numpy matrix rows are reversed because its y-axis runs opposite to
    ROOT's bin numbering.  Returns None (with an error) on a bin mismatch.
    """
    contourLevels = [-1,-.2,-.1,.1,.2,1]
    if not (len(pythonHist) == rootHist.GetNbinsY() and len(pythonHist[0]) == rootHist.GetNbinsX()):
        commandLine.output('Error! Cannot compare python and root histogram with different number of bins!')
        return
    comparisonHist = rootHist.Clone('comparison' + rootHist.GetName())
    comparisonHist.Reset()
    comparisonHist.SetStats(0)
    comparisonHist.SetContour(len(contourLevels),array('d',contourLevels))
    for j,y in enumerate(reversed(pythonHist)):
        for i,x in enumerate(y):
            # +1 offsets skip ROOT's underflow bin.
            comparisonHist.SetBinContent(i+1,len(pythonHist)-j, x - rootHist.GetBinContent( i+1 , len(pythonHist) - j))
            #sys.stdout.write( str(x - histNormalBins.GetBinContent( i+1 , len(pythonHist) - j)) + '\t' )
            pass
    return comparisonHist
#Returns a 2D hisotgram containing the binwise difference of both objects
def compareTwoRoot2DHists(rootHist1, rootHist2):
    """Return a 2D histogram of the bin-wise difference of two ROOT 2D
    histograms (hist1 minus hist2); None (with an error) on a bin mismatch.

    NOTE(review): the first contour level is -3.4e48 (vs -1 in the python
    comparison above) — confirm that is deliberate.
    """
    contourLevels = [-3.4e48,-.2,-.1,.1,.2,1]
    if not (rootHist1.GetNbinsX() == rootHist2.GetNbinsX() and rootHist1.GetNbinsY() == rootHist2.GetNbinsY()):
        commandLine.output('Error! Cannot compare two root histograms with different number of bins!')
        return
    comparisonHist = rootHist1.Clone('comparison' + rootHist1.GetName())
    comparisonHist.Reset()
    comparisonHist.SetStats(0)
    comparisonHist.SetContour(len(contourLevels),array('d',contourLevels))
    for x in range(0,rootHist1.GetNbinsX()):
        for y in range(0,rootHist1.GetNbinsY()):
            comparisonHist.SetBinContent(x,y,rootHist1.GetBinContent(x,y) - rootHist2.GetBinContent(x,y))
            pass
    return comparisonHist
def extractTEfficiencyToList(tEffObject):
    """Unpack a ROOT TEfficiency into four parallel lists:
    bin centers, efficiency in percent, and the lower/upper errors.
    Bins with an empty total histogram are skipped."""
    xVals = []
    yVals = []
    yErrLow = []
    yErrUp = []
    for i in range(tEffObject.GetPassedHistogram().GetNbinsX()):
        if tEffObject.GetTotalHistogram().GetBinContent(i) != 0:
            # passed/total expressed as a percentage.
            yVals.append(tEffObject.GetPassedHistogram().GetBinContent(i)/tEffObject.GetTotalHistogram().GetBinContent(i)*100)
            xVals.append(tEffObject.GetTotalHistogram().GetBinCenter(i))
            yErrLow.append(tEffObject.GetEfficiencyErrorLow(i))
            yErrUp.append(tEffObject.GetEfficiencyErrorUp(i))
    return xVals, yVals,yErrLow,yErrUp
def getTGraphErrors(x, y, ex=None, ey=None):
    """Build a ROOT TGraphErrors from python sequences.

    x, y   -- point coordinates
    ex, ey -- per-point errors; default to all-zero when omitted

    Fix: ``== None`` replaced with ``is None`` — identity is the correct
    check for the sentinel, and ``==`` misbehaves on array-like arguments
    (e.g. numpy arrays return an element-wise comparison).
    """
    if ex is None:
        ex = [0] * len(x)
    if ey is None:
        ey = [0] * len(x)
    return TGraphErrors(len(x), array('f', x), array('f', y), array('f', ex), array('f', ey))
def getLegend(x1=.6, y1=.8, x2=.9, y2=.85):
    """Create a TLegend spanning the given NDC rectangle (top-right corner
    of the pad by default)."""
    return TLegend(x1, y1, x2, y2)
def makeResidualsPad(pad):
    """Split *pad* into a large top plot pad (upper 70%) and a thin bottom
    residuals pad (lower 30%) that touch at y=0.3, then select the top pad.
    Returns the pad."""
    pad.Divide(1,2)
    # Top pad: no bottom margin so it meets the residuals pad seamlessly.
    pad.cd(1).SetBottomMargin(0)
    pad.cd(1).SetPad(0,0.3,1,1)
    # Bottom pad: no top margin; keep room for the x-axis labels.
    pad.cd(2).SetTopMargin(0)
    pad.cd(2).SetBottomMargin(0.15)
    pad.cd(2).SetPad(0,0,1,0.3)
    pad.cd(1)
    return pad
def calcPercent(numerator, denominator):
    """Return numerator/denominator expressed as a percentage; on a zero
    denominator, log an error and return NaN instead of raising."""
    if denominator == 0:
        commandLine.error('Tried to divide by 0')
        return nan
    return (numerator / float(denominator)) * 100
def getXinNDC(x):
    """Convert the user-coordinate *x* of the current gPad into normalized
    device coordinates (0..1 across the pad width)."""
    # Update first so GetX1/GetX2 reflect the pad's final ranges.
    gPad.Update()
    return (x - gPad.GetX1())/(gPad.GetX2()-gPad.GetX1())
def phiWrapCheck(phi2, phi1):
    """Return the azimuthal difference phi2 - phi1 wrapped into [-pi, pi]."""
    delta_phi = phi2 - phi1
    if delta_phi < -math.pi:
        delta_phi = 2*math.pi + delta_phi
    elif delta_phi > math.pi:
        delta_phi = delta_phi - 2*math.pi
    return delta_phi
def getMedian(th1d):
    """Attempt the weighted median of a TH1D's bin contents (work in progress).

    NOTE(review): this is Python 2 debugging code -- every intermediate is
    printed, two different medians are printed rather than returned
    (np.median over BOTH arrays is not a weighted median; TMath.Median is),
    and the function currently returns None.  Confirm intent before use.
    """
    n = th1d.GetXaxis().GetNbins()
    # ROOT std::vector<double> buffer to receive the bin centers
    xVect = vector('double')(n)
    print xVect
    xVect = np.array(xVect)
    print xVect
    th1d.GetXaxis().GetCenter( xVect )
    print xVect
    # raw bin-content buffer from ROOT; SetSize exposes n entries to Python
    yVect = th1d.GetArray()
    print yVect
    yVect.SetSize(n)
    print yVect
    yVect = np.array(yVect)
    print yVect
    print np.median([xVect,yVect])
    print TMath.Median(n,xVect,yVect)
    # const double * y = h1->GetArray();
    # // exclude underflow/overflows from bin content array y
    # return TMath::Median(n, &x[0], &y[1]);
|
985,996 | b968f17c056f6eaba1884e5f886b4a9ab054c6ab | # coding=utf-8
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
class Course(models.Model):
    """A course offering with fixed start and end dates."""
    name = models.CharField(max_length=40)
    short_summary = models.TextField(max_length=200)
    description = models.TextField()
    organisation = models.CharField(max_length=30)
    logo = models.FileField(upload_to='logos')
    start_date = models.DateField()
    end_date = models.DateField()
    class Meta:
        # custom permission used to gate course applications
        permissions = (('can_apply', 'Can apply for Course'),)
    def __repr__(self):
        return "%s %s" % (self.name, str(self.start_date))
    def __unicode__(self):
        # Python 2-era Django display hook
        return unicode("%s %s" % (self.name, str(self.start_date)))
class Lecture(models.Model):
    """A single video lecture belonging to a Course."""
    name = models.CharField(max_length=100)
    youtube_video_id = models.CharField(max_length=32)
    # NOTE(review): unique=True makes `order` globally unique across ALL
    # lectures, not unique per course -- confirm that is intended
    order = models.IntegerField(unique=True)
    date = models.DateField()
    course = models.ForeignKey(Course)
    def __repr__(self):
        return self.name
    def __unicode__(self):
        # Python 2-era Django display hook
        return unicode(self.name)
class Document(models.Model):
    """An uploaded file attached to a Course."""
    name = models.CharField(max_length=200)
    course = models.ForeignKey(Course, blank=True)
    doc = models.FileField(upload_to='docs')
    upload_date = models.DateTimeField()
    # presumably the time the document becomes visible to students --
    # verify against the views that query it
    appear_date = models.DateTimeField()
class Feedback(models.Model):
    """Free-form feedback text with no author attribution."""
    body = models.TextField()
    def __repr__(self):
        return self.body
    def __unicode__(self):
        return "feedback %s" % self.body
class UserProfile(models.Model):
    """Per-user profile linking a Django User to enrolled courses."""
    user = models.OneToOneField(User, unique=True)
    courses = models.ManyToManyField(Course, blank=True)
    def is_student(self):
        """True when the user belongs to the 'students' group."""
        # .exists() issues a cheap EXISTS query; the old
        # `True if len(qs) > 0 else False` fetched every matching row only
        # to count them, and the ternary was redundant.
        return self.user.groups.filter(name='students').exists()
    def is_instructor(self):
        """True when the user belongs to the 'instructors' group."""
        return self.user.groups.filter(name='instructors').exists()
    def __repr__(self):
        return str(self.user)
    def __unicode__(self):
        return "%s's profile" % self.user
def create_user_profile(sender, instance, created, **kwargs):
    """post_save handler: ensure every newly created User gets a UserProfile."""
    if created:
        # get_or_create keeps this idempotent; the returned tuple was unused,
        # and its second element shadowed the signal's `created` argument.
        UserProfile.objects.get_or_create(user=instance)
post_save.connect(create_user_profile, sender=User)
|
985,997 | 6a8f34038f9b182e54e03175f7b93da38a867406 | import requests
import progressbar as pb
import os
def download_file(url, file_name, dest_dir):
    """Download *url* to dest_dir/file_name with a progress bar.

    Returns the full path on success, or None on any network/HTTP failure.
    If the destination file already exists it is returned immediately
    without touching the network.
    """
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    full_path_to_file = dest_dir + os.path.sep + file_name
    if os.path.exists(full_path_to_file):
        return full_path_to_file
    print("Downloading " + file_name + " from " + url)
    try:
        r = requests.get(url, allow_redirects=True, stream=True)
    except requests.RequestException:
        # was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit
        print("Could not establish connection. Download failed")
        return None
    try:
        # Check the status BEFORE trusting the headers: the original read
        # Content-Length first and raised KeyError on error responses that
        # lack the header.
        if r.status_code != requests.codes.ok:
            print("Error occurred while downloading file")
            return None
        # servers may omit Content-Length (e.g. chunked transfer encoding)
        file_size = int(r.headers.get('Content-Length', 0))
        chunk_size = 1024
        num_bars = max(round(file_size / chunk_size), 1)
        bar = pb.ProgressBar(maxval=num_bars).start()
        count = 0
        with open(full_path_to_file, 'wb') as file:
            for chunk in r.iter_content(chunk_size=chunk_size):
                file.write(chunk)
                # clamp: the final partial chunk can push count past maxval
                bar.update(min(count, num_bars))
                count += 1
        bar.finish()
        return full_path_to_file
    finally:
        # release the pooled connection (streamed responses hold it open)
        r.close()
|
985,998 | ca713201f48ac8fa5eff2883c4326e994763c692 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import jinja2
import os
import logging
import re
import string
import hashlib
import random
import time
import datetime
import cgi
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.api import images
from urlparse import urljoin
from bs4 import BeautifulSoup, Comment
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
def authenticate(auth_token):
    """Return True when *auth_token* matches some TrustedPartner's local token.

    Bug fix: db.GqlQuery(...) returns a query object, which is never None,
    so the old `query != None` test accepted EVERY token.  We must actually
    fetch a row and check that one exists.
    """
    partner = db.GqlQuery(
        "SELECT * FROM TrustedPartner WHERE local_auth_token = :1",
        auth_token).get()
    return partner is not None
def sanitizeHTML(value, base_url=None):
    """Whitelist-sanitize an HTML fragment.

    Strips HTML comments, hides tags outside a small whitelist, drops
    attributes outside a whitelist, removes javascript:/vbscript: payloads
    (including entity-obfuscated variants) from attribute values, and makes
    href/src absolute against *base_url*.  Returns the cleaned markup as a
    unicode string.
    """
    # Build patterns matching 'javascript:'/'vbscript:' even when each
    # letter is separated by whitespace or a hex character entity.
    rjs = r'[\s]*(&#x.{1,7})?'.join(list('javascript:'))
    rvb = r'[\s]*(&#x.{1,7})?'.join(list('vbscript:'))
    re_scripts = re.compile('(%s)|(%s)' % (rjs, rvb), re.IGNORECASE)
    validTags = 'p i strong b u a h1 h2 h3 pre br img input'.split()
    validAttrs = 'href src width height class name id type value'.split()
    urlAttrs = 'href src'.split() # Attributes which should have a URL
    soup = BeautifulSoup(value)
    for comment in soup.findAll(text=lambda text: isinstance(text, Comment)):
        # Get rid of comments
        comment.extract()
    for tag in soup.findAll(True):
        if tag.name not in validTags:
            # hidden=True renders the tag's children but not the tag itself
            tag.hidden = True
        attrs = dict(tag.attrs)
        tag.attrs = {}
        for attr, val in attrs.iteritems():
            if attr in validAttrs:
                val = re_scripts.sub('', val) # Remove scripts (vbs & js)
                if attr in urlAttrs:
                    val = urljoin(base_url, val) # Calculate the absolute url
                tag.attrs[attr] = val
    ret = soup.renderContents().decode('utf8')
    #if strip_quotes:
    #    ret = re.sub(r"[\"']", '', ret)
    return ret
jinja_environment.globals.update(sanitizeHTML=sanitizeHTML)
jinja_environment.globals.update(escape=cgi.escape)
def quick_sanitize(input):
    """Drop every character outside a conservative whitelist (alphanumerics,
    space, and $ % - _ ! . & = / : ?)."""
    disallowed = re.compile(r"[^a-zA-Z0-9 \$\%\-_\!\.&=/:\?]")
    return disallowed.sub('', input)
def render_template(handler_object, file_name, template_values):
    """Render *file_name* with shared layout context and write the response.

    Injects the signed-in user's LoginInformation, admin flag, login/logout
    URLs and unread-message count into *template_values*.  Users flagged
    inactive are shown the inactive-notification page instead, and users
    who have not completed registration are redirected to
    /users/verify_user/.
    """
    user = users.get_current_user()
    if user:
        current_li = db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", user.user_id()).get()
    else:
        current_li = None
    if current_li:
        template_values['is_admin'] = current_li.is_admin
    else:
        # no profile yet: fall back to the App Engine admin flag
        template_values['is_admin'] = users.is_current_user_admin()
    template_values['current_li'] = current_li
    template_values['user'] = user
    template_values['logout_url'] = users.create_logout_url('/')
    template_values['login_url'] = users.create_login_url('/users/verify_user/')
    if user:
        li = db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", user.user_id()).get()
        template_values['unread_messages'] = db.GqlQuery("SELECT * FROM Message WHERE recipient_id = :1 AND read = :2", user.user_id(), False).count()
        if li and not(li.is_active):
            # inactive accounts see the notification page regardless of the
            # page that was requested
            file_name = '/users/inactive_notification.html'
    #check to make sure they've registered (and check for infinite redirects)
    if user and string.find(handler_object.request.uri, '/users/verify_user/') == -1 and current_li and (current_li.first_name == "" or current_li.last_name == "" or current_li.nickname == ""):
        handler_object.redirect('/users/verify_user/')
    else:
        template = jinja_environment.get_template(file_name)
        handler_object.response.out.write(template.render(template_values))
def get_user(user_id):
    """Fetch the LoginInformation entity for *user_id*, or None."""
    query = db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", user_id)
    return query.get()
def get_current_li():
    """LoginInformation for the signed-in user, or None when logged out."""
    current_user = users.get_current_user()
    if not current_user:
        return None
    return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", current_user.user_id()).get()
class LoginInformation(db.Model):
    """Datastore profile for a user: identity, flags, avatar and XSRF state."""
    first_name = db.StringProperty()
    last_name = db.StringProperty()
    #belongs_to User
    user_id = db.StringProperty()
    email = db.StringProperty()
    is_active = db.BooleanProperty()
    is_admin = db.BooleanProperty()
    avatar = db.BlobProperty()
    nickname = db.StringProperty()
    # when True, other users only see the nickname (see get_display_name)
    private = db.BooleanProperty()
    # single-use token; rotated on every verification attempt
    xsrf_token = db.StringProperty()
    desc = db.TextProperty()
    # account created via a TrustedPartner rather than Google login
    external_user = db.BooleanProperty()
    def display_avatar(this):
        """HTML <img> tag for the stored avatar, or the default image."""
        if this.avatar:
            return '<img src="/images/?avatar_id=' + this.user_id + '"/>'
        else:
            return '<img src="/img/default_user.png" />'
    def get_private_display_name(this):
        """Full real name (shown when the profile is public or it's your own)."""
        return this.first_name + " " + this.last_name
    def get_public_display_name(this):
        """Nickname only (shown for private profiles)."""
        return this.nickname
    def get_display_name(this):
        """Real name when permitted, otherwise the nickname."""
        li = get_current_li()
        if (this.private == False) or (li and this.user_id == li.user_id):
            return this.get_private_display_name()
        else:
            return this.get_public_display_name()
    def create_xsrf_token(this):
        #create a token based off of their name, random number, id, and time, then hash via sha512
        #won't scale too great, but should be pretty secure
        # NOTE(review): time.clock() was removed in Python 3.8; fine on the
        # Python 2 GAE runtime this targets, but not portable.
        random.seed(os.urandom(32))
        this.xsrf_token = hashlib.sha512(str(random.random()) + this.last_name + str(this.key()) + this.first_name + str(time.clock())).hexdigest()
        logging.info("created xsrf_token " + this.xsrf_token)
        this.put()
        return this.xsrf_token
    def verify_xsrf_token(this, request):
        """One-shot token check: compares the submitted token, invalidating it first."""
        #change the token to make it obsolete
        # (the stored token is re-hashed BEFORE comparing, so a replayed
        # form post can never match twice)
        old_token = this.xsrf_token
        this.xsrf_token = hashlib.sha512(this.xsrf_token).hexdigest()
        this.put()
        logging.info("__li id: " + str(this.key().id()))
        if old_token == request.request.get('xsrf_token'):
            return True
        else:
            logging.info("verify_xsrf_token failed: " + old_token + ", " + request.request.get('xsrf_token'))
            return False
    def get_average_rating(this):
        """Mean UserFeedback rating as a 2-decimal string, or None if unrated."""
        #grab all ratings
        ratings = db.GqlQuery("SELECT * FROM UserFeedback WHERE for_user_id = :1", this.user_id)
        total = 0
        for r in ratings:
            total += r.rating
        if(ratings.count() > 0):
            return '%.2f' % float(float(total)/float(ratings.count()))
        else:
            return None
def create_external_user(user_id):
    """Create a placeholder LoginInformation for a partner-site user.

    Returns the saved entity, or None when the datastore put fails.
    """
    li = LoginInformation(first_name="Anonymous", last_name="User", email="anon@anon.com", user_id=user_id, is_active=True, is_admin=False, nickname="Anon", private=False, external_user=True)
    try:
        li.put()
    except db.TransactionFailedError:
        # bug fix: TransactionFailedError lives on google.appengine.ext.db;
        # the bare name was never imported, so the old handler raised
        # NameError instead of catching the failure
        return None
    return li
class Thread(db.Model):
    """A conversation between two users, optionally tied to an Item."""
    title = db.StringProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    recipient_id = db.StringProperty()
    #has_many messages
    #belongs_to User
    created_by_id = db.StringProperty()
    item_details = db.StringProperty()
    # set when the other party lives on a TrustedPartner site
    external_conversation = db.BooleanProperty()
    external_conversation_id = db.StringProperty()
    partner_id = db.StringProperty()
    item_id = db.StringProperty()
    def messages(this):
        """Messages stored under this thread (datastore ancestor query)."""
        return db.GqlQuery("SELECT * FROM Message WHERE ANCESTOR is :1", this.key())
    def get_recipient(this):
        """LoginInformation of the recipient (or None)."""
        return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", this.recipient_id).get()
    def get_creator(this):
        """LoginInformation of the thread starter (or None)."""
        return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", this.created_by_id).get()
class Message(db.Model):
    """A single message; expected to live under a Thread as its ancestor."""
    body = db.TextProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    created_by_id = db.StringProperty()
    recipient_id = db.StringProperty()
    # False until the recipient views it (drives the unread count in
    # render_template)
    read = db.BooleanProperty()
    #belongs_to Thread
    def get_sender(this):
        """LoginInformation of the author (or None)."""
        return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", this.created_by_id).get()
def save_message(message, thread, user):
    """Persist *message*, intending to attach it to *thread*.

    NOTE(review): in the old db API, assigning `message.parent` after
    construction does NOT set a datastore ancestor (parent is a
    constructor-only argument), so Thread.messages()'s ANCESTOR query may
    never find messages saved this way -- confirm against callers.  The
    `user` argument is unused.
    """
    message.parent = thread
    message.put()
class ItemCollection(db.Model):
    """A named, ordered list of Item key ids curated by a user."""
    title = db.StringProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    created_by_id = db.StringProperty()
    # numeric Item key ids; resolved lazily by get_items()
    items = db.ListProperty(int,indexed=False,default=None)
    def get_items(this):
        """Resolve stored ids to Item entities, silently skipping deleted ones."""
        item_collection = []
        for id in this.items:
            item_obj = db.get(db.Key.from_path('Item', id))
            if item_obj:
                item_collection.append(item_obj)
        return item_collection
class Item(db.Model):
    """A marketplace listing: pricing, lifecycle flags and bidding state."""
    title = db.StringProperty()
    description = db.TextProperty()
    summary = db.TextProperty()
    price = db.FloatProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    expiration_date = db.DateProperty()
    image = db.BlobProperty()
    is_active = db.BooleanProperty()
    deactivated = db.BooleanProperty()
    feedback = db.TextProperty()
    rating = db.IntegerProperty()
    buyer_id = db.StringProperty()
    #belongs_to User
    created_by_id = db.StringProperty()
    bidding_enabled = db.BooleanProperty()
    # NOTE(review): bid amount kept as a StringProperty -- any numeric
    # comparison elsewhere must convert; confirm against the bidding views
    highest_bid = db.StringProperty()
    highest_bid_id = db.StringProperty()
    sold = db.BooleanProperty()
    sponsored = db.BooleanProperty()
    def is_expired(this):
        """True strictly after expiration_date (the last day still counts)."""
        return (datetime.date.today() > this.expiration_date)
    def display_image(this):
        """<img> tag for the uploaded image, or '' when none exists."""
        if this.image:
            return '<img src="/images/?item_id=' + str(this.key().id()) + '"/>'
        else:
            return ''
    def display_image_url(this):
        """URL of the image-serving handler, or '' when no image exists."""
        if this.image:
            return '/images/?item_id=' + str(this.key().id())
        else:
            return ''
    def get_creator(this):
        """LoginInformation of the seller (or None)."""
        return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", this.created_by_id).get()
    def get_feedback(this):
        """Query over ItemFeedback entities left for this item."""
        return db.GqlQuery("SELECT * FROM ItemFeedback WHERE item_id = :1", str(this.key().id()))
class UserFeedback(db.Model):
    """A numeric rating left by one user for another (averaged by
    LoginInformation.get_average_rating)."""
    created_by_id = db.StringProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    for_user_id = db.StringProperty()
    rating = db.IntegerProperty()
    def get_creator(this):
        """LoginInformation of the rater (or None)."""
        return db.GqlQuery("SELECT * FROM LoginInformation WHERE user_id = :1", this.created_by_id).get()
class ItemFeedback(db.Model):
    """A rating plus free-form feedback text for a specific Item."""
    created_by_id = db.StringProperty()
    created_at = db.DateTimeProperty(auto_now_add=True)
    # stored as the string form of the Item's numeric key id
    item_id = db.StringProperty()
    rating = db.IntegerProperty()
    feedback = db.TextProperty()
class Search(db.Model):
    """A search query recorded for a user."""
    created_by_id = db.StringProperty()
    search = db.StringProperty()
class IsTestDataLoaded(db.Model):
    """Singleton-style flag recording whether test fixtures were loaded."""
    is_test_data_loaded = db.BooleanProperty()
class TrustedPartner(db.Model):
    """A federated partner site and the shared tokens used to authenticate it
    (local_auth_token is checked by authenticate())."""
    name = db.StringProperty()
    base_url = db.StringProperty()
    local_auth_token = db.StringProperty()
    foreign_auth_token = db.StringProperty()
class Suggestion(db.Model):
    """A stored suggestion query string."""
    query = db.StringProperty()
|
985,999 | 0e86e694f1d48268023231e5858848b032e175fb | # Necessary imports. Provides library functions to ease writing tests.
from lib import prebuild, testcase, SUBMITTY_INSTALL_DIR
import subprocess
import os
import glob
import shutil
import traceback
############################################################################
# COPY THE ASSIGNMENT FROM THE SAMPLE ASSIGNMENTS DIRECTORIES
SAMPLE_ASSIGNMENT_CONFIG = SUBMITTY_INSTALL_DIR + "/more_autograding_examples/input_output_subdirectories/config"
SAMPLE_SUBMISSIONS = SUBMITTY_INSTALL_DIR + "/more_autograding_examples/input_output_subdirectories/submissions"
@prebuild
def initialize(test):
    """Set up assignment_config and a fresh data directory for this testcase.

    Copies the sample config.json and the sample test_input files into the
    testcase directory.
    """
    # exist_ok replaces the old `except OSError: pass`, which also hid real
    # failures (e.g. permission errors), and which silently skipped the
    # re-mkdir of data/ whenever rmtree raised.
    os.makedirs(os.path.join(test.testcase_path, "assignment_config"), exist_ok=True)
    data_path = os.path.join(test.testcase_path, "data")
    if os.path.isdir(data_path):
        shutil.rmtree(data_path)
    os.makedirs(data_path, exist_ok=True)
    subprocess.call(["cp",
                     os.path.join(SAMPLE_ASSIGNMENT_CONFIG, "config.json"),
                     os.path.join(test.testcase_path, "assignment_config")])
    subprocess.call(["cp"] + ["-r"] +
                    glob.glob(os.path.join(SAMPLE_ASSIGNMENT_CONFIG, "test_input", "*")) +
                    [data_path])
############################################################################
def cleanup(test):
    """Reset the testcase data directory between sub-tests: remove previous
    outputs/sources and restore the expected test_output files."""
    data_dir = os.path.join(test.testcase_path, "data")
    subprocess.call(["rm", "-rf"] + glob.glob(os.path.join(data_dir, "test*")))
    subprocess.call(["rm", "-f"] + glob.glob(os.path.join(data_dir, "results*")))
    subprocess.call(["rm", "-f"] + glob.glob(os.path.join(data_dir, "*.cpp")))
    expected_dir = os.path.join(data_dir, "test_output")
    os.mkdir(expected_dir)
    sample_outputs = glob.glob(os.path.join(SAMPLE_ASSIGNMENT_CONFIG, "test_output", "*"))
    subprocess.call(["cp", "-r"] + sample_outputs + [expected_dir])
@testcase
def schema_validation(test):
    """Check the generated complete_config.json validates against the schema."""
    cleanup(test)
    config_path = os.path.join(test.testcase_path, 'assignment_config', 'complete_config.json')
    try:
        test.validate_complete_config(config_path)
    except Exception:
        # surface the full validation traceback in the test log, then fail
        traceback.print_exc()
        raise
@testcase
def correct(test):
    """Grade the known-correct submission and diff against the expected
    grade report and results JSON."""
    cleanup(test)
    subprocess.call(["cp",
                     os.path.join(SAMPLE_SUBMISSIONS, "correct.cpp"),
                     os.path.join(test.testcase_path, "data/code.cpp")])
    test.run_compile()
    test.run_run()
    test.run_validator()
    # -b: ignore whitespace-only differences in the grade report
    test.diff("grade.txt","grade.txt_correct","-b")
    test.json_diff("results.json","results.json_correct")
@testcase
def buggy(test):
    """Grade the known-buggy submission and diff against the expected
    (partial-credit) grade report and results JSON."""
    cleanup(test)
    subprocess.call(["cp",
                     os.path.join(SAMPLE_SUBMISSIONS, "buggy.cpp"),
                     os.path.join(test.testcase_path, "data/code.cpp")])
    test.run_compile()
    test.run_run()
    test.run_validator()
    # -b: ignore whitespace-only differences in the grade report
    test.diff("grade.txt","grade.txt_buggy","-b")
    test.json_diff("results.json","results.json_buggy")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.