import os
import lit.formats
from lit.llvm import llvm_config
config.name = 'Nacro'
config.test_format = lit.formats.ShTest(True)
config.suffixes = ['.c', '.cpp', '.cc']
config.excludes = ['CMakeLists.txt']
config.test_source_root = os.path.dirname(__file__)
config.test_exec_root = os.path.join(config.nacro_obj_root, 'test')
config.substitutions.append(('%clang',
os.path.join(config.llvm_bin_dir, 'clang')))
config.substitutions.append(('%FileCheck', config.filecheck_path))
# FIXME: What about .dylib?
config.substitutions.append(('%NacroPlugin',
os.path.join(config.nacro_obj_root, 'NacroPlugin.so')))
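# A possible resolution of the FIXME above, sketched here as commented-out
# code (the platform check is an assumption, not part of the original config):
# import platform
# shlib_ext = '.dylib' if platform.system() == 'Darwin' else '.so'
# config.substitutions.append(('%NacroPlugin',
#     os.path.join(config.nacro_obj_root, 'NacroPlugin' + shlib_ext)))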
|
nilq/baby-python
|
python
|
import requests
from dhooks import Webhook
def banner():
print("""
Fuck Off Loggers
Input Webhook URL
""")
def deleter():
    # Prompt for the webhook URL, send one final message through it,
    # then delete the webhook itself via the Discord REST API.
    start = input(">")
    hook = Webhook(start)
    hook.send("Stop logging shit whore")
    requests.delete(start)
banner()
deleter()
# Simple shit can be used for anything besides robloxloggers
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
'''
This script reads program files and concatenates the beginning of
all files to create an input prompt, which is then fed to OpenAI
Codex to generate a README.
'''
import sys
# Check if the openai module is installed.
try:
import openai
except ImportError:
print('openai module not found. Try running "pip3 install openai"')
sys.exit(1)
import os
import argparse
import configparser
FILES_NOT_TO_INCLUDE = ['LICENSE', 'README.md']
STREAM = True
cur_dir_not_full_path = os.getcwd().split('/')[-1]
README_START = f'# {cur_dir_not_full_path}\n## What is it?\n'
# Get config dir from environment or default to ~/.config
CONFIG_DIR = os.getenv('XDG_CONFIG_HOME', os.path.expanduser('~/.config'))
API_KEYS_LOCATION = os.path.join(CONFIG_DIR, 'openaiapirc')
def create_template_ini_file():
"""
If the ini file does not exist create it and add the organization_id and
secret_key
"""
if not os.path.isfile(API_KEYS_LOCATION):
with open(API_KEYS_LOCATION, 'w') as f:
f.write('[openai]\n')
f.write('organization_id=\n')
f.write('secret_key=\n')
print('OpenAI API config file created at {}'.format(API_KEYS_LOCATION))
print('Please edit it and add your organization ID and secret key')
print('If you do not yet have an organization ID and secret key, you\n'
'need to register for OpenAI Codex: \n'
'https://openai.com/blog/openai-codex/')
sys.exit(1)
def initialize_openai_api():
"""
Initialize the OpenAI API
"""
# Check if file at API_KEYS_LOCATION exists
create_template_ini_file()
config = configparser.ConfigParser()
config.read(API_KEYS_LOCATION)
    openai.organization = config['openai']['organization_id'].strip('"').strip("'")
    openai.api_key = config['openai']['secret_key'].strip('"').strip("'")
def create_input_prompt(length=3000):
input_prompt = ''
files_sorted_by_mod_date = sorted(os.listdir('.'), key=os.path.getmtime)
# Reverse sorted files.
files_sorted_by_mod_date = files_sorted_by_mod_date[::-1]
for filename in files_sorted_by_mod_date:
# Check if file is a image file.
is_image_file = False
for extension in ['.png', '.jpg', '.jpeg', '.gif', '.bmp', '.svg']:
if filename.endswith(extension):
is_image_file = True
break
if filename not in FILES_NOT_TO_INCLUDE and not filename.startswith('.') \
and not os.path.isdir(filename) and not is_image_file:
with open(filename) as f:
input_prompt += '\n===================\n# ' + filename + ':\n'
input_prompt += f.read() + '\n'
input_prompt = input_prompt[:length]
input_prompt += '\n\n===================\n# ' + 'README.md:' + '\n'
input_prompt += README_START
return input_prompt
def generate_completion(input_prompt, num_tokens):
response = openai.Completion.create(engine='code-davinci-001', prompt=input_prompt, temperature=0.5, max_tokens=num_tokens, stream=STREAM, stop='===================\n')
return response
def clear_screen_and_display_generated_readme(response):
# Clear screen.
os.system('cls' if os.name == 'nt' else 'clear')
generated_readme = ''
print(README_START)
generated_readme = README_START
while True:
next_response = next(response)
completion = next_response['choices'][0]['text']
# print("completion:", completion)
# print(next(response))
print(completion, end='')
generated_readme = generated_readme + completion
        if next_response['choices'][0]['finish_reason'] is not None:
            break
return generated_readme
def save_readme(readme_text):
    '''
    Save the README.
    If a README.md already exists, ask the user whether to
    overwrite it.
    '''
if os.path.isfile('README.md'):
answer = input('A README.md already exists. Do you want to overwrite it? [y/N] ')
if answer == '' or answer == 'n' or answer == 'N':
print('\nThe README was not saved.')
return
with open('README.md', 'w') as f:
f.write(readme_text)
print('\nREADME.md saved.')
def generate_until_accepted(input_prompt, num_tokens):
    '''
    Generate new READMEs and ask the user whether to save each
    generated README.
    '''
while True:
response = generate_completion(input_prompt, num_tokens)
generated_readme = clear_screen_and_display_generated_readme(response)
        # Ask the user whether to save the generated readme.
answer = input("\n\nDo you want to save the generated README? [y/N] ")
if answer == '' or answer == 'n' or answer == 'N':
print('\nThe generated README is not saved.')
continue
elif answer == 'y' or answer == 'Y':
save_readme(generated_readme)
answer = input("\n\nDo you want to generate another README? [Y/n] ")
if answer == '' or answer == 'y' or answer == 'Y':
continue
break
def get_args():
# Get the number of tokens as positional argument.
parser = argparse.ArgumentParser()
parser.add_argument("--tokens", type=int, default=256)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = get_args()
initialize_openai_api()
input_prompt = create_input_prompt()
generate_until_accepted(input_prompt, args.tokens)
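# Hypothetical invocation (the script filename is an assumption; credentials
# must already be present in the openaiapirc config file created above):
#   python3 generate_readme.py --tokens 512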
|
nilq/baby-python
|
python
|
"""
Connects to a given SP3 instance and sends the given packets.
"""
import argparse
import asyncio
import binascii as bi
import colors
import json
import netifaces
import time
from scapy.all import IP, TCP, Raw
import websockets
def get_packets(public_ip, victim_ip, protocol, sport):
    """
    Returns a list of packets (represented by bytes) to spoof through SP3.
    Args:
        public_ip (str): destination IP address of the spoofed packets
        victim_ip (str): the IP address to spoof packets from
        protocol (str): which payload to use (http, https, esni, or garbage data)
        sport (int): source port to use for packets
    """
if protocol == "http":
payload = b"GET /?q=ultrasurf HTTP/1.1\r\nHost: youporn.com\r\n\r\n"
elif protocol == "https":
payload = bi.unhexlify("16030101400100013c0303a6308d7e4350bbb358b2775fdc299883a29bf1bde3a61c3298f0ca18909434790000aac030c02cc028c024c014c00a00a500a300a1009f006b006a0069006800390038003700360088008700860085c032c02ec02ac026c00fc005009d003d00350084c02fc02bc027c023c013c00900a400a200a0009e00670040003f003e0033003200310030009a0099009800970045004400430042c031c02dc029c025c00ec004009c003c002f00960041c011c007c00cc00200050004c012c008001600130010000dc00dc003000a00ff0100006900000010000e00000b796f75706f726e2e636f6d000b000403000102000a001c001a00170019001c001b0018001a0016000e000d000b000c0009000a00230000000d0020001e060106020603050105020503040104020403030103020303020102020203000f000101")
elif protocol == "esni":
payload = bi.unhexlify("16030103ae010003aa0303d992f9c22fbe7a7cdbc9619924bd9cc13c057f5f3da1829426cb0944292705152033c5be80af6de7633e07680125e27e3f7b80ff5e9b3cbe5278434c90b9e0e5fa0024130113031302c02bc02fcca9cca8c02cc030c00ac009c013c014009c009d002f0035000a0100033d00170000ff01000100000a000e000c001d00170018001901000101000b000201000010000e000c02683208687474702f312e310005000501000000000033006b0069001d002019570ada256d971048b34d3e9ff5607588bf10cfb6c064fc45a0fc401d9a7c470017004104ea047fd2e0fc3314de4bf03ee6205134f0d15c07f62b77625a95dc194ce8fb88cc16e53c8b400ba463915b87480b247851c095abdb0d3d5d5b14dd77dcd73750002b00050403040303000d0018001604030503060308040805080604010501060102030201002d00020101ffce016e1301001d00203652aaf122dc47dcf9fa8c37377476d050e54119adfb518f7aabd842ac97d23b00205a30e70593f57708370310ecf7054e488a62eb11e01fd059851c442d453d15c5012441910eec152c4df5ff28bf5cddb1a2e54e8595197e3dc36325145ad50a7842eb3860c8fc6ac5c1794017101365c6122abb3b81f31f5f4204eebb244252d22600734424d875948657b892d3aab3310491aff3b5126f1186bd9c321fb446cf2a41985dd206364ea28c3f8aafeafc62e039f157c3f2703a35448d2d16dcf2d5055ce58c024a5b4eb780fc5128af4ba4e90d6eef1b3cf30a5b2000448d65d6af4fffabeb91e1ed2093fdcc6ffd87ceb94429864ddb657e6316654631193fd25840e51645e1708d351140dd6eeefb80ddbaebb250b2975a1d5f291d99f89de4553d083f1b9820a3ee6976357cff433b7eb77febb3eb0db012154154d3e19b4409f8afa11aa1baeb0b7663d97f0caca2b11ed971fc574588e76a37aa4259593fe8e07fbbca27fa001c00024001002900eb00c600c07f87fafe9de4168227aeec4540f1aaeae43ff61a353f5480420ac3c33f90003fe6f501080bf04f22576a0cc1db8dc83d37b25859a81ce0277364a1794cde1c60f3b94175477beff56db7f9e2b83b31383b7d8b5da20834fb0a63d7ba2e42ad3dfa21666ed8621f34273ac5c273d7f492750e3df3bae36e398ddf83d4a7c36f639087f14eb1f7bfb2c7c0c736d69bcdbf21158c07b7088b95e5bcd08138d6b511f6492d7d93bb3729641519097b970cfeffa5882c67111dcf5d7966a1c58b4edb6e8c905a002120e47ccba37d89e4c1d979c6ef954d1cd946eff0d3119aa2b4d6411138aec74579")
else:
payload = b"nonsense data"
pkt = IP(dst=public_ip, src=victim_ip)/TCP(dport=80, sport=sport, seq=100, flags="S")
pkt2 = IP(dst=public_ip, src=victim_ip)/TCP(dport=80, sport=sport, seq=101, flags="PA")/Raw(payload)
return [bytes(pkt), bytes(pkt2)]
def traceroute_helper(public_ip, victim_ip, sport, ttl):
"""
Helps run a traceroute by returning a packet with the given TTL.
"""
return bytes(IP(dst=public_ip, src=victim_ip, ttl=ttl)/TCP(dport=80, sport=sport, seq=100, flags="S"))
def get_ip():
"""
Gets the IP address of the first interface on this computer.
"""
for iface in netifaces.interfaces():
if "lo" in iface:
continue
iface_info = netifaces.ifaddresses(iface)
if netifaces.AF_INET not in iface_info:
continue
return iface_info[netifaces.AF_INET][0]['addr']
return None
async def consent(uri, public_ip, protocol, sport, victim_ip, perform_sp3_traceroute):
    """
    Connects to the given SP3 instance and holds open a connection.
    Args:
        - uri (str): URI of a SP3 instance (ws://ip:port)
        - public_ip (str): public facing IP address of this machine
        - protocol (str): http, https, esni, or malformed
        - sport (int): source port
        - victim_ip (str): IP address to spoof packets from
        - perform_sp3_traceroute (bool): whether we should perform a traceroute instead
    """
print(colors.color("Connecting to SP3 server %s to spoof traffic to %s..." % (uri, public_ip), fg='yellow'))
# Authorize for this destination address with a websockets authentication.
info = {"DestinationAddress": public_ip, "AuthenticationMethod": 0}
async with websockets.connect(uri) as websocket:
print(colors.color("Connected to SP3", fg='green'))
await websocket.send(json.dumps(info))
response = await websocket.recv()
response = json.loads(response)
if response["Status"] != 0:
print(colors.color("ERROR: Unexpected status from SP3.", fg='red'))
print(response)
return
# Supply the challenge given, NOT the challenge it just returned to us in the above response.
ready = {"DestinationAddress": public_ip, "Challenge": response["Challenge"]}
await websocket.send(json.dumps(ready))
response = await websocket.recv()
response = json.loads(response)
if response["Status"] != 0:
print(colors.color("ERROR: Unexpected status from SP3.", fg='red'))
print(response)
return
if perform_sp3_traceroute:
print(colors.color("Launching SP3 traceroute: spoofing 30 packets through SP3", fg='green'))
            for ttl in range(0, 30):
                await websocket.send(traceroute_helper(public_ip, victim_ip, sport, ttl))
                # asyncio.sleep keeps the event loop responsive, unlike time.sleep.
                await asyncio.sleep(0.1)
                print("TTL %d: sent." % ttl)
pkts = get_packets(public_ip, victim_ip, protocol, sport)
print(colors.color("Completed SP3 handshake: spoofing %d packets through SP3" % len(pkts), fg='green'))
        num_resends = 10
        for _ in range(num_resends):
            for pkt in pkts:
                await websocket.send(bytes(pkt))
        print(colors.color("Sent %d packets (%d times)" % (len(pkts), num_resends), fg='green'))
def get_args():
"""
Sets up arg parsing.
"""
parser = argparse.ArgumentParser(description="SP3 Spoofing Script")
parser.add_argument("--public-ip", default=get_ip(), type=str, help="IP address of this computer")
parser.add_argument("--victim-ip", required=True, type=str, help="IP address of victim computer (who traffic should be spoofed as)")
parser.add_argument("--protocol", default="http", choices=('http', 'https', 'malformed', 'esni'), type=str, help="payload protocol to send with.")
parser.add_argument("--sport", type=int, help="source port to use")
parser.add_argument("--perform-sp3-traceroute", action='store_true', help="instead of launching the attack, perform an sp3 traceroute")
parser.add_argument("--sp3", default="ws://192.26.136.232:8080/sp3", type=str, help="The URI IP:port of the sp3 server")
return parser.parse_args()
def main(args):
"""
Calls the consent function with the asyncio event loop.
"""
asyncio.get_event_loop().run_until_complete(consent(args.sp3, args.public_ip, args.protocol, args.sport, args.victim_ip, args.perform_sp3_traceroute))
if __name__ == "__main__":
main(get_args())
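# Hypothetical invocation (addresses and port are placeholders):
#   python3 sp3_spoof.py --victim-ip 198.51.100.7 --sport 1337 --protocol http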
|
nilq/baby-python
|
python
|
from .psg_extractors import extract_psg_data
from .hyp_extractors import extract_hyp_data
from .header_extractors import extract_header
|
nilq/baby-python
|
python
|
from flask import Flask
app = Flask(__name__)
app.config['DEBUG'] = True
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
import sys
sys.path.insert(0, 'lib')
import json
import urllib, urllib2
import httplib
import time
from BeautifulSoup import BeautifulSoup
import requests
from datetime import datetime
from email.mime.text import MIMEText as MIME
import traceback
import smtplib
# Credentials (if needed)
PARSE_APP_ID = "your parse app id here"
PARSE_REST_API_KEY = "your parse rest api key here"
def minutes_left(proj):
deadline = proj["deadline"]
current = time.time()
minutes_left = (deadline-current)/60
return minutes_left
def soupify(url):
print "SOUPIFYING"
data = urllib2.urlopen(url)
print "URL is "+url
#data = r.text
#print "data: "+data[:100]
soup = BeautifulSoup(data)
return soup
def pretty_print(project):
    print json.dumps(project, sort_keys=True, indent=4, separators=(',', ': '))
def epoch_to_iso8601(timestamp):
date = {"__type": "Date","iso": datetime.fromtimestamp(timestamp).isoformat()+".000Z"}
print date
return date
def save(cxn,project):
cxn.request('POST', '/1/classes/Project', json.dumps(project), {
"X-Parse-Application-Id": PARSE_APP_ID,
"X-Parse-REST-API-Key": PARSE_REST_API_KEY,
"Content-Type": "application/json"
})
result = json.loads(cxn.getresponse().read())
return result
def create(project):
try:
#dictionary comprehension
good_keys = ["backers_count","slug","blurb","country","currency","goal","name","pledged"]
good = { key: project[key] for key in good_keys }
#flattening out nested dictionaries
good["category"] = project["category"]["name"]
good["project_deadline"] = epoch_to_iso8601(project["deadline"])
good["creation_date"] = epoch_to_iso8601(project["created_at"])
good["launch_date"] = epoch_to_iso8601(project["launched_at"])
good["project_url"] = project["urls"]["web"]["project"]
good["rewards_url"] = project["urls"]["web"]["rewards"]
good["proj_id"] = project["id"]
good["image"] = project["photo"]["1024x768"]
good["user_id"] = project["creator"]["id"]
#initialize scraper
url = good['project_url']
print "#################\nURL: "+url+"\n#######################"
soup = soupify(url)
#scrape campaign data
description_div = soup.findAll(attrs={"class":"full-description js-full-description responsive-media formatted-lists"})
print "Desc_div: "+str(len(description_div))
if description_div:
description = description_div[0]
good["campaign_text"] = description.text
video_player = soup.findAll("div", {"class": "video-player"})
if video_player:
video = video_player[0]
good["campaign_video"] = video["data-video-url"]
desc_imgs = description.findAll("img")
if desc_imgs:
good["campaign_images"] = [div["src"] for div in desc_imgs]
desc_iframes = description.findAll("iframe")
if desc_iframes:
good["campaign_secondary_videos"] = [div["src"] for div in desc_iframes]
else:
print "No description found."
return good
    except Exception:
        tb = traceback.format_exc()
        print tb
#server = setup_server()
#send(server,tb)
#server.close()
return None
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.route('/scrape')
def scrape():
    # Walk the Kickstarter discovery pages (sorted by end date) and save
    # projects with less than 10 minutes remaining to Parse.
    page = 1
    more = True
while more:
data = json.load(urllib2.urlopen('https://www.kickstarter.com/discover/advanced.json?google_chrome_workaround&page='+str(page)+'&category_id=0&woe_id=0&sort=end_date'))
projects = data["projects"]
connection = httplib.HTTPSConnection('api.parse.com', 443)
connection.connect()
for project in projects:
if minutes_left(project)<10:
final = create(project)
if final:
print final["name"]
#check for duplicate
params = urllib.urlencode({"where":json.dumps({
"proj_id": final["proj_id"]
}),"count":1,"limit":0})
connection.request('GET', '/1/classes/Project?%s' % params, '', {
"X-Parse-Application-Id": PARSE_APP_ID,
"X-Parse-REST-API-Key": PARSE_REST_API_KEY
})
result = json.loads(connection.getresponse().read())
print "Duplicates checK:"
print result
duplicates = result["count"]
if duplicates == 0:
print "No duplicates, saving object."
resp = save(connection,final)
print resp
else:
print "Duplicate found. Not saving object."
else:
print "Not enough time. Breaking out of loop."
more = False
break
connection.close()
print "Cxn closed."
page = page + 1
print "SCRAPE SUCCESSFUL."
return "Scrape successful."
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, nothing at this URL.', 404
|
nilq/baby-python
|
python
|
from flask import Flask
def create_app(**config_overrides):
app = Flask(__name__)
# Load default config then apply overrides
app.config.from_object('config.config')
app.config.update(config_overrides)
app.url_map.strict_slashes = False
from .views import views
app.register_blueprint(views)
return app
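# A minimal usage sketch (assumes a config module exists at config/config.py,
# as referenced above):
#   app = create_app(TESTING=True)
#   app.run()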
|
nilq/baby-python
|
python
|
#******************************************************************************************
# Copyright (c) 2019 Hitachi, Ltd.
# All rights reserved. This program and the accompanying materials are made available under
# the terms of the MIT License which accompanies this distribution, and is available at
# https://opensource.org/licenses/mit-license.php
#
# March 1st, 2019 : First version.
#******************************************************************************************
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import tutorials.tf_model.tf_model as tf_model
from neuron_coverage.tensorflow_native.structutil import NetworkStruct
data_dir = r'C:\Users\yrl-user\Downloads\MNIST_data'
mnist = input_data.read_data_sets(data_dir, one_hot=True)
def loss(logits, labels):
    # Cross-entropy loss; tf.log(logits) assumes the model outputs softmax probabilities.
    cross_entropy = -tf.reduce_sum(labels*tf.log(logits))
    tf.summary.scalar("cross_entropy", cross_entropy)
    return cross_entropy
def training(loss, learning_rate):
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss)
return train_step
if __name__ == "__main__":
with tf.Graph().as_default():
network_struct = NetworkStruct()
x_image = tf.placeholder("float", shape=[None, 784])
network_struct.set_input(x_image)
y_label = tf.placeholder("float", shape=[None, 10])
network_struct.set_input(y_label)
W = tf.Variable(tf.zeros([784,10]))
b = tf.Variable(tf.zeros([10]))
keep_prob = tf.placeholder("float")
network_struct.set_input(keep_prob)
logits = tf_model.model_construction.interence(x_image, keep_prob)
network_struct.set_output(logits)
loss_value = loss(logits, y_label)
train_op = training(loss_value,1e-4)
accur = tf_model.model_construction.accuracy(logits, y_label)
init_op = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init_op)
summary_op = tf.summary.merge_all()
summary_writer = tf.summary.FileWriter("./tmp/data", graph=sess.graph)
        saver = tf.train.Saver()
for step in range(2000):
batch = mnist.train.next_batch(50)
            if step % 100 == 0:
                train_accuracy = sess.run(accur, feed_dict={x_image: batch[0], y_label: batch[1], keep_prob: 1.0})
                print("step %d, training accuracy : %g" % (step, train_accuracy))
sess.run(train_op, feed_dict={x_image: batch[0], y_label: batch[1], keep_prob:0.5})
summary_str = sess.run(summary_op, feed_dict={x_image: batch[0], y_label: batch[1], keep_prob: 1.0})
summary_writer.add_summary(summary_str, step)
summary_writer.flush()
print("test accuracy : %g" %sess.run(accur, feed_dict={x_image: mnist.test.images[0:1000], y_label: mnist.test.labels[0:1000], keep_prob: 1.0}))
network_struct.set_info_by_session(sess)
network_struct.save(sess, "./tf_ckpt/model.ckpt")
|
nilq/baby-python
|
python
|
from typing import List
class Solution:
    @staticmethod
    def two_sum(nums: List[int], target: int) -> List[int]:
        for i in range(len(nums)):
            # Start the inner scan at i + 1 so an element is never paired with itself.
            for j in range(i + 1, len(nums)):
                if (nums[i] + nums[j]) == target:
                    return [i, j]
if __name__ == '__main__':
    print(Solution.two_sum([3, 2, 4], 6))
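# A linear-time alternative, added here only as a sketch (not part of the
# original solution): trade memory for speed with a value -> index map.
def two_sum_linear(nums: List[int], target: int) -> List[int]:
    seen = {}  # maps a value already visited to its index
    for i, value in enumerate(nums):
        complement = target - value
        if complement in seen:
            return [seen[complement], i]
        seen[value] = i
    return []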
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
Crisil question test cases.
Author: Ikram Ulhaq
"""
import unittest
import numpy as np
from solutions import *
class Testing(unittest.TestCase):
def setUp(self):
self.movavg = movavg
self.orangePurchase1 = orangePurchase1
self.orangePurchase2 = orangePurchase2
self.orangePurchase3 = orangePurchase3
    def test_movavg_100randomnumbers_over5(self):
        """Run standard moving average graph for 100 numbers over 5 pts to avg:ok"""
        f = [value for value in np.random.randint(0, 100, size=100)]
        self.assertEqual(self.movavg(f, 5), "ok")
    def test_movavg_100randomnumbers_invalidAvgValues(self):
        """Run standard moving average graph for 100 numbers use invalid avg values:Exception"""
        f = [value for value in np.random.randint(0, 100, size=100)]
        self.assertEqual(self.movavg(f, "x"), "Exception")
def test_movavg_100randomnumbers_invalidDataSet(self):
"""Run standard moving average graph for 100 numbers use invalid dataset:Exception"""
f = [100,200,300,400,'x']
self.assertEqual( self.movavg(f,5), "Exception")
def test_orangePurchase1_0(self):
"""Run orangePurchase1 test for money value of 0"""
self.assertEqual( self.orangePurchase1(0), 0)
def test_orangePurchase1_1(self):
"""Run orangePurchase1 test for money value of 1"""
self.assertEqual( self.orangePurchase1(1), 1)
def test_orangePurchase1_1000(self):
"""Run orangePurchase1 test for money value of 1000"""
self.assertEqual( self.orangePurchase1(1000), 44)
def test_orangePurchase2_0(self):
"""Run orangePurchase2 test for money value of 0"""
self.assertEqual( self.orangePurchase2(0), 0)
    def test_orangePurchase2_1(self):
        """Run orangePurchase2 test for money value of 1"""
        self.assertEqual(self.orangePurchase2(1), 1)
def test_orangePurchase2_1000(self):
"""Run orangePurchase2 test for money value of 1000"""
self.assertEqual( self.orangePurchase2(1000), 9)
def test_orangePurchase3_0_PriceFunction1(self):
"""Run orangePurchase3 test for money value of 0"""
priceFunction1 = lambda x: x
self.assertEqual( self.orangePurchase3(0,priceFunction1), 0)
    def test_orangePurchase3_1_PriceFunction1(self):
        """Run orangePurchase3 test for money value of 1"""
        priceFunction1 = lambda x: x
        self.assertEqual(self.orangePurchase3(1, priceFunction1), 1)
def test_orangePurchase3_1000_PriceFunction1(self):
"""Run orangePurchase3 test for money value of 1000"""
priceFunction1 = lambda x: x
self.assertEqual( self.orangePurchase3(1000,priceFunction1), 44)
def test_orangePurchase3_0_PriceFunction2(self):
"""Run orangePurchase3 test for money value of 0"""
priceFunction2 = lambda x: 1 * 2 ** (x - 1)
self.assertEqual( self.orangePurchase3(0,priceFunction2), 0)
    def test_orangePurchase3_1_PriceFunction2(self):
        """Run orangePurchase3 test for money value of 1"""
        priceFunction2 = lambda x: 1 * 2 ** (x - 1)
        self.assertEqual(self.orangePurchase3(1, priceFunction2), 1)
def test_orangePurchase3_1000_PriceFunction2(self):
"""Run orangePurchase3 test for money value of 1000"""
priceFunction2 = lambda x: 1 * 2 ** (x - 1)
self.assertEqual( self.orangePurchase3(1000,priceFunction2), 9)
if __name__ == '__main__':
unittest.main()
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def movavg(f,w):
"""
Function which returns the moving average of a randomly generated set of values f, where w is the number of values over which to average.
Plot the result together with the original values.
:param f: numpy array of random integers between 0 and 100
:param w: number of values to average over
:return: ok if processed, exception if invalid input
"""
if not isinstance(w, int):
print('number of values to average over is not an integer')
return 'Exception'
dataset={}
dataset['datapoints'] = [posn for posn in range(len(f))]
dataset['randomdata'] = f
df = pd.DataFrame(dataset)
# Create on the fly key and label
ma_key = 'SMA_' + str(w)
ma_label = 'SMA ' + str(w) + ' Months'
# Work out moving Average based on number of values to average over
try:
df[ma_key] = df.iloc[:, 1].rolling(window=w).mean()
except:
print("DataFrame could not be generated - invalid data set")
return 'Exception'
plt.plot(df['randomdata'], linestyle='--', marker='.', label='Original Data')
plt.plot(df[ma_key], marker='o', linewidth=3, label=ma_label)
plt.xlabel('Data Point')
plt.ylabel('Moving Average')
plt.title("Moving Average over Data Points")
plt.legend(loc=2)
plt.grid(True)
plt.tight_layout()
plt.show()
return "ok"
def orangePurchase1(m):
"""
Calculate how many oranges can be bought with a set amount of
money. The first orange costs 1, and each subsequent costs 1 more than the previous
(the second costs 2, the third costs 3, and so on).
:param m:total amount of money available (nb m<2,147,483,647)
:return:total number of oranges which can be purchased
"""
if m in [0, 1]:
return m
total = 0
#first term in series
value = 1
#difference between each term
difference=1
#calculate sum of arithmetic progression of prices until money limit is broken
for number_of_oranges in range(m):
total = total + value
value = value + difference
if total > m:
break
return number_of_oranges
def orangePurchase2(m):
"""
Calculate how many oranges can be bought with a set amount of
    money. The first orange costs 1, and each subsequent orange costs double the previous
    (the second costs 2, the third costs 4, and so on).
:param m:total amount of money available (nb m<2,147,483,647)
:return:total number of oranges which can be purchased
"""
if m in [0, 1]:
return m
total = 0
#first term in series
value = 1
#calculate sum of Geometric sequence of prices until money limit is broken
    for number_of_oranges in range(0, m):
        total = total + value
        value = (2 ** number_of_oranges) - 1
        if total == m:
            return number_of_oranges
        elif total > m:
            # Current total breaks the money limit, hence use the previous orange count, which didn't break the limit
            return number_of_oranges - 1
def orangePurchase3(m,priceFunction):
"""
Calculate number of oranges that can be purchased for quantity of money m
given the (user-defined) price function priceFunction for each orange.
:param m:total amount of money available (nb m<2,147,483,647)
:param priceFunction: points to a pricer function = price of nth orange
:return:total number of oranges which can be purchased
"""
if m in [0, 1]:
return m
total = 0
no_of_oranges = 1
while total <= m:
total = total + priceFunction(no_of_oranges)
if total == m:
return no_of_oranges
        elif total > m:
            # Current total breaks the money limit, hence use the previous orange count, which didn't break the limit
            return no_of_oranges - 1
        no_of_oranges = no_of_oranges + 1
return no_of_oranges
if __name__ == '__main__':
    print(orangePurchase1(3))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import os
import pathlib
import re
import ssl
import time
from enum import Enum
from typing import Dict, Optional
import mutagen
import mutagen.easyid3
import requests
import requests.exceptions
from podcastdownloader.exceptions import EpisodeException
class Status(Enum):
blank = 0
pending = 1
downloaded = 2
corrupted = 3
max_attempts = 10
def _rate_limited_request(url: str, head_only: bool) -> requests.Response:
url = url.strip()
attempts = 1
global max_attempts
while True:
try:
if head_only:
response = requests.head(url, timeout=180, allow_redirects=True)
else:
response = requests.get(url, timeout=180, allow_redirects=True)
return response
except (requests.exceptions.RequestException, ssl.SSLError) as e:
if attempts > max_attempts:
raise EpisodeException('Connection was limited/refused: {}'.format(e))
time.sleep(30 * attempts)
attempts += 1
class Episode:
def __init__(self, feed_dict: Dict, podcast: str):
self.feed_entry = feed_dict
self.podcast = podcast
self.status = Status.blank
self.download_link = None
self.size = None
def parseRSSEntry(self):
self.title = re.sub(r'(/|\0)', '', self.feed_entry['title'])
if 'links' in self.feed_entry:
for link in self.feed_entry['links']:
                if 'type' in link and link['type'].startswith('audio'):
self.download_link = link['href']
self.file_type = link['type']
break
elif 'link' in self.feed_entry:
self.download_link = self.feed_entry['link']
self.file_type = None
else:
self.download_link = None
if not self.download_link:
raise EpisodeException(
'No download link found for episode {} in podcast {}'.format(
self.title, self.podcast))
if not self.file_type:
r = _rate_limited_request(self.download_link, True)
self.file_type = r.headers['content-type']
r.close()
self.status = Status.pending
def calcPath(self, dest_folder: pathlib.Path):
intended_path = pathlib.Path(dest_folder, self.podcast)
self.path = None
if self.file_type == 'audio/mp4' or self.file_type == 'audio/x-m4a':
self.path = pathlib.Path(intended_path, self.title + '.m4a')
elif self.file_type == 'audio/mpeg' or self.file_type == 'audio/mp3':
self.path = pathlib.Path(intended_path, self.title + '.mp3')
if self.path is None:
raise EpisodeException('Cannot determine filename with codec {}'.format(self.file_type))
def _get_download_size(self):
r = _rate_limited_request(self.download_link, True)
self.size = int(r.headers['content-length'])
def verifyDownload(self):
self._get_download_size()
if self.path.exists():
found_size = self.path.stat().st_size
# set the tolerance as a percent of the filesize
if abs(found_size - self.size) >= (self.size * 0.02):
self.status = Status.corrupted
def checkExistence(self):
if os.path.exists(self.path) is True:
self.status = Status.downloaded
def downloadContent(self):
content = _rate_limited_request(self.download_link, False).content
with open(self.path, 'wb') as episode_file:
episode_file.write(content)
self.status = Status.downloaded
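# A hypothetical usage sketch (assumes a feedparser-style entry dict):
#   episode = Episode(feed_entry, 'Some Podcast')
#   episode.parseRSSEntry()
#   episode.calcPath(pathlib.Path('podcasts'))
#   episode.checkExistence()
#   if episode.status == Status.pending:
#       episode.downloadContent()
#       episode.verifyDownload()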
|
nilq/baby-python
|
python
|
#!/usr/bin/python3
import argparse
import csv
import os
import re
import sqlite3
from sqlite3 import Error
#import sys
DB_FOLDER = "database"
DB_FILE = "boxiot.db"
DB_SCHEMA = "schema.sql"
CSV_FOLDER = "database/csv"
CSV_ACTIONS = "actions.csv"
CSV_COMBINATIONS = "combinations.csv"
# DB_TABLE_ACTION_TYPES = "ActionTypes"
# DB_TABLE_ACTION = "Actions"
# DB_TABLE_COMBINATION = "Combinations"
# DB_TABLE_COMBINATION_ACTIONS = "CombinationActions"
#region database
def dict_factory(cursor, row):
    # Map column names to values for each fetched row.
    row_dict = {}
    for idx, col in enumerate(cursor.description):
        row_dict[col[0]] = row[idx]
    return row_dict
def create_connection(database_file):
connection = None
try:
connection = sqlite3.connect(database_file)
connection.row_factory = dict_factory
except Error as e:
print(e)
return connection
def execute_script(connection, sql_script):
cursor = connection.cursor()
cursor.executescript(sql_script)
connection.commit()
def get_action_type(connection, action):
sql = '''
SELECT
*
FROM
ActionTypes
WHERE
Name = :ActionType
'''
cursor = connection.cursor()
cursor.execute(sql, action)
return next(cursor, None)
def insert_action_type(connection, action):
sql = '''
INSERT INTO
ActionTypes
(Name)
VALUES
(:ActionType)
'''
cursor = connection.cursor()
cursor.execute(sql, action)
return cursor.lastrowid
def upsert_action_type(connection, action):
db_action_type = get_action_type(connection, action)
    if db_action_type is None:
Id = insert_action_type(connection, action)
else:
Id = db_action_type["Id"]
action["ActionTypeId"] = Id
def get_action(connection, action):
sql = '''
SELECT
*
FROM
Actions
WHERE
Symbol = :Symbol
'''
cursor = connection.cursor()
cursor.execute(sql, action)
return next(cursor, None)
def insert_action(connection, action):
sql = '''
INSERT INTO
Actions
(Symbol, Text, ActionTypeId)
VALUES
(:Symbol, :Text, :ActionTypeId)
'''
cursor = connection.cursor()
cursor.execute(sql, action)
return cursor.lastrowid
def update_action(connection, action):
sql = '''
UPDATE
Actions
SET
Text = :Text
, ActionTypeId = :ActionTypeId
WHERE
Id = :Id
'''
cursor = connection.cursor()
cursor.execute(sql, action)
def upsert_action(connection, action):
upsert_action_type(connection, action)
db_action = get_action(connection, action)
    if db_action is None:
insert_action(connection, action)
else:
action['Id'] = db_action['Id']
update_action(connection, action)
def get_combination(connection, combination_actions):
sql = '''
SELECT
*
FROM
Combinations
WHERE
Pattern = :Pattern
'''
cursor = connection.cursor()
cursor.execute(sql, combination_actions)
return next(cursor, None)
def insert_combination(connection, combination_actions):
sql = '''
INSERT INTO
Combinations
(Pattern, Text, ActionCount)
VALUES
(:Pattern, :Text, :ActionCount)
'''
cursor = connection.cursor()
cursor.execute(sql, combination_actions)
return cursor.lastrowid
def update_combination(connection, combination_actions):
sql = '''
UPDATE
Combinations
SET
Text = :Text
WHERE
Id = :Id
'''
cursor = connection.cursor()
cursor.execute(sql, combination_actions)
def get_combination_action(connection, combination_action):
sql = '''
SELECT
*
FROM
CombinationActions
WHERE
CombinationId = :CombinationId
AND ActionId = :ActionId
AND Sequence = :Sequence
AND SubSequence = :SubSequence
'''
cursor = connection.cursor()
cursor.execute(sql, combination_action)
return next(cursor, None)
def insert_combination_action(connection, combination_action):
sql = '''
INSERT INTO
CombinationActions
(CombinationId, ActionId, Sequence, SubSequence)
VALUES
(:CombinationId, :ActionId, :Sequence, :SubSequence)
'''
cursor = connection.cursor()
cursor.execute(sql, combination_action)
return cursor.lastrowid
def upsert_combination(connection, combination_actions):
# upsert_combination_type(connection, combination_actions)
db_combination = get_combination(connection, combination_actions)
    if db_combination is None:
combination_actions["Id"] = insert_combination(connection, combination_actions)
else:
combination_actions['Id'] = db_combination['Id']
update_combination(connection, combination_actions)
#endregion database
#region import
regex = re.compile(r"([\[\(\{\/\<]){0,1}([0-9]{0,1})([a-z]{0,5})([\]\)\}\/\>]{0,1})")
def add_action(combination_actions, action, sequence, sub_sequence):
combination_actions["Actions"].append({ "ActionId": action["Id"], "Sequence": sequence, "SubSequence": sub_sequence })
if sub_sequence == 1:
combination_actions["Text"].append(action["Text"])
else:
combination_actions["Text"][-1] += " " + action["Text"]
def convert_combination(combination, actions):
pattern = combination["Combination"]
type = combination["CombinationType"]
combination_actions = { "Pattern": pattern, "CombinationType": type, "Actions": [], "Text": [] }
sequence = 1
for action in pattern.split("-"):
match = regex.match(action)
        if match is None:
            continue
sub_sequence = 1
if match.group(1):
symbol = match.group(1) + match.group(4)
add_action(combination_actions, actions[symbol], sequence, sub_sequence)
sub_sequence += 1
if match.group(2):
symbol = match.group(2)
add_action(combination_actions, actions[symbol], sequence, sub_sequence)
sub_sequence += 1
if match.group(3):
symbol = match.group(3)
add_action(combination_actions, actions[symbol], sequence, sub_sequence)
sequence += 1
combination_actions["ActionCount"] = len(combination_actions["Text"])
combination_actions["Text"] = ", ".join(combination_actions["Text"])
return combination_actions
def upsert_combination_actions(connection, combination_actions):
upsert_combination(connection, combination_actions)
for combination_action in combination_actions["Actions"]:
combination_action["CombinationId"] = combination_actions["Id"]
db_combination_action = get_combination_action(connection, combination_action)
        if db_combination_action is None:
insert_combination_action(connection, combination_action)
#endregion
#region general
def get_file_content(file):
with open(file, "r", encoding="UTF-8") as f:
return f.read()
#endregion
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--database", default=DB_FILE)
parser.add_argument("--create", action="store_true")
parser.add_argument("--import-csv", action="store_true")
parser.add_argument("-f", "--force", action="store_true")
args = parser.parse_args()
args.database = os.path.join(DB_FOLDER, args.database)
if args.create: # TODO: replace with --script [array]
if os.path.isfile(args.database):
if not args.force:
print(f"database {args.database} already exists")
quit()
os.remove(args.database)
connection = create_connection(args.database)
with connection:
# TODO: simply run all numbered files in init folder
for sql_script_file in [DB_SCHEMA]: #, "actiontypes.sql", "actions.sql", "combinations.sql", "combinationactions.sql"]:
sql_script = get_file_content(os.path.join(DB_FOLDER, sql_script_file))
execute_script(connection, sql_script)
if args.import_csv:
actions = {}
with open(os.path.join(CSV_FOLDER, CSV_ACTIONS), encoding="UTF-8") as csv_file:
actions_reader = csv.DictReader(csv_file, delimiter=",", quotechar="\"")
connection = create_connection(args.database)
with connection:
for action in actions_reader:
upsert_action(connection, action)
actions[action["Symbol"]] = action
connection.commit()
with open(os.path.join(CSV_FOLDER, CSV_COMBINATIONS), encoding="UTF-8") as csv_file:
combinations_reader = csv.DictReader(csv_file, delimiter=",", quotechar="\"")
connection = create_connection(args.database)
with connection:
for combination in combinations_reader:
combination_actions = convert_combination(combination, actions)
upsert_combination_actions(connection, combination_actions)
connection.commit()
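# Typical invocations (the script name is an assumption; flags as defined above):
#   ./database_import.py --create --force   # (re)create the database from schema.sql
#   ./database_import.py --import-csv       # load actions.csv and combinations.csv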
|
nilq/baby-python
|
python
|
from django.test import TestCase
from recipe import models
class ModelTests(TestCase):
def test_recipe_str(self):
"""Test that the string representation of recipe is correct"""
recipe = models.Recipe.objects.create(
name='Test Recipe',
description='A recipe used for tests'
)
self.assertEqual(str(recipe), recipe.name)
def test_ingredient_str(self):
"""Test that the string representation of ingredient is correct"""
recipe = models.Recipe.objects.create(name='Banana Smoothie', description='The name says it all, really')
ingredient = models.Ingredient.objects.create(
name='Bananas',
recipe=recipe
)
self.assertEqual(str(ingredient), ingredient.name)
|
nilq/baby-python
|
python
|
from AzureRiskyUsers import Client
import json
BASE_URL = 'https://graph.microsoft.com/v1.0/'
ACCESS_TOKEN_REQUEST_URL = 'https://login.microsoftonline.com/organizations/oauth2/v2.0/token'
def load_mock_response(file_name: str) -> dict:
"""
Load one of the mock responses to be used for assertion.
Args:
file_name (str): Name of the mock response JSON file to return.
"""
with open(f'test_data/{file_name}', mode='r', encoding='utf-8') as json_file:
return json.loads(json_file.read())
def mock_client():
return Client(client_id='client_id',
verify=False,
proxy=False)
def test_risky_users_list_command(requests_mock) -> None:
"""
Scenario: List Risky Users.
Given:
- User has provided valid credentials.
- Headers and JWT token have been set.
When:
- risky_users_list_command is called.
Then:
- Ensure number of items is correct.
- Ensure outputs prefix is correct.
- Ensure outputs key fields is correct.
"""
from AzureRiskyUsers import risky_users_list_command
mock_response = load_mock_response('list_risky_users.json')
requests_mock.post(ACCESS_TOKEN_REQUEST_URL, json={})
requests_mock.get(f'{BASE_URL}identityProtection/riskyUsers', json=mock_response)
result = risky_users_list_command(mock_client(), {'limit': '20', 'page': '1'})
assert result.outputs_prefix == 'AzureRiskyUsers.RiskyUser'
assert result.outputs_key_field == 'id'
assert len(result.raw_response) == 3
def test_risky_user_get_command(requests_mock) -> None:
"""
Scenario: Get Risky User.
Given:
- User has provided valid credentials.
- Headers and JWT token have been set.
When:
- risky_user_get_command is called.
Then:
- Ensure outputs prefix is correct.
- Ensure outputs key fields is correct.
- Ensure user ID is correct.
"""
from AzureRiskyUsers import risky_user_get_command
mock_response = load_mock_response('get_risky_user.json')
requests_mock.post(ACCESS_TOKEN_REQUEST_URL, json={})
requests_mock.get(f'{BASE_URL}identityProtection/riskyUsers/1', json=mock_response)
result = risky_user_get_command(mock_client(), args={'id': '1'})
assert result.outputs_prefix == 'AzureRiskyUsers.RiskyUser'
assert result.outputs_key_field == 'id'
assert result.raw_response.get('id') == '1'
def test_risk_detections_list_command(requests_mock) -> None:
"""
Scenario: List Risk Detections.
Given:
- User has provided valid credentials.
- Headers and JWT token have been set.
When:
- risk_detections_list_command is called.
Then:
- Ensure outputs prefix is correct.
- Ensure outputs key fields is correct.
- Ensure risk detection ID is correct.
"""
from AzureRiskyUsers import risk_detections_list_command
mock_response = load_mock_response('list_risk_detections.json')
requests_mock.post(ACCESS_TOKEN_REQUEST_URL, json={})
requests_mock.get(f'{BASE_URL}identityProtection/riskDetections', json=mock_response)
result = risk_detections_list_command(mock_client(), {'limit': '20', 'page': '1'})
assert result.outputs_prefix == 'AzureRiskyUsers.RiskDetection'
assert result.outputs_key_field == 'id'
assert result.raw_response.get('value')[0].get('id') == '1'
def test_risk_detection_get_command(requests_mock) -> None:
"""
Scenario: Get Risk Detection.
Given:
- User has provided valid credentials.
- Headers and JWT token have been set.
When:
- risk_detection_get_command is called.
Then:
- Ensure outputs prefix is correct.
- Ensure outputs key fields is correct.
- Ensure risk detection ID is correct.
"""
from AzureRiskyUsers import risk_detection_get_command
mock_response = load_mock_response('get_risk_detection.json')
requests_mock.post(ACCESS_TOKEN_REQUEST_URL, json={})
requests_mock.get(f'{BASE_URL}identityProtection/riskDetections/1', json=mock_response)
result = risk_detection_get_command(mock_client(), args={'id': '1'})
assert result.outputs_prefix == 'AzureRiskyUsers.RiskDetection'
assert result.outputs_key_field == 'id'
assert result.raw_response.get('value')[0].get('id') == '1'
def test_build_query_filter() -> None:
"""
Scenario: Build query filter for API call.
Given:
- Provided valid arguments.
When:
- build_query_filter function is called.
Then:
- Ensure results are valid.
"""
from AzureRiskyUsers import build_query_filter
result = build_query_filter(risk_state='dismissed', risk_level='medium')
assert result == "riskState eq 'dismissed' and riskLevel eq 'medium'"
def test_get_skip_token() -> None:
"""
Scenario: Get skip token.
Given:
- Provided valid arguments.
When:
- get_skip_token function is called.
Then:
- Ensure results are valid.
"""
from AzureRiskyUsers import get_skip_token
result = get_skip_token(next_link=None,
outputs_prefix='AzureRiskyUsers.RiskyUser',
outputs_key_field='id',
readable_output='test')
assert result.outputs_prefix == 'AzureRiskyUsers.RiskyUser'
assert result.outputs_key_field == 'id'
assert result.readable_output == 'test'
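# These tests are written for pytest with the requests-mock plugin, which
# provides the requests_mock fixture (the file name below is an assumption):
#   python -m pytest AzureRiskyUsers_test.py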
|
nilq/baby-python
|
python
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from .operations import mesh_split_face
__all__ = [
'mesh_quads_to_triangles',
]
def mesh_quads_to_triangles(mesh, check_angles=False):
"""Convert all quadrilateral faces of a mesh to triangles by adding a diagonal edge.
Parameters
----------
mesh : :class:`~compas.datastructures.Mesh`
A mesh data structure.
check_angles : bool, optional
Flag indicating that the angles of the quads should be checked to choose the best diagonal.
Returns
-------
None
The mesh is modified in place.
"""
for fkey in list(mesh.faces()):
attr = mesh.face_attributes(fkey)
attr.custom_only = True
vertices = mesh.face_vertices(fkey)
if len(vertices) == 4:
a, b, c, d = vertices
t1, t2 = mesh_split_face(mesh, fkey, b, d)
mesh.face_attributes(t1, attr.keys(), attr.values())
mesh.face_attributes(t2, attr.keys(), attr.values())
# mesh.facedata[t1] = attr.copy()
# mesh.facedata[t2] = attr.copy()
if fkey in mesh.facedata:
del mesh.facedata[fkey]
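# A minimal usage sketch (assumes a quad-dominant mesh loaded via compas):
#   from compas.datastructures import Mesh
#   mesh = Mesh.from_obj('quadmesh.obj')
#   mesh_quads_to_triangles(mesh)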
|
nilq/baby-python
|
python
|
from .rpg_object import RPGObject
class Class(RPGObject):
config_filename = "classes.yaml"
|
nilq/baby-python
|
python
|
from unittest import TestCase
from dragonfly.template.template import Converter
import importlib
class TestTemplate(TestCase):
def test_convert(self):
res = Converter('test.html').convert()
def test_erroneous_if(self):
res = Converter('if_error.html').convert()
with open('if_error.py', 'w+') as f:
f.truncate(0)
f.writelines(res)
with self.assertRaises(SyntaxError):
html = importlib.import_module("if_error").get_html(var=1)
def test_erroneous_for(self):
res = Converter('for_error.html').convert()
with open('for_error.py', 'w+') as f:
f.truncate(0)
f.writelines(res)
with self.assertRaises(KeyError):
arg_dict = {'items': [1, 2, 3]}
html = importlib.import_module("for_error").get_html(arg_dict)
|
nilq/baby-python
|
python
|
from checkio.electronic_station.roman_numerals import checkio
def test_checkio():
assert checkio(6) == "VI", "6"
assert checkio(76) == "LXXVI", "76"
assert checkio(499) == "CDXCIX", "499"
assert checkio(3888) == "MMMDCCCLXXXVIII", "3888"
def test_checkio_extra_all_small():
assert checkio(1) == "I"
assert checkio(2) == "II"
assert checkio(3) == "III"
assert checkio(4) == "IV"
assert checkio(5) == "V"
assert checkio(6) == "VI"
assert checkio(7) == "VII"
assert checkio(8) == "VIII"
assert checkio(9) == "IX"
def test_checkio_extra():
assert checkio(25) == "XXV"
|
nilq/baby-python
|
python
|
# Copyright (c) 2016-2019 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import logging
from ...listener import EventListener
logger = logging.getLogger(__name__)
class CliListener(EventListener):
def __init__(self):
super().__init__(None)
def new_fuzz_job(self, ident, cost, sut, fuzzer, batch):
logger.debug('#%s: New fuzzer job for %s to %s (%s tests).', ident, fuzzer, sut, batch)
def new_update_job(self, ident, cost, sut):
logger.debug('#%s: New update job for %s.', ident, sut)
def new_reduce_job(self, ident, cost, sut, issue_id, size):
logger.debug('#%s: New reduce job for %r in %s (%s bytes).', ident, issue_id, sut, size)
def new_validate_job(self, ident, cost, sut, issue_id):
logger.debug('#%s: New validate job for %r in %s.', ident, issue_id, sut)
def activate_job(self, ident):
logger.debug('#%s: Activate job.', ident)
def remove_job(self, ident):
logger.debug('#%s: Remove job.', ident)
def warning(self, ident, msg):
if ident is not None:
logger.warning('#%s: %s', ident, msg)
else:
logger.warning(msg)
def new_issue(self, ident, issue):
logger.info('#%s: New issue %r in %s.', ident, issue['id'], issue['sut'])
def update_issue(self, ident, issue):
logger.info('#%s: Updated issue %r in %s.', ident, issue['id'], issue['sut'])
def invalid_issue(self, ident, issue):
logger.debug('#%s: Invalid issue %r in %s.', ident, issue['id'], issue['sut'])
def reduced_issue(self, ident, issue):
logger.debug('#%s: Reduced issue %r in %s.', ident, issue['id'], issue['sut'])
|
nilq/baby-python
|
python
|
#!/usr/bin/python3
def magic_string(repit=[-1]):
    # The mutable default list persists across calls, so the counter
    # increments every time the function is called.
    repit[0] += 1
    return "Holberton, " * repit[0] + "Holberton"
|
nilq/baby-python
|
python
|
def flip_word(s, start, end):
    # Reverse the characters of s[start:end] in place.
    l = (end - start) // 2
    for i in range(l):
        s[start + i], s[end - i - 1] = s[end - i - 1], s[start + i]
def solution(s):
    # Reverse the whole character list, then reverse each word back in place,
    # which reorders the words while keeping their letters intact.
    l = len(s)
    for i in range(len(s) // 2):
        s[i], s[l - 1 - i] = s[l - 1 - i], s[i]
    start = 0
    for i in range(len(s) + 1):
        if i == len(s) or s[i] == " ":
            flip_word(s, start, i)
            start = i + 1
s = list("the sky is blue")
solution(s)
print(s)  # joins to "blue is sky the"
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The Project U-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
""" Database files available for a tile type. """
from collections import namedtuple
import json
from utils import lib
from utils.timing import fast_slow_tuple_to_corners, RcElement
TileDbs = namedtuple('TileDbs',
'segbits block_ram_segbits ppips mask tile_type')
class OutPinTiming(namedtuple('OutPinTiming', 'delays drive_resistance')):
""" Timing for site output pins.
Attributes
----------
    delays : dict of PvtCorner to IntristicDelay
        Intrinsic delay of the output pin.
    drive_resistance : float
        Resistance of the drive output pin (milliOhms).
"""
pass
class InPinTiming(namedtuple('InPinTiming', 'delays capacitance')):
""" Timing for site input pins.
Attributes
----------
    delays : dict of PvtCorner to IntristicDelay
        Intrinsic delay of the input pin.
    capacitance : float
        Capacitance of the input pin (microFarads).
"""
pass
class PipTiming(
namedtuple('PipTiming',
'delays drive_resistance internal_capacitance')):
""" Timing for pips.
Attributes
----------
    delays : dict of PvtCorner to IntristicDelay
        Intrinsic delay of the pip.
    internal_capacitance : float
        Capacitance (microFarads) of the pip (which is only seen if the pip is used).
    drive_resistance : float
        Resistance of the drive output pin (milliOhms).
"""
pass
class Pip(
namedtuple(
'Pip',
('name', 'net_to', 'net_from', 'can_invert', 'is_directional',
'is_pseudo', 'is_pass_transistor', 'timing', 'backward_timing'))):
""" Pip information.
Attributes
----------
name : str
Name of pip
net_to : str
Name of output tile wire when pip is unidirectional.
net_from: str
Name of input tile wire when pip is unidirectional.
can_invert : bool
Can this pip invert the signal.
is_directional : bool
        True if this pip is unidirectional, False if this pip is
        bidirectional.
    is_pseudo : bool
        True if this pip is marked as a pseudo-pip.
is_pass_transistor : bool
True if this pip is non-isolating.
timing : PipTiming
Timing used when connecting net_from to net_to. This is the only
timing used when a pip is unidirectional.
May be None if timing information is not present in the database.
backward_timing : PipTiming
Timing used when connecting net_to to net_from. This is only used
if the pip is bidirectional.
May be None if timing information is not present in the database.
"""
pass
class Site(namedtuple('Site', 'name prefix x y type site_pins')):
""" Represents an instance of a site within a tile.
Attributes
----------
name : str
Name of site within tile, instance specific.
prefix : str
Prefix of site naming in Xilinx parlance.
type : str
What type of slice this instance presents.
site_pins : list of SitePin
        Instances of site pins within this site and tile. This is a tuple of
SitePin tuples, and is specific to this instance of the site within
the tile.
"""
class SitePin(namedtuple('SitePin', 'name wire timing')):
""" Tuple representing a site pin within a tile.
    Sites are generic based on type; however, sites are instanced
    within a tile one or more times. The SitePin contains both site-type-generic
    information and tile-type-specific information.
Attributes
----------
name : str
Site type specific name. This name is expected to be the same for
all sites of the same type.
wire : str
Wire name within the tile. This name is site instance specific.
timing : Either InPinTiming or OutPinTiming
Timing of site pin. May be None if database lacks timing information.
"""
WireInfo = namedtuple('WireInfo', 'pips sites')
# Conversion factor from database to internal units.
RESISTANCE_FACTOR = 1e3
CAPACITANCE_FACTOR = 1e3
def get_pip_timing(pip_timing_json):
""" Convert pip_timing_json JSON into PipTiming object.
Returns
-------
If timing information is not present for this pip, returns None.
If timing information is present, returns PipTiming. Some fields may be
None if the pip type lacks that field.
"""
if pip_timing_json is None:
return None
delays = None
if pip_timing_json.get('delay') is not None:
delays = fast_slow_tuple_to_corners(pip_timing_json.get('delay'))
in_cap = pip_timing_json.get('in_cap')
if in_cap is not None:
in_cap = float(in_cap) / CAPACITANCE_FACTOR
else:
in_cap = 0
res = pip_timing_json.get('res')
if res is not None:
res = float(res) / RESISTANCE_FACTOR
else:
res = 0
return PipTiming(
delays=delays,
drive_resistance=res,
internal_capacitance=in_cap,
)
def get_site_pin_timing(site_pin_info):
""" Convert site_pin_info JSON into InPinTiming or OutPinTiming object.
Returns
-------
If timing information is not present for this site pin, returns None.
If this is an output pin, returns OutPinTiming.
If this is an input pin, returns InPinTiming.
"""
if site_pin_info is None:
return None
if isinstance(site_pin_info, str):
return site_pin_info, None
wire = site_pin_info['wire']
if 'delay' not in site_pin_info:
return None
delays = fast_slow_tuple_to_corners(site_pin_info['delay'])
if 'cap' in site_pin_info:
assert 'res' not in site_pin_info
return wire, InPinTiming(
delays=delays,
capacitance=float(site_pin_info['cap']) / CAPACITANCE_FACTOR,
)
else:
assert 'res' in site_pin_info
return wire, OutPinTiming(
delays=delays,
drive_resistance=float(site_pin_info['res']) / RESISTANCE_FACTOR,
)
def get_wires(wires):
""" Converts database input to dictionary of tile wires to wire timing.
Returns dictionary of tile wire name to RcElement or None. """
if isinstance(wires, list):
# Handle old database gracefully.
return {wire: None for wire in wires}
output = {}
for wire, rc_json in wires.items():
if rc_json is None or 'res' not in rc_json:
output[wire] = RcElement(
resistance=0,
capacitance=0,
)
else:
output[wire] = RcElement(
resistance=float(rc_json['res']) / RESISTANCE_FACTOR,
capacitance=float(rc_json['cap']) / CAPACITANCE_FACTOR,
)
return output
def is_pass_transistor(pip_json):
""" Returns boolean if pip JSON indicates pip is a pass transistor.
Always returns False if database lacks this information.
"""
if 'is_pass_transistor' in pip_json:
return bool(int(pip_json['is_pass_transistor']))
else:
return False
class Tile(object):
""" Provides abstration of a tile in the database. """
def __init__(self, tilename, tile_dbs):
self.tilename = tilename
self.tilename_upper = self.tilename.upper()
self.tile_dbs = tile_dbs
self.wires = None
self.sites = None
self.pips = None
self.pips_by_name = {}
def yield_sites(sites):
for site in sites:
site_pins = []
for name, site_pin_info in site['site_pins'].items():
data = get_site_pin_timing(site_pin_info)
if data is not None:
wire, timing = data
site_pins.append(
SitePin(
name=name,
wire=wire,
timing=timing,
))
else:
site_pins.append(
SitePin(
name=name,
wire=None,
timing=None,
))
yield Site(
name=site['name'],
prefix=site['prefix'],
type=site['type'],
x=site['x_coord'],
y=site['y_coord'],
site_pins=site_pins,
)
def yield_pips(pips):
for name, pip in pips.items():
yield Pip(
name=name,
net_to=pip['dst_wire'],
net_from=pip['src_wire'],
can_invert=bool(int(pip['can_invert'])),
is_directional=bool(int(pip['is_directional'])),
is_pseudo=bool(int(pip['is_pseudo'])),
is_pass_transistor=is_pass_transistor(pip),
timing=get_pip_timing(pip.get('src_to_dst')),
backward_timing=get_pip_timing(pip.get('dst_to_src')),
)
with open(self.tile_dbs.tile_type) as f:
tile_type = json.load(f)
assert self.tilename_upper == tile_type['tile_type']
self.wires = get_wires(tile_type['wires'])
self.sites = tuple(yield_sites(tile_type['sites']))
self.pips = tuple(yield_pips(tile_type['pips']))
self.wire_info = {}
    def get_wires(self):
        """Returns a dict mapping wire names in this tile to their timing (RcElement or None)."""
        return self.wires
def get_sites(self):
""" Returns tuple of Site namedtuple's present in this tile. """
return self.sites
def get_pips(self):
""" Returns tuple of Pip namedtuple's representing the PIPs in this tile.
"""
return self.pips
def get_pip_by_name(self, name):
if len(self.pips_by_name) == 0:
for pip in self.pips:
self.pips_by_name[pip.name] = pip
return self.pips_by_name[name]
def get_wire_info(self, target_wire, allow_pseudo=False):
if len(self.wire_info) == 0:
for wire in self.wires:
pips = list()
sites = list()
for site in self.sites:
for site_pin in site.site_pins:
if site_pin.wire == wire:
sites.append((site.name, site_pin.name))
for pip in self.pips:
pseudo_filter = (not pip.is_pseudo) or allow_pseudo
if (wire == pip.net_to
or wire == pip.net_from) and pseudo_filter:
pips.append(pip.name)
assert wire not in self.wire_info
self.wire_info[wire] = WireInfo(pips=pips, sites=sites)
return self.wire_info[target_wire]
def get_instance_sites(self, grid_info):
""" get_sites returns abstract sites for all tiles of type.
get_instance_sites converts site info from generic to specific
based on a tile location.
"""
site_names = set()
for site in self.sites:
site_name = '{}_X{}Y{}'.format(site.prefix, site.x, site.y)
origin_x, origin_y = lib.find_origin_coordinate(
site_name, grid_info.sites.keys())
x = site.x + origin_x
y = site.y + origin_y
site_name = '{}_X{}Y{}'.format(site.prefix, x, y)
if site_name not in grid_info.sites:
type_count = 0
for site_name_from_grid, site_type in grid_info.sites.items():
if site.type == site_type:
type_count += 1
site_name = site_name_from_grid
assert type_count == 1, (site_name, type_count)
site_names.add(site_name)
assert site.type == grid_info.sites[site_name]
yield Site(
name=site_name,
prefix=site.prefix,
type=site.type,
x=x,
y=y,
site_pins=site.site_pins,
)
assert site_names == set(grid_info.sites.keys())
def get_other_wire_from_pip(pip, wire):
if wire == pip.net_to:
return pip.net_from
elif wire == pip.net_from:
return pip.net_to
else:
assert False, (pip, wire)
|
nilq/baby-python
|
python
|
"""
Vishnu... Thank you for electronics.
Author: Manas Kumar Mishra
Task:- D(3) D--> Decimal number system.
"""
"""
Task :Apply the optical flow algorithm with shi-tumasi to define the motion path and
print that is there any change in the person or image is is motion or not.
"""
"""
Theory:-
Optical flow technique is for the motion tracking through the pixcel level analysis.
Basically it will generate the pattern of apparent motion of the objects in the image
by analysing the two consecutive frames.
There are few assumption which I am making:
1. There is not lighting intensity change.
2. There is no shadow of the object otherwise it will consider that shadow as an
another object.
3. No Rotional motion on the object otherwise it can't detect that motion. Like a sphere revolving around
it own axis.
In cv2 we have well define function/method for optical flow or KL tracking. Where it is using the
shi-tomasi corner detection and pyramid techniques for tracing the motion.
"""
import numpy as np
import cv2
cap = cv2.VideoCapture(0)
# For Shi-Tomasi corner detection
feature_param = dict(maxCorners =500,
qualityLevel=0.3,
minDistance = 7,
blockSize =7)
# Lucas-Kanade (KL) parameter definitions
lk_param = dict(winSize=(15,15),
maxLevel = 2,
criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))
# color = np.random.randint(0, 255, (100,3))
# Take first image as initial frame
ret, oldframe = cap.read()
oldgray = cv2.cvtColor(oldframe,cv2.COLOR_BGR2GRAY)
p0 = cv2.goodFeaturesToTrack(oldgray, mask = None, **feature_param)
mask =np.zeros_like(oldframe)
while True:
ret, newframe = cap.read()
framegray = cv2.cvtColor(newframe, cv2.COLOR_BGR2GRAY)
if p0 is None:
p0 = cv2.goodFeaturesToTrack(framegray, mask =None, **feature_param)
# Now apply the optical flow inbuilt function of the opencv
p1, st, err = cv2.calcOpticalFlowPyrLK(oldgray, framegray, p0, None, **lk_param)
if p1 is not None and p0 is not None:
try:
goodnew = p1[st==1]
goodold = p0[st==1]
# Now consider the difference of the tracking point positions such that we can define the motion.
diffpos = goodnew - goodold
            # Compare the elements of the array against the threshold:
            # displacement beyond +/-1.2 pixels is declared as motion.
comp1 = (diffpos < -1.2).astype(int)
comp2 = (diffpos > 1.2 ).astype(int)
comp = np.add(comp1,comp2)
# compare all elements of the array to null array.
is_all_zero = np.all((comp == 0))
if is_all_zero:
# print("No motion")
cv2.putText(newframe, 'No motion',(50, 50), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 0, 255), 2, cv2.LINE_AA)
# print(is_all_zero)
else:
# print("Motion")
cv2.putText(newframe, 'Motion',(50,50), cv2.FONT_HERSHEY_SIMPLEX, .5, (0, 255, 0), 2, cv2.LINE_AA)
except ValueError:
pass
        for i, (new, old) in enumerate(zip(goodnew, goodold)):
            a, b = new.ravel().astype(int)
            c, d = old.ravel().astype(int)
            mask = cv2.line(mask, (a, b), (c, d), (0, 255, 0), 2)
            # newframe = cv2.circle(newframe, (a,b), 5, (0,255,0), -1)
            newframe = cv2.arrowedLine(newframe, (a, b), (c, d), (255, 255, 0), 10, cv2.LINE_AA, 0, 2)
img = cv2.add(newframe, mask)
cv2.imshow('newframe', img)
oldgray = framegray.copy()
p0 = goodnew.reshape(-1,1,2)
# ino = ino+1
if cv2.waitKey(1)&0xff == ord('q'):
break
cap.release()
# Thank you
|
nilq/baby-python
|
python
|
import os
import yaml
import shlex
import re
def get_gputree_config():
"""Fetch host config from gputree configuration file if found.
Returns:
dict: The configuration dictionnary.
"""
if os.environ.get("GPUTREE_CONFIG_FILE"):
config_path = os.environ["GPUTREE_CONFIG_FILE"]
elif os.environ.get("XDG_CONFIG_HOME"):
        config_path = os.path.join(os.environ["XDG_CONFIG_HOME"], "gputree/config.yml")
else:
config_path = "~/.config/gputree/config.yml"
config_path = os.path.expanduser(config_path)
if not os.path.isfile(config_path):
return
with open(config_path, "r") as f:
config = yaml.safe_load(f)
return config
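# Example config.yml this loader expects (a hypothetical layout inferred from
# get_hosts_infos below; host names are made up):
#
# hosts:
#   from-ssh-config:
#     - gpu-server-1
#   lab-box:
#     user: alice
#     hostname: lab-box.example.org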
def get_ssh_config():
"""Get configuration from SSH config file.
Returns:
dict: Hosts informations from ssh config file.
Raises:
ValueError: If a line or host name in unparsable.
"""
PATH = "~/.ssh/config"
ssh_config_path = os.path.expanduser(PATH)
with open(ssh_config_path) as f:
ssh_config = {}
last_host = ""
for line in f:
line = line.strip()
if not line or line.startswith("#"):
continue
match = re.match(re.compile(r"(\w+)(?:\s*=\s*|\s+)(.+)"), line)
if not match:
raise ValueError("Unparsable line {}".format(line))
key = match.group(1).lower()
value = match.group(2)
if key == "host":
try:
current_host = shlex.split(value)[0]
except ValueError:
raise ValueError("Unparsable host {}".format(value))
ssh_config[current_host] = {}
last_host = current_host
else:
if value.startswith('"') and value.endswith('"'):
value = value[1:-1]
ssh_config[last_host][key] = value
return ssh_config
def get_hosts_infos(hosts: list):
"""Fetch hosts informations.
If no host is provided, look at configuration file. The configuration file
can refer to host defined in the ssh config file.
Args:
hosts (list): List of host with format "username@hostname".
Returns:
list[dict]: List of host informations.
Raises:
ValueError: If no host is found or host pass thru cli does not match format.
"""
output = []
if not hosts:
config_hosts = get_gputree_config()
if not config_hosts:
raise ValueError("Unable to find hosts.")
hosts = config_hosts["hosts"].get("from-ssh-config", [])
output = [
{**v, "name": k}
for k, v in config_hosts["hosts"].items()
if k != "from-ssh-config"
]
ssh_config = get_ssh_config()
for host in hosts:
if host in ssh_config:
host_infos = ssh_config[host]
output.append(
{
"name": host,
"user": host_infos["user"],
"hostname": host_infos["hostname"],
}
)
continue
match = re.match(r"^([\w|\.]+)\@([\w|\.|\-]+)$", host)
if not match:
raise ValueError(
"Invalid host '{}', does not match pattern username@hostname.".format(
host
)
)
user, hostname = match.groups()
output.append({"name": hostname, "user": user, "hostname": hostname})
return output
|
nilq/baby-python
|
python
|
from django.shortcuts import render
from httptest2.testmodule import tasks
from httptest2.testmodule.forms import TestModuleForm, DisplayModuleForm
import time
import json
# Create your views here.
def display_all(request):
if request.method == 'POST':
form = DisplayModuleForm(request.POST)
if form.is_valid():
choice = form.cleaned_data['choice']
if choice == 'DB':
result = tasks.get_delivery.delay()
while not result.ready():
time.sleep(3)
d = json.loads(result.result)
lst = []
for i in d:
lst.append(i['fields'])
else:
result = tasks.get_delivery_restapi.delay()
while not result.ready():
time.sleep(3)
lst = result.result
# python serializer
# d = json.loads(result.result)
# lst = []
# for i in d:
# lst.append(i['fields'])
return render(request, 'testmodule/index.html', {'form': form, 'delivery_list': lst})
else:
form = DisplayModuleForm()
return render(request, 'testmodule/index.html', {'form': form})
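# Note: the sleep-based polling above is a sketch; Celery's blocking getter,
# result.get(timeout=...), is the more usual way to wait for a task result.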
def insert_all(request):
if request.method == 'POST':
form = TestModuleForm(request.POST)
if form.is_valid():
insertnumber = form.cleaned_data['insertnumber']
choice = form.cleaned_data['choice']
if choice == 'DB':
result = tasks.insert_delivery.delay(int(insertnumber))
elif choice == 'BATCH':
result = tasks.insert_delivery_restapi.delay(int(insertnumber))
elif choice == 'ONE':
result = tasks.insert_delivery_restapi_single.delay(int(insertnumber))
            print(result.id)
else:
form = TestModuleForm()
return render(request, 'testmodule/inserttestmodel.html', {'form': form})
|
nilq/baby-python
|
python
|
import pytest
@pytest.fixture(autouse=True)
def xray(mocker):
"""
Disables AWS X-Ray
"""
mocker.patch('aws_xray_sdk.core.xray_recorder')
mocker.patch('aws_xray_sdk.core.patch_all')
@pytest.fixture(autouse=True)
def mock_boto3_client(mocker):
"""
Patches Boto3
"""
mocker.patch('boto3.client')
from boto3 import client
yield client
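# Example of a test that relies on the autouse fixtures above (a sketch;
# `my_module.publish` is a hypothetical function under test):
#
# def test_publish_uses_boto3(mock_boto3_client):
#     from my_module import publish
#     publish('hello')
#     mock_boto3_client.assert_called()   # boto3.client was patched by the fixture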
|
nilq/baby-python
|
python
|
import essentia
import numpy as np
from essentia.standard import *
import os
import re
class FeatureExtractor():
def __init__(self, train_folders=None, test_folders=None):
self.train_path = os.environ['IRMAS_TRAIN']
self.test_path = os.environ['IRMAS_TEST']
self.train_folders = train_folders
self.test_folders = test_folders
        self.num_classes = len(train_folders) if train_folders else 0
self.train_X = None
self.test_X = None
self.train_y = None
self.test_y = None
def __get_label_from_txt(self, file_path):
"""
Reads text from file at file_path
Uses first line as label
"""
labels = []
with open(file_path, "r") as file:
for line in file:
labels.append(line.strip('\t\n'))
return labels
def __get_labels_from_name(self, file):
return re.findall(r"\[([A-Za-z0-9_]+)\]", file)
def __list_files(self, path):
return tuple(os.listdir(path))
def __extract_features(self, file, folder):
full_file_path = folder + file
# NEW
file_loader = MonoLoader(filename=full_file_path)
file_audio = file_loader()
window = Windowing(type='hann')
spectrum = Spectrum()
mfcc = MFCC()
spec_cont = SpectralContrast()
pool = essentia.Pool()
for frame in FrameGenerator(file_audio, frameSize=2048, hopSize=512, startFromZero=True):
spec = spectrum(window(frame))
# MFCC
mfcc_bands, mfcc_coeffs = mfcc(spec)
# Spectral Contrast
spec_coef, spec_valley = spec_cont(spec)
# Save
pool.add('lowlevel.mfcc', mfcc_coeffs)
pool.add('lowlevel.mfcc_bands', mfcc_bands)
pool.add('lowlevel.spec', spec_coef)
# OLD
# file_loader = MonoLoader(filename=full_file_path)
# frameCutter = FrameCutter(frameSize=1024, hopSize=512)
# w = Windowing(type='hann')
# spec = Spectrum()
# specCont = SpectralContrast()
# mfcc = MFCC()
# pool = essentia.Pool()
# file_loader.audio >> frameCutter.signal
# frameCutter.frame >> w.frame >> spec.frame
# spec.spectrum >> mfcc.spectrum
# mfcc.bands >> (pool, 'lowlevel.mel_bands')
# mfcc.mfcc >> (pool, 'lowlevel.mfcc')
# essentia.run(file_loader)
return pool['lowlevel.mfcc'], pool['lowlevel.mfcc_bands'], pool['lowlevel.spec']
def load_training_data(self):
"""
Reads trainPath and tainFolders to parse traning files
"""
data = np.empty((0, 59))
labels = np.empty((0, self.num_classes))
for folder in self.train_folders:
files_in_folder = self.__list_files(self.train_path + folder)
for file in files_in_folder:
file_label = self.__get_labels_from_name(file)
for label in list(file_label):
if label + "/" in self.train_folders:
continue
else:
file_label.remove(label)
while len(file_label) < self.num_classes:
file_label.append('')
mfccs, mel_bands, specs = self.__extract_features(file, self.train_path + folder)
mfccs = np.mean(mfccs, axis=0)
mel_bands = np.mean(mel_bands, axis=0)
specs = np.mean(specs, axis=0)
features = np.hstack([mfccs, mel_bands, specs])
data = np.vstack([data, features])
labels = np.vstack((labels, file_label))
self.train_X = data
self.train_y = labels
return data, labels
def load_testing_data(self):
"""
Reads testPath and testFolder to parse test folders
"""
data = np.empty((0, 59))
labels = np.empty((0, self.num_classes))
for folder in self.test_folders:
files_in_folder = self.__list_files(self.test_path + folder)
proper_files = []
for file in files_in_folder:
if file.endswith(".txt"):
proper_files.append(file[:-4])
for file in proper_files:
file_label = self.__get_label_from_txt(self.test_path + folder + file + ".txt")
isValid = False
for train in self.train_folders:
for label in file_label:
if train[:-1] == label:
isValid = True
break
if not isValid:
continue
mfccs, bands, specs = self.__extract_features(file + ".wav", self.test_path + folder)
mfccs = np.mean(mfccs, axis=0)
bands = np.mean(bands, axis=0)
specs = np.mean(specs, axis=0)
for label in list(file_label):
if label + "/" in self.train_folders:
continue
else:
file_label.remove(label)
                while len(file_label) < self.num_classes:
file_label.append('')
features = np.hstack([mfccs, bands, specs])
data = np.vstack([data, features])
labels = np.vstack([labels, file_label])
self.test_X = data
self.test_y = labels
return data, labels
def load_test_train_data(self):
self.load_training_data()
self.load_testing_data()
return self.train_X, self.test_X, self.train_y, self.test_y
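# Example usage (a sketch; IRMAS_TRAIN/IRMAS_TEST must point at the IRMAS
# dataset, and the folder names below are illustrative):
#
# extractor = FeatureExtractor(train_folders=['gel/', 'pia/', 'sax/'],
#                              test_folders=['Part1/'])
# train_X, test_X, train_y, test_y = extractor.load_test_train_data()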
|
nilq/baby-python
|
python
|
# DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#407. Trapping Rain Water II
#Given an m x n matrix of positive integers representing the height of each unit cell in a 2D elevation map, compute the volume of water it is able to trap after raining.
#Note:
#Both m and n are less than 110. The height of each unit cell is greater than 0 and is less than 20,000.
#Example:
#Given the following 3x6 height map:
#[
# [1,4,3,1,3,2],
# [3,2,1,3,2,4],
# [2,3,3,2,3,1]
#]
#Return 4.
#The above image represents the elevation map [[1,4,3,1,3,2],[3,2,1,3,2,4],[2,3,3,2,3,1]] before the rain.
#After the rain, water is trapped between the blocks. The total volume of water trapped is 4.
#class Solution:
# def trapRainWater(self, heightMap):
# """
# :type heightMap: List[List[int]]
# :rtype: int
# """
# Time Is Money
|
nilq/baby-python
|
python
|
"""
Track Atmosphere transactions across our system.
"""
from django.db import models, transaction
from uuid import uuid1
from django.utils import timezone
class T(models.Model):
"""
Track Atmosphere transactions across our system.
"""
# A unique UUID (V)alue for the transaction.
V = models.CharField(max_length=36)
start_date = models.DateTimeField(default=timezone.now)
end_date = models.DateTimeField(null=True)
def __unicode__(self):
return "%s: %s - %s" %\
(self.V, self.start_date, self.end_date)
@classmethod
def create(cls):
return cls(V=uuid1())
@classmethod
def get(cls):
t = T.create()
with transaction.atomic():
t.save()
return t
class Meta:
db_table = "transaction"
app_label = "core"
|
nilq/baby-python
|
python
|
from collections import deque
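# Tree distance-parity queries: 2-color the tree by a traversal from node 1
# (n_city[v] holds the depth parity of v). The path between c and d has even
# length iff they share a color, in which case "Town" is printed, else "Road".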
N, Q = map(int, input().split())
city = [[] for _ in range(N + 1)]
for i in range(N - 1):
a, b = map(int, input().split())
city[a].append(b)
city[b].append(a)
n_city = [-1] * (N + 1)
q = deque([])
q.append(1)
n_city[1] = 0
while q:
x = q.pop()
p = n_city[x]
for i in city[x]:
if n_city[i] != -1:
continue
q.append(i)
n_city[i] = p ^ 1
for i in range(Q):
c, d = map(int, input().split())
if n_city[c] == n_city[d]:
print("Town")
else:
print("Road")
|
nilq/baby-python
|
python
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetScalingPlanResult',
'AwaitableGetScalingPlanResult',
'get_scaling_plan',
]
@pulumi.output_type
class GetScalingPlanResult:
"""
Represents a scaling plan definition.
"""
def __init__(__self__, description=None, exclusion_tag=None, friendly_name=None, host_pool_references=None, host_pool_type=None, id=None, location=None, name=None, schedules=None, tags=None, time_zone=None, type=None):
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if exclusion_tag and not isinstance(exclusion_tag, str):
raise TypeError("Expected argument 'exclusion_tag' to be a str")
pulumi.set(__self__, "exclusion_tag", exclusion_tag)
if friendly_name and not isinstance(friendly_name, str):
raise TypeError("Expected argument 'friendly_name' to be a str")
pulumi.set(__self__, "friendly_name", friendly_name)
if host_pool_references and not isinstance(host_pool_references, list):
raise TypeError("Expected argument 'host_pool_references' to be a list")
pulumi.set(__self__, "host_pool_references", host_pool_references)
if host_pool_type and not isinstance(host_pool_type, str):
raise TypeError("Expected argument 'host_pool_type' to be a str")
pulumi.set(__self__, "host_pool_type", host_pool_type)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if schedules and not isinstance(schedules, list):
raise TypeError("Expected argument 'schedules' to be a list")
pulumi.set(__self__, "schedules", schedules)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if time_zone and not isinstance(time_zone, str):
raise TypeError("Expected argument 'time_zone' to be a str")
pulumi.set(__self__, "time_zone", time_zone)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description of scaling plan.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="exclusionTag")
def exclusion_tag(self) -> Optional[str]:
"""
Exclusion tag for scaling plan.
"""
return pulumi.get(self, "exclusion_tag")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[str]:
"""
User friendly name of scaling plan.
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter(name="hostPoolReferences")
def host_pool_references(self) -> Optional[Sequence['outputs.ScalingHostPoolReferenceResponse']]:
"""
List of ScalingHostPoolReference definitions.
"""
return pulumi.get(self, "host_pool_references")
@property
@pulumi.getter(name="hostPoolType")
def host_pool_type(self) -> Optional[str]:
"""
HostPool type for scaling plan.
"""
return pulumi.get(self, "host_pool_type")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def schedules(self) -> Optional[Sequence['outputs.ScalingScheduleResponse']]:
"""
List of ScalingSchedule definitions.
"""
return pulumi.get(self, "schedules")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="timeZone")
def time_zone(self) -> Optional[str]:
"""
Timezone of the scaling plan.
"""
return pulumi.get(self, "time_zone")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetScalingPlanResult(GetScalingPlanResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetScalingPlanResult(
description=self.description,
exclusion_tag=self.exclusion_tag,
friendly_name=self.friendly_name,
host_pool_references=self.host_pool_references,
host_pool_type=self.host_pool_type,
id=self.id,
location=self.location,
name=self.name,
schedules=self.schedules,
tags=self.tags,
time_zone=self.time_zone,
type=self.type)
def get_scaling_plan(resource_group_name: Optional[str] = None,
scaling_plan_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetScalingPlanResult:
"""
Represents a scaling plan definition.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str scaling_plan_name: The name of the scaling plan.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['scalingPlanName'] = scaling_plan_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:desktopvirtualization/v20210114preview:getScalingPlan', __args__, opts=opts, typ=GetScalingPlanResult).value
return AwaitableGetScalingPlanResult(
description=__ret__.description,
exclusion_tag=__ret__.exclusion_tag,
friendly_name=__ret__.friendly_name,
host_pool_references=__ret__.host_pool_references,
host_pool_type=__ret__.host_pool_type,
id=__ret__.id,
location=__ret__.location,
name=__ret__.name,
schedules=__ret__.schedules,
tags=__ret__.tags,
time_zone=__ret__.time_zone,
type=__ret__.type)
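# Example invocation (a sketch; resource names are hypothetical):
#
# plan = get_scaling_plan(resource_group_name='my-rg', scaling_plan_name='my-plan')
# pulumi.export('scaling_plan_id', plan.id)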
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""driver drowsiness detection
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Yjex8oAKte4yIZu91YXjJLsZLlR7pa0y
"""
from keras.models import Sequential
from keras.layers import MaxPool2D,Dropout,BatchNormalization,Dense,Conv2D,Flatten
import numpy as np
import glob
from PIL import Image
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
s=Image.open('/content/drive/MyDrive/DL Practice/driver drowsiness detection/data/dataset_new/test/Closed/_719.jpg')
n=np.asarray(s)
n=np.array(n)
n.shape
plt.imshow(n)
train_datagen=ImageDataGenerator(rescale=1./255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
test_datagen=ImageDataGenerator(rescale=1./255)
train_dataset=train_datagen.flow_from_directory(directory='/content/drive/MyDrive/DL Practice/driver drowsiness detection/data/dataset_new/train',
target_size=(24,24),
batch_size=32,
class_mode='categorical')
test_dataset=test_datagen.flow_from_directory('/content/drive/MyDrive/DL Practice/driver drowsiness detection/data/dataset_new/test',
target_size=(24,24),
batch_size=32,
class_mode='categorical')
folders=glob.glob('/content/drive/MyDrive/DL Practice/driver drowsiness detection/data/dataset_new/train/*')
len(folders)
folders
model=Sequential()
model.add(Conv2D(filters=32,kernel_size=(3,3),activation='relu',input_shape=(24,24,3)))
model.add(MaxPool2D(pool_size=(1,1)))
model.add(Conv2D(32,(3,3),activation='relu'))
model.add(MaxPool2D(pool_size=(1,1)))
model.add(Conv2D(64,(3,3),activation='relu'))
model.add(MaxPool2D((1,1)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128,activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(2,activation='softmax'))
model.compile(optimizer='adam',loss='categorical_crossentropy',metrics=['accuracy'])
model.summary()
model.fit_generator(train_dataset, validation_data=test_dataset,epochs=15,steps_per_epoch=len(train_dataset) ,validation_steps=len(test_dataset))
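# Note: in recent Keras versions fit_generator is deprecated; model.fit accepts
# the same generator arguments directly.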
|
nilq/baby-python
|
python
|
# Constructors
'''
class carro:
    def __init__(self,portas,preço,):
        self.numero_portas = portas
        self.preço = preço
        print("Object instantiated successfully")
    def get_numero_portas(self):
        return self.numero_portas
carro1 = carro(6,50000,)
portas_carro1 = carro1.get_numero_portas()
print("My car has %d doors" %portas_carro1)
print(20*"=")
carro2 = carro(2,70000)
portas_carro2 = carro2.get_numero_portas()
print("My car has %d doors" %portas_carro2)
================================================
# Accessor methods
import array as a
meu_array = a.array('i',[1,2,3,4,5,2])
print(meu_array)
print(meu_array.index(2)) # Returns the index of the first element equal to 2
print(meu_array.count(2)) # Returns how many times the number appears
'''
# Creating a Carro class
class carro:
    def __init__(self,portas,valor):
        self.numero_portas = portas
        self.valor = valor
        print("Object created successfully")
    def get_numero_portas(self):
        return self.numero_portas
    def set_numero_portas(self,novo_numero_portas):
        self.numero_portas = novo_numero_portas
carro1 = carro(2,60000)
print("The number of doors is", carro1.get_numero_portas())
carro1.set_numero_portas(5) # Changes the number of doors
print("The new number of doors is", carro1.get_numero_portas())
|
nilq/baby-python
|
python
|
import torch
import torch.nn as nn
from torch.nn import init
import torch.nn.functional as F
from library.text.modules.base.rnn import lstm_encoder
INI = 1e-2
class ConvSentEncoder(nn.Module):
"""
Convolutional word-level sentence encoder
w/ max-over-time pooling, [3, 4, 5] kernel sizes, ReLU activation
"""
def __init__(self, vocab_size, emb_dim, n_hidden, dropout):
"""
:param vocab_size:
:param emb_dim:
:param n_hidden:
:param dropout:
"""
super().__init__()
self._embedding = nn.Embedding(vocab_size, emb_dim, padding_idx=0)
self._convs = nn.ModuleList([nn.Conv1d(emb_dim, n_hidden, i)
for i in range(3, 6)])
self._dropout = dropout
self._grad_handle = None
def forward(self, input_):
"""
:param input_:
:return:
"""
emb_input = self._embedding(input_)
conv_in = F.dropout(emb_input.transpose(1, 2),
self._dropout, training=self.training)
output = torch.cat([F.relu(conv(conv_in)).max(dim=2)[0]
for conv in self._convs], dim=1)
return output
def set_embedding(self, embedding):
"""
:param embedding:
:return:
"""
"""embedding is the weight matrix"""
assert self._embedding.weight.size() == embedding.size()
self._embedding.weight.data.copy_(embedding)
class LSTMEncoder(nn.Module):
def __init__(self, input_dim, n_hidden, n_layer, dropout, bidirectional):
"""
:param input_dim:
:param n_hidden:
:param n_layer:
:param dropout:
:param bidirectional:
"""
super().__init__()
self._init_h = nn.Parameter(
torch.Tensor(n_layer*(2 if bidirectional else 1), n_hidden))
self._init_c = nn.Parameter(
torch.Tensor(n_layer*(2 if bidirectional else 1), n_hidden))
init.uniform_(self._init_h, -INI, INI)
init.uniform_(self._init_c, -INI, INI)
self._lstm = nn.LSTM(input_dim, n_hidden, n_layer,
dropout=dropout, bidirectional=bidirectional)
def forward(self, input_, in_lens=None):
"""
:param input_:
:param in_lens:
:return:
"""
""" [batch_size, max_num_sent, input_dim] Tensor"""
size = (self._init_h.size(0), input_.size(0), self._init_h.size(1))
init_states = (self._init_h.unsqueeze(1).expand(*size),
self._init_c.unsqueeze(1).expand(*size))
lstm_out, _ = lstm_encoder(input_, self._lstm, in_lens, init_states)
return lstm_out.transpose(0, 1)
@property
def input_size(self):
return self._lstm.input_size
@property
def hidden_size(self):
return self._lstm.hidden_size
@property
def num_layers(self):
return self._lstm.num_layers
@property
def bidirectional(self):
return self._lstm.bidirectional
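# Shape sanity check for ConvSentEncoder (a sketch): a batch of 4 sentences of
# 20 token ids each yields 16 channels per kernel size * 3 kernel sizes = 48 dims.
#
# enc = ConvSentEncoder(vocab_size=100, emb_dim=32, n_hidden=16, dropout=0.1)
# out = enc(torch.randint(0, 100, (4, 20)))
# out.shape  # torch.Size([4, 48])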
|
nilq/baby-python
|
python
|
from django.contrib import admin
from django.contrib import messages
from django.utils.translation import ngettext
from .models import Category, Product
# Register your models here.
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
prepopulated_fields = {'slug':('name',)}
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
list_display = ('name', 'price', 'available')
list_filter = ('available', 'created')
list_editable = ('price', 'available')
prepopulated_fields = {'slug':('name',)}
raw_id_fields = ('category',)
actions = ['make_published', 'make_draft']
    @admin.action(description='Mark selected stories as available')
def make_published(self, request, queryset):
updated = queryset.update(available=True)
self.message_user(request, ngettext(
'%d story was successfully marked as available.',
            '%d stories were successfully marked as available.',
updated,
) % updated, messages.SUCCESS)
    @admin.action(description='Mark selected stories as unavailable')
def make_draft(self, request, queryset):
updated = queryset.update(available=False)
self.message_user(request, ngettext(
'%d story was successfully marked as unavailable.',
            '%d stories were successfully marked as unavailable.',
updated,
) % updated, messages.SUCCESS)
|
nilq/baby-python
|
python
|
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
from collections import defaultdict
MAIN_NAMESPACE_NAME = "main"
NAME_TO_SYMBOL_MAPPING = defaultdict(dict)
class api_export(object):
"""Provides ways to export symbols to the ovmsclient API."""
def __init__(self, name, **kwargs):
self.main_name = name
self.namespaced_names = kwargs
def __call__(self, func):
# Create mapping for main namespace
NAME_TO_SYMBOL_MAPPING[MAIN_NAMESPACE_NAME][self.main_name] = (self.main_name, func)
# Create mapping for additional namespaces
for namespace, namespaced_name in self.namespaced_names.items():
NAME_TO_SYMBOL_MAPPING[namespace][namespaced_name] = (self.main_name, func)
return func
ovmsclient_export = functools.partial(api_export)
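# Example (a sketch; the names are hypothetical): export make_grpc_client as
# ovmsclient.make_grpc_client and as ovmsclient.grpc.make_client.
#
# @ovmsclient_export("make_grpc_client", grpc="make_client")
# def make_grpc_client(url):
#     ...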
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api, exceptions
class LibraryBook(models.Model):
_name = "library.book"
name = fields.Char(string="Name")
active = fields.Boolean("Is active", default=True)
image = fields.Binary()
pages = fields.Integer(string="# Pages")
isbn = fields.Char(string="ISBN", size=13)
description = fields.Html(string="Description")
category_id = fields.Many2one("library.category", string="Category")
    @api.constrains('name')
    def check_name(self):
        # Constraint methods receive a recordset, so iterate over it.
        for record in self:
            if not record.name:
                raise exceptions.ValidationError(
                    "Name must be filled!!!"
                )
    @api.constrains('pages')
    def check_pages(self):
        for record in self:
            if record.pages <= 0:
                raise exceptions.ValidationError(
                    "Pages must be > 0!!!"
                )
|
nilq/baby-python
|
python
|
# Copyright 2010 Chet Luther <chet.luther@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import reactor
from twistedsnmp import agent, agentprotocol, bisectoidstore, datatypes
from twistedsnmp.pysnmpproto import v2c, rfc1902
import sys
import os
import re
import csv
# twistedsnmp has a bug that causes it to fail to properly convert
# Counter64 values. We workaround this by retroactively fixing datatypes
# mappings.
fixed_v2Mapping = []
for datatype, converter in datatypes.v2Mapping:
if datatype == v2c.Counter64:
fixed_v2Mapping.append(
(datatype, datatypes.SimpleConverter(v2c.Counter64)))
else:
fixed_v2Mapping.append((datatype, converter))
datatypes.v2Mapping = fixed_v2Mapping
fixed_v1Mapping = [(rfc1902.Counter64, datatypes.SimpleConverter(v2c.Counter64))]
for datatype, converter in datatypes.v1Mapping:
if datatype != rfc1902.Counter64:
fixed_v1Mapping.append((datatype, converter))
datatypes.v1Mapping = fixed_v1Mapping
def sanitize_dotted(string):
'''
Return dotted decimal strings with non-numerics replaced with 1.
This is necessary because some snmpwalk output files have had IP
addresses obscured with non-numeric characters.
'''
return re.sub(r'[^ \.\da-fA-F]', '1', string)
class SNMPosterFactory:
agents = []
def configure(self, filename):
reader = csv.reader(open(filename, "rb"))
for row in reader:
if row[0].startswith('#'):
continue
self.agents.append({
'filename': row[0],
'ip': row[1]})
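    # Example configuration CSV for configure() (a sketch):
    #
    #   # snmpwalk_output_file,ip
    #   walks/router1.walk,127.0.1.1
    #   walks/switch2.walk,127.0.1.2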
def start(self):
for a in self.agents:
print "Starting %s on %s." % (a['filename'], a['ip'])
if os.uname()[0] == 'Darwin':
os.popen("ifconfig lo0 alias %s up" % (a['ip'],))
elif os.uname()[0] == 'Linux':
os.popen("/sbin/ip addr add %s dev lo" % (a['ip'],))
else:
print "WARNING: Unable to add loopback alias on this platform."
faker = SNMPoster(a['ip'], a['filename'])
faker.run()
daemonize()
reactor.run()
class SNMPoster:
oidData = {}
sortedOids = []
def __init__(self, ip, filename):
self.ip = ip
self.oids = {}
oid = ''
type_ = ''
value = []
snmpwalk = open(filename, 'r')
for line in snmpwalk:
line = line.rstrip()
# Typed match.
match = re.search(r'^([^ ]+) = ([^\:]+):\s*(.*)$', line)
if not match:
# Untyped match.
match = re.search(r'^([^ ]+) = (.*)$', line)
if match:
if len(value) > 0:
self.add_oid_value(oid, type_, value)
oid = ''
type_ = ''
value = []
groups = match.groups()
if len(groups) == 3:
oid, type_, value1 = groups
elif groups[1].startswith('"') and groups[1].endswith('"'):
oid, type_, value1 = (groups[0], 'STRING', groups[1])
else:
oid, type_, value1 = (groups[0], 'INTEGER', groups[1])
oid = sanitize_dotted(oid)
if type_ == 'Timeticks':
value1 = re.search(r'^\((\d+)\) .*$', value1).groups()[0]
value.append(value1.strip('"'))
else:
value.append(line.strip('"'))
snmpwalk.close()
if oid and type_:
self.add_oid_value(oid, type_, value)
def add_oid_value(self, oid, type_, value):
if type_ == 'Counter32':
self.oids[oid] = v2c.Counter32(self.tryIntConvert(value[0]))
elif type_ == 'Counter64':
self.oids[oid] = rfc1902.Counter64(long(value[0]))
elif type_ == 'Gauge32':
self.oids[oid] = v2c.Gauge32(self.tryIntConvert(value[0]))
elif type_ == 'Hex-STRING':
value = [sanitize_dotted(x) for x in value]
self.oids[oid] = ''.join(
[chr(int(c, 16)) for c in ' '.join(value).split(' ')])
elif type_ == 'INTEGER':
self.oids[oid] = self.tryIntConvert(value[0])
elif type_ == 'IpAddress':
value[0] = sanitize_dotted(value[0])
self.oids[oid] = v2c.IpAddress(value[0])
elif type_ == 'OID':
self.oids[oid] = v2c.ObjectIdentifier(value[0])
elif type_ == 'STRING':
self.oids[oid] = '\n'.join(value)
elif type_ == 'Timeticks':
self.oids[oid] = v2c.TimeTicks(int(value[0]))
def tryIntConvert(self, myint):
conv = -1
try:
conv = int(myint)
except:
m = re.match(".*\((?P<myint>\d+)\).*|(?P<myint2>\d+).*", myint)
if m:
myint2 = m.groupdict()["myint"] or m.groupdict()["myint2"]
try:
conv = int(myint2)
except:
pass
return conv
def start(self):
reactor.listenUDP(
161, agentprotocol.AgentProtocol(
snmpVersion='v2c',
agent=agent.Agent(
dataStore=bisectoidstore.BisectOIDStore(
OIDs=self.oids,
),
),
),
interface=self.ip,
)
def run(self):
reactor.callWhenRunning(self.start)
def daemonize():
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
os.chdir("/")
os.setsid()
os.umask(0)
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError, e:
print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
sys.exit(1)
|
nilq/baby-python
|
python
|
import frappe
import json
import frappe.utils
from frappe import _
from frappe.model.naming import make_autoname
import frappe.defaults
import phonenumbers
from frappe.utils import encode
from erpnext.selling.doctype.customer.customer import get_customer_outstanding
@frappe.whitelist(allow_guest=True)
def get_customer_credit_limit_with_oustanding(so):
sales_order=frappe.get_doc("Sales Order",so)
cust=frappe.get_doc("Customer",sales_order.customer)
credit_limit= cust.credit_limit
name=cust.name
company=sales_order.company
outstanding_amount = get_customer_outstanding(name, company)
# print "Outstangiing Amount",outstanding_amount
# print"outstanding is", get_customer_outstanding(name, company)
# print "Credit Limit is",credit_limit
available_amount=credit_limit-outstanding_amount
# print "available_amount",available_amount
if sales_order.grand_total>available_amount:
# print "Outstanding"
return 0
else:
# print "No Outstanding"
return 1
@frappe.whitelist()
def create_sal_slip(doc):
"""
Creates salary slip for selected employees if already not created
"""
doc1=json.loads(doc)
pp=frappe.get_doc("Process Payroll",doc1.get('name'))
emp_list=pp.get_emp_list()
# emp_list = []
ss_list = []
for emp in emp_list:
employee=frappe.get_doc("Employee",emp[0])
# if employee.esi_ip_number:
# print "ESI IP",employee.esi_ip_number
# if not frappe.db.sql("""select name from `tabSalary Slip`
# where docstatus!= 2 and employee = %s and month = %s and fiscal_year = %s and company = %s
# """, (emp[0], doc1.get('month'), doc1.get('fiscal_year'), doc1.get('company')):
# ss = frappe.get_doc({
# "doctype": "Salary Slip",
# "fiscal_year": doc.fiscal_year,
# "employee": emp[0],
# "month": doc.month,
# "company": doc.get("company"),
# "esi_ip_number":employee.esi_ip_number,
# "pan":employee.pan
# # "epfo_pf_account_number":emp[0].epfo_pf_account_number,
# # "esi_ip_number":emp[0].esi_ip_number,
# # "pan":e[0].pan
# })
# # print "employee",emp[0].employee_name
# ss.insert()
# ss_list.append(ss.name)
# return doc.create_log(ss_list)
def customer_validation(doc,method):
roles=frappe.get_roles(frappe.session.user)
if "Distributer" in roles:
if doc.customer_group=="Distributer" or doc.customer_group=="Super Stockist":
frappe.throw(_("You can not create a Distributor or Super Stockist"))
if doc.customer_group=="Distributer":
company_check=frappe.db.get_value("Company",{"company_name":doc.customer_name},"company_name")
if not company_check:
company=frappe.new_doc("Company")
company.company_name=doc.customer_name
company.abbr=doc.customer_name[0:5]
company.default_currency="INR"
company.save()
def delivery_note_submit(doc,method):
customer=frappe.get_doc("Customer",doc.customer)
if customer.customer_group=="Distributer":
se=frappe.new_doc("Stock Entry")
se.purpose="Material Receipt"
se.posting_date=frappe.utils.nowdate()
se.posting_time=frappe.utils.nowtime()
se.company=customer.customer_name
# se.from_warehouse="Finished Goods"+ " - " + customer.customer_name[5]
# se.from_warehouse = "Stores - GIPL"
for raw in doc.get("items"):
se_items = se.append('items', {})
se_items.item_code=raw.item_code
se_items.qty=raw.qty
se_items.uom=raw.stock_uom
se_items.t_warehouse="Finished Goods" + " " + "-" + " " + doc.customer_name[0:5]
se_items.cost_center="Main" + " " + "-" + " " + doc.customer_name[0:5]
se.save()
se.submit()
def employee_autoname(doc,method):
# frappe.errprint("Inside autoname emp ")
total=0
company_code=str(frappe.db.get_value("Company",{"name":doc.company},"code"))
    employment_code = doc.code
    number = make_autoname(company_code + str(employment_code) + '.####')
    # temp_code=number.split()
    # EAN-style check digit: alternate weights 3 and 1 across the digits.
    j = 1
    for i in number:
        if j % 2 == 0:
            total = total + int(i) * 1
        else:
            total = total + int(i) * 3
        j += 1
sum_last_digit=total%10
if sum_last_digit==0:
check_digit=0
else:
check_digit=10-sum_last_digit
doc.name=str(number)+str(check_digit)
@frappe.whitelist()
def item_autoname(brand):
brand_code=str(frappe.db.get_value("Brand",{"name":brand},"code"))
company_code=str(frappe.db.get_value("Company",{"company_name":frappe.defaults.get_defaults().get("company")},"code"))
    substring_item_code = make_autoname(brand_code + '.####')
    item_code = str(substring_item_code) + str(company_code)
return item_code
def so_validate(doc,method):
print "so validate 2222222222222222222222222222222222222222"
# print "Inside ------------------------"
if doc.company!=frappe.defaults.get_defaults().get("company"):
# print "Inside if "
for raw in doc.get("items"):
raw.warehouse="Finished Goods" + " " + "-" + " " + doc.company[0:5]
# print raw.warehouse,"Arpit_____________________"
def employement_type_code_check(doc,method):
code=frappe.db.get_value("Employment Type",{"code":doc.code},"code")
if code==doc.code:
frappe.throw(_("Employment Code already present ,please choose diffrent code"))
def customer_filter(doctype, txt, searchfield, start, page_len, filters):
    data = frappe.db.sql("""select name from `tabCustomer` where customer_group != 'Distributer'""")
return data
def make_title_case(doc, method):
    title_case_format(doc)
def title_case_documents():
documents = {
"Customer":"customer_name", "Employee":"employee_name",
"Sales Person":"sales_person_name", "Lead":"lead_name",
"User":"full_name","Supplier": "supplier_name",
"Contact":"first_name", "Sales Partner":"partner_name"
}
return documents
def title_case_format(doc):
docs = title_case_documents()
if doc.doctype in docs.keys():
field = docs[doc.doctype]
if field:
doc.update({field: doc.get(field).title()})
def generate_calling_code(doc, method):
country = frappe.defaults.get_defaults().get("country")
docs = phone_format_docs()
if doc.doctype in docs:
if country:
country_code = (frappe.db.get_value("Country", country, "code")).upper()
field = docs[doc.doctype]
if field and type(field) == list:
for f in field:
if doc.get(f):
x = phonenumbers.parse(encode(doc.get(f)), (encode(country_code)))
no_format = phonenumbers.format_number(x, phonenumbers.PhoneNumberFormat.INTERNATIONAL)
doc.update({f: no_format})
elif field and doc.get(field):
x = phonenumbers.parse(encode(doc.get(field)), (encode(country_code)))
no_format = phonenumbers.format_number(x, phonenumbers.PhoneNumberFormat.INTERNATIONAL)
doc.update({field: no_format})
def phone_format_docs():
docs = {
"Address":"phone", "Contact":["mobile_no", "phone"], "Employee": "cell_number",
"Lead": ["phone", "mobile_no"]
}
return docs
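# For example (a sketch): with the default country set to India,
# generate_calling_code reformats a Contact mobile_no like '9876543210'
# into '+91 98765 43210' (phonenumbers' INTERNATIONAL format).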
|
nilq/baby-python
|
python
|
from PyQt5 import QtGui,QtWidgets,QtCore
from PyQt5.QtWidgets import QApplication,QRadioButton,QPushButton,QListWidget,QDial,QSpinBox,QLCDNumber,QMessageBox,QLabel
from PyQt5.QtGui import QPixmap
import sys
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
import numpy as np
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
import matplotlib.pyplot as plt
import time
import threading
from digi.xbee.devices import *
import serial
import platform
class window(QtWidgets.QMainWindow):
def __init__(self):
super(window,self).__init__()
self.currentlocal=0
self.data=None
self.checker=0
self.lastcolored=0
self.photo = QLabel(self)
self.port=0
self.pixmap = QPixmap('photo.png')
self.pixmap = self.pixmap.scaled(600, 300, QtCore.Qt.KeepAspectRatio)
self.photo.setPixmap(self.pixmap)
self.labelgif=QLabel(self)
self.labelgif.setStyleSheet("QLabel { background-color : white;}");
self.labelgif.setGeometry(100,50,500,430)
self.movie = QtGui.QMovie('data.gif', QtCore.QByteArray(),self.labelgif)
self.movie.setSpeed(100)
self.labelgif.setMovie(self.movie)
self.movie.start()
self.labelgif.setVisible(False)
self.labelyazi=QLabel(self)
self.labelgif1=QLabel(self)
self.labelgif1.setStyleSheet("QLabel { background-color : white;}")
self.labelyazi.setText('G'+"\u00F6"+"zl"+"\u0259"+"yin..")
font1=QtGui.QFont('Times',17)
self.labelyazi.setFont(font1)
self.labelyazi.setVisible(False)
self.labelyazi.setGeometry(350,150,150,60)
self.labelgif1.setGeometry(150,100,489,289)
self.movie1 = QtGui.QMovie('wait.gif', QtCore.QByteArray(),self.labelgif1)
self.movie1.setSpeed(100)
self.labelgif1.setMovie(self.movie1)
self.movie1.start()
self.labelgif1.setVisible(False)
self.setWindowTitle("Diplom i\u015Fi v1")
self.setWindowIcon(QtGui.QIcon('pyicon.png'))
        self.button = QPushButton('PyQt5 button', self)  # button created
        self.listw = QListWidget(self)  # listWidget created
self.button1=QPushButton(self)
self.buttonlocal=QPushButton(self)
self.buttonlocal.setText('Qo\u015F')
self.button1.setText("Temperaturu"+" " +"\u00F6"+"l"+"\u00E7")
self.dial=QDial(self)
self.lcd=QLCDNumber(self)
self.label=QLabel(self)
self.labelrefresh=QLabel(self)
self.obj=[]
self.listCOM=QListWidget(self)
self.spin=QSpinBox(self)
self.radiosan=QRadioButton(self)
self.radiosan.setText("Saniy"+"\u0259")
self.radiodeq=QRadioButton(self)
self.radiodeq.setText("D"+"\u0259"+"qiq"+"\u0259")
self.starting()
self.initUI()
def initUI(self):
self.setFixedSize(700,500)
self.dial.setNotchesVisible(True)
self.labelrefresh.setText('Yenil\u0259m\u0259k \u00FC\u00E7\u00FCn F5 d\u00FCym\u0259sini s\u0131x\u0131n ')
self.labelrefresh.setStyleSheet("QLabel{background-color: yellow; }")
font=QtGui.QFont('Times',10,QtGui.QFont.Bold)
self.labelrefresh.setFont(font)
self.lcd.setVisible(False)
self.photo.setVisible(False)
self.photo.raise_()
self.labelgif.raise_()
self.labelgif1.raise_()
self.labelyazi.raise_()
self.spin.setRange(1,60)
self.dial.setRange(1,60)
self.button.setText("\u015E"+"\u0259"+"b\u0259k\u0259ni yoxla")
self.button1.setEnabled(False)
self.button.setEnabled(False)
self.spin.setEnabled(False)
self.dial.setEnabled(False)
self.radiosan.setEnabled(False)
self.radiodeq.setEnabled(False)
self.label.setText('Qo\u015Fulmu'+'\u015F cihaz yoxdur')
self.label.setStyleSheet("QLabel { background-color : #e20000; color : black; }");
newfont = QtGui.QFont('Times',11)
self.label.setFont(newfont)
#geometries
self.setGeometry(40,50,700,500)
self.button.setGeometry(20,40,120,50)
self.listw.setGeometry(380,160,300,200)
self.button1.setGeometry(575,40,120,50)
self.dial.setGeometry(40,400,75,70)
self.spin.setGeometry(150,425,50,25)
self.radiosan.setGeometry(150,400,75,25)
self.radiodeq.setGeometry(150,380,75,25)
self.lcd.setGeometry(300,40,100,50)
self.buttonlocal.setGeometry(150,40,125,50)
self.label.setGeometry(520,440,155,30)
self.listCOM.setGeometry(20,160,300,200)
self.labelrefresh.setGeometry(20,100,220,30)
self.photo.setGeometry(50,100,600,300)
#events
self.buttonlocal.clicked.connect(self.checklocal)
self.button.clicked.connect(self.thread1)
self.button.clicked.connect(self.threadnetwork)
self.dial.valueChanged.connect(self.spin.setValue)
self.spin.valueChanged.connect(self.dial.setValue)
self.listCOM.doubleClicked.connect(self.showdialog)
self.listw.doubleClicked.connect(self.showdialogremote)
self.button1.clicked.connect(self.thread) # communication
self.radiodeq.clicked.connect(self.spinvalue)
self.radiosan.clicked.connect(self.dialvalue)
self.button1.clicked.connect(self.threadback)
def threadback(self):
if self.radiodeq.isChecked() or self.radiosan.isChecked():
self.thread1=threading.Thread(target=self.send)
self.thread1.start()
else:
pass
def loading(self):
m=loading()
def send(self):
try:
self.currentlocal.open()
remotestr=self.listw.currentItem().text()
li=remotestr.split("-")
xbee_network=self.currentlocal.get_network()
remote=xbee_network.get_device_by_64(XBee64BitAddress.from_hex_string(li[0]))
arr_64=self.currentlocal.get_64bit_addr()
NEW_TIMEOUT_FOR_SYNC_OPERATIONS = 1
self.currentlocal.set_sync_ops_timeout(NEW_TIMEOUT_FOR_SYNC_OPERATIONS)
if self.radiosan.isChecked():
self.currentlocal.send_data(remote,str(arr_64)+"-"+str(self.spin.value()))
else:
self.currentlocal.send_data(remote,str(arr_64)+"-"+str(self.spin.value()*60))
self.labelgif1.setVisible(False)
self.labelgif1.setVisible(True)
self.labelyazi.setVisible(True)
while(True):
self.data=self.currentlocal.read_data()
if(self.data!=None):
self.data=self.data.data.decode()
self.labelgif1.setVisible(False)
self.labelyazi.setVisible(False)
break
self.currentlocal.close()
data_list=self.data.split(',')
self.labelgif.setVisible(True)
objects = []
performance=[]
for i in range(1,len(data_list)):
objects.append(i)
for i in range(len(data_list)-1):
li=data_list[i]
li=li.split('-')
performance.append(li[1])
y_pos = np.arange(len(objects))
objects=tuple(objects)
plt.figure("Qrafik")
plt.xticks(y_pos, objects)
plt.ylabel('Temperatur')
plt.xlabel('Zaman')
plt.plot(y_pos,performance)
self.labelgif.setVisible(False)
plt.show()
self.data=None
except:
print('salam')
self.currentlocal.close()
def showdialog(self):
try:
li=self.listCOM.currentItem().text().split('-')
local=XBeeDevice(li[2],9600)
local.open()
arr_64=local.get_64bit_addr()
arr_16=local.get_16bit_addr()
arr_node=local.get_node_id()
arr_pro=local.get_protocol()
arr_hard=local.get_hardware_version()
local.close()
dlg=dialog(arr_64,arr_16,arr_node,arr_pro,arr_hard)
except:
pass #exception
def showdialogremote(self):
li=self.listw.currentItem().text().split('-')
if self.checker !=0:
self.lastcolored.setBackground(QtGui.QColor(255,255,255))
self.lastcolored=self.listw.currentItem()
self.listw.currentItem().setBackground(QtGui.QColor(239, 255, 25))
try:
self.currentlocal.open()
xbee_network=self.currentlocal.get_network()
remote=xbee_network.get_device_by_64(XBee64BitAddress.from_hex_string(li[0]))
arr_64=remote.get_64bit_addr()
arr_16=remote.get_16bit_addr()
arr_node=remote.get_node_id()
arr_pro=remote.get_protocol()
arr_hard=remote.get_hardware_version()
self.currentlocal.close()
dlg=dialog(arr_64,arr_16,arr_node,arr_pro,arr_hard)
self.checker=1
except:
pass # exception
def spinvalue(self):
self.dial.setRange(1,60)
self.spin.setRange(1,60)
self.dial.setValue(1)
def dialvalue(self):
self.dial.setRange(4,60)
self.spin.setRange(4,60)
self.dial.setValue(4)
def keyPressEvent(self, event):
key = event.key()
if key == QtCore.Qt.Key_F5:
self.threadrefresh()
def checklocal(self):
try:
if (self.currentlocal !=0):
for i in range(0,self.listCOM.count()):
self.listCOM.item(i).setBackground(QtGui.QColor(255, 255, 255))
self.listCOM.currentItem().setBackground(QtGui.QColor(97, 255, 66))
li=self.listCOM.currentItem().text().split('-')
self.currentlocal = XBeeDevice(li[2], 9600)
self.port=li[2]
self.currentCOM=self.listCOM.currentItem().text()
self.currentlocal.open()
self.currentlocal.close()
self.listw.clear()
self.button1.setEnabled(True)
self.button.setEnabled(True)
self.spin.setEnabled(True)
self.dial.setEnabled(True)
self.radiosan.setEnabled(True)
self.radiodeq.setEnabled(True)
if platform.system()=='Linux':
self.label.setGeometry(500,440,180,30)
self.label.setText('Qo\u015Fulmu'+'\u015F port: '+str(li[2]))
self.checker=0
self.label.setStyleSheet("QLabel { background-color : #22ce00; color : white; }")
except:
QMessageBox.about(self, 'Yanl\u0131\u015F', 'Lokal cihaz\u0131n portu do\u011Fru deyil')
def refresh(self):
self.listCOM.clear()
index=0
if platform.system()=='Windows':
for i in range(0,257):
try:
local_xbee = XBeeDevice('COM'+str(i), 9600)
local_xbee.open()
addr64=local_xbee.get_64bit_addr()
noid=local_xbee.get_node_id()
local_xbee.close()
self.listCOM.addItem(str(addr64)+"-"+str(noid)+"-"+'COM'+str(i))
if(self.port=='COM'+str(i)):
self.listCOM.item(index).setBackground(QtGui.QColor(97, 255, 66))
index+=1
except:
pass
elif platform.system()=='Linux':
for i in range(257):
try:
local_xbee = XBeeDevice('/dev/ttyUSB'+str(i), 9600)
local_xbee.open()
addr64=local_xbee.get_64bit_addr()
noid=local_xbee.get_node_id()
local_xbee.close()
self.listCOM.addItem(str(addr64)+"-"+str(noid)+"-"+'/dev/ttyUSB'+str(i))
if(self.port=='/dev/ttyUSB'+str(i)):
self.listCOM.item(index).setBackground(QtGui.QColor(97, 255, 66))
index+=1
except:
pass
self.checker=0
def thread(self):
if self.radiodeq.isChecked() or self.radiosan.isChecked():
self.thread=threading.Thread(target=self.timing)
self.thread.start()
else:
QMessageBox.about(self, 'Yanl\u0131\u015F', 'Zaman vahidini se\u00E7in')
def thread1(self):
if self.radiodeq.isChecked() or self.radiosan.isChecked():
self.thread1=threading.Thread(target=self.scan)
self.thread1.start()
else:
QMessageBox.about(self, 'Yanl\u0131\u015F', 'Zaman vahidini se\u00E7in')
def threadnetwork(self):
if self.radiodeq.isChecked() or self.radiosan.isChecked():
self.thread1=threading.Thread(target=self.network)
self.thread1.start()
else:
pass
def network(self):
try:
self.button1.setEnabled(False)
self.buttonlocal.setEnabled(False)
self.button.setEnabled(False)
self.button1.setEnabled(False)
self.spin.setEnabled(False)
self.dial.setEnabled(False)
self.radiosan.setEnabled(False)
self.radiodeq.setEnabled(False)
self.listw.clear()
self.currentlocal.open()
xbee_network=self.currentlocal.get_network()
xbee_network.clear()
listdev=[]
def callback_device_discovered(remote):
listdev.append(str(remote))
if self.radiosan.isChecked():
if(self.spin.value()>25):
defe=int((self.spin.value())/25)
qaliqsan=(self.spin.value())%25
for i in range(0,defe):
xbee_network.set_discovery_timeout(22)
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
if(qaliqsan<4):
                        add = 4 - qaliqsan
xbee_network.set_discovery_timeout(qaliqsan+add)
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
else:
xbee_network.set_discovery_timeout(qaliqsan)
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
self.currentlocal.close()
else:
xbee_network.set_discovery_timeout(self.spin.value())
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
self.currentlocal.close()
self.photo.setVisible(True)
listdev=list(set(listdev))
for i in range(0,len(listdev)):
self.listw.addItem(listdev[i])
QtCore.QThread.msleep(1000)
self.photo.setEnabled(True)
self.buttonlocal.setEnabled(True)
self.button1.setEnabled(True)
self.button.setEnabled(True)
self.spin.setEnabled(True)
self.dial.setEnabled(True)
self.radiosan.setEnabled(True)
self.radiodeq.setEnabled(True)
self.photo.setVisible(False)
if self.radiodeq.isChecked():
defe=int((self.spin.value()*60)/25)
qaliqsan=(self.spin.value()*60)%25
for i in range(0,defe):
                xbee_network.set_discovery_timeout(22)  # run discovery in 22-second chunks
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
            xbee_network.set_discovery_timeout(qaliqsan)  # remaining seconds
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
self.currentlocal.close()
else:
            xbee_network.set_discovery_timeout(self.spin.value())  # discovery timeout in seconds
xbee_network.add_device_discovered_callback(callback_device_discovered)
xbee_network.start_discovery_process()
while xbee_network.is_discovery_running():
QtCore.QThread.msleep(100)
self.currentlocal.close()
self.photo.setVisible(True)
listdev=list(set(listdev))
for i in range(0,len(listdev)):
self.listw.addItem(listdev[i])
QtCore.QThread.msleep(2000)
self.buttonlocal.setEnabled(True)
self.button1.setEnabled(True)
self.button.setEnabled(True)
self.spin.setEnabled(True)
self.dial.setEnabled(True)
self.radiosan.setEnabled(True)
self.radiodeq.setEnabled(True)
self.photo.setVisible(False)
except:
self.currentlocal.close()
def threadrefresh(self):
t=threading.Thread(target=self.refresh)
t.start()
#UI has been finished
def timing(self):
QtCore.QThread.msleep(1000)
self.button1.setEnabled(False)
if(self.radiodeq.isChecked()):
self.lcd.setVisible(True)
j=self.spin.value()*60
k=self.spin.value()
if(k<10):
self.lcd.display("0{}:00".format(k))
QtCore.QThread.msleep(1000)
else:
self.lcd.display("{}:00".format(k))
QtCore.QThread.msleep(1000)
j-=1
k-=1
while(j>-1):
if(k<10):
if(j%60<10):
                        if j % 60 == 0:
self.lcd.display("0{}:0{}".format(k,j%60))
k-=1
j-=1
QtCore.QThread.msleep(1000)
continue
self.lcd.display("0{}:0{}".format(k,j%60))
app.processEvents()
QtCore.QThread.msleep(1000)
j-=1
else:
self.lcd.display("0{}:{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
else:
                    if j % 60 == 0:
self.lcd.display("0{}:0{}".format(k,j%60))
k-=1
j-=1
QtCore.QThread.msleep(1000)
continue
if(j%60<10):
self.lcd.display("{}:0{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
else:
self.lcd.display("{}:{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
self.lcd.setVisible(False)
self.button1.setEnabled(True)
elif (self.radiosan.isChecked()):
self.lcd.setVisible(True)
timing=self.spin.value()
for i in range(timing,-1,-1):
if(i<10):
self.lcd.display("00:0{}".format(i))
QtCore.QThread.msleep(1000)
else:
self.lcd.display("00:{}".format(i))
QtCore.QThread.msleep(1000)
self.lcd.setVisible(False)
self.button1.setEnabled(True)
def starting(self):
splash=QtWidgets.QSplashScreen(QtGui.QPixmap('splash.jpg'),QtCore.Qt.WindowStaysOnTopHint)
splash.show()
for i in range(0,257):
app.processEvents()
            if (i == 50):
splash.showMessage("<h1><font color=#608fdb>Proqram başladılır!</font></h1>", QtCore.Qt.AlignTop)
QtCore.QThread.msleep(1000)
try:
if (platform.system() == 'Windows'):
local_xbee = XBeeDevice('COM'+str(i), 9600)
local_xbee.open()
addr64=local_xbee.get_64bit_addr()
noid=local_xbee.get_node_id()
local_xbee.close()
self.listCOM.addItem(str(addr64)+"-"+str(noid)+"-"+'COM'+str(i))
elif (platform.system() == 'Linux'):
local_xbee = XBeeDevice('/dev/ttyUSB'+str(i), 9600)
local_xbee.open()
addr64=local_xbee.get_64bit_addr()
noid=local_xbee.get_node_id()
local_xbee.close()
self.listCOM.addItem(str(addr64)+"-"+str(noid)+"-"+'/dev/ttyUSB'+str(i))
except:
pass
splash.close()
def createlistw(self):
self.listw.clear()
for i in range(0,9):
self.obj.append(i)
self.obj[i]=elements()
self.obj[i].t=[10,20,30,40,2,3,4,5,6]
self.obj[i].s=[5,6,7,8,9,1,2,3,4,5,88]
self.listw.addItem(str(self.obj[i].t[i]))
def scan(self):
self.button.setEnabled(False)
if(self.radiodeq.isChecked()):
self.lcd.setVisible(True)
j=self.spin.value()*60
k=self.spin.value()
if(k<10):
self.lcd.display("0{}:00".format(k))
QtCore.QThread.msleep(1000)
else:
self.lcd.display("{}:00".format(k))
QtCore.QThread.msleep(1000)
j-=1
k-=1
while(j>-1):
if(k<10):
if(j%60<10):
                        if(j%60 == 0):
self.lcd.display("0{}:0{}".format(k,j%60))
k-=1
j-=1
QtCore.QThread.msleep(1000)
continue
self.lcd.display("0{}:0{}".format(k,j%60))
app.processEvents()
QtCore.QThread.msleep(1000)
j-=1
else:
self.lcd.display("0{}:{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
else:
                    if(j%60 == 0):
self.lcd.display("0{}:0{}".format(k,j%60))
k-=1
j-=1
QtCore.QThread.msleep(1000)
continue
if(j%60<10):
self.lcd.display("{}:0{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
else:
self.lcd.display("{}:{}".format(k,j%60))
QtCore.QThread.msleep(1000)
j-=1
self.lcd.setVisible(False)
self.button.setEnabled(True)
elif (self.radiosan.isChecked()):
self.lcd.setVisible(True)
timing=self.spin.value()
for i in range(timing,-1,-1):
if(i<10):
self.lcd.display("00:0{}".format(i))
QtCore.QThread.msleep(1000)
else:
self.lcd.display("00:{}".format(i))
QtCore.QThread.msleep(1000)
self.lcd.setVisible(False)
self.button.setEnabled(True)
class dialog(QtWidgets.QDialog):
def __init__(self,edit64,edit16,editnode,editpro,edithard):
super(dialog,self).__init__()
self.setWindowIcon(QtGui.QIcon('pyicon.png'))
self.text_64=str(edit64)
self.text_16=str(edit16)
self.text_node=str(editnode)
protocol=str(editpro).split('.')
self.text_pro=str(protocol[1])
self.text_hard=str(edithard)
self.setFixedSize(470,325)
self.setWindowTitle("Haqq\u0131nda")
self.uiinit()
self.show()
self.exec_()
def uiinit(self):
newfont = QtGui.QFont('Times',11,QtGui.QFont.Bold)
self.photo = QLabel(self)
self.pixmap = QPixmap('xbee-wire.jpg')
self.pixmap=self.pixmap.scaled(225,300,QtCore.Qt.KeepAspectRatio)
self.photo.setPixmap(self.pixmap)
self.photo.move(0,20)
self.label_64=QLabel("64-bitlik "+"\u00DC"+"nvan",self)
self.label_64.setGeometry(250,0,150,30)
self.label_64.setFont(newfont)
self.line_64=QtWidgets.QLineEdit(self)
self.line_64.setGeometry(250,30,210,30)
self.line_64.setText(self.text_64)
self.line_64.setReadOnly(True)
self.label_16=QLabel("16-bitlik "+"\u00DC"+"nvan",self)
self.label_16.setGeometry(250,60,150,30)
self.label_16.setFont(newfont)
self.line_16=QtWidgets.QLineEdit(self)
self.line_16.setGeometry(250,90,210,30)
self.line_16.setText(self.text_16)
self.line_16.setReadOnly(True)
self.label_nodeid=QLabel("Ad\u0131",self)
self.label_nodeid.setGeometry(250,120,150,30)
self.label_nodeid.setFont(newfont)
self.line_nodeid=QtWidgets.QLineEdit(self)
self.line_nodeid.setGeometry(250,150,210,30)
self.line_nodeid.setText(self.text_node)
self.line_nodeid.setReadOnly(True)
self.label_firm=QLabel('Protokol',self)
self.label_firm.setGeometry(250,180,210,30)
self.label_firm.setFont(newfont)
self.line_firm=QtWidgets.QLineEdit(self)
self.line_firm.setGeometry(250,210,210,30)
self.line_firm.setText(self.text_pro)
self.line_firm.setReadOnly(True)
self.label_hard=QLabel("Aparat versiyas"+"\u0131",self)
self.label_hard.setGeometry(250,240,210,30)
self.label_hard.setFont(newfont)
self.line_hard=QtWidgets.QLineEdit(self)
self.line_hard.setGeometry(250,270,210,30)
self.line_hard.setText(self.text_hard)
self.line_hard.setReadOnly(True)
if __name__=='__main__':
app=QApplication(sys.argv)
win=window()
win.show()
sys.exit(app.exec_())
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import csv
import os
import re
from glob import glob
from unittest import TestCase, main
from g2p.app import APP
from g2p.cli import convert, doctor, generate_mapping, scan, update
from g2p.log import LOGGER
from g2p.tests.public.data import __file__ as data_dir
class CliTest(TestCase):
"""Test suite for the g2p Command Line Interface"""
def setUp(self):
self.runner = APP.test_cli_runner()
self.data_dir = os.path.dirname(data_dir)
self.langs_to_test = []
for fn in glob(os.path.join(self.data_dir, "*.*sv")):
if fn.endswith("csv"):
delimiter = ","
elif fn.endswith("psv"):
delimiter = "|"
elif fn.endswith("tsv"):
delimiter = "\t"
with open(fn, encoding="utf-8") as csvfile:
reader = csv.reader(csvfile, delimiter=delimiter)
for row in reader:
if len(row) < 4:
LOGGER.warning(
f"Row in {fn} containing values {row} does not have the right values."
f"Please check your data."
)
else:
self.langs_to_test.append(row)
def test_update(self):
result = self.runner.invoke(update)
self.assertEqual(result.exit_code, 0)
def test_convert(self):
LOGGER.info(
f"Running {len(self.langs_to_test)} g2p convert test cases found in public/data"
)
error_count = 0
for tok_option in [["--tok", "--check"], ["--no-tok"]]:
for test in self.langs_to_test:
output_string = self.runner.invoke(
convert, [*tok_option, test[2], test[0], test[1]]
).stdout.strip()
if output_string != test[3].strip():
LOGGER.warning(
f"test_cli.py: {test[0]}->{test[1]} mapping error: '{test[2]}' "
f"should map to '{test[3]}', got '{output_string}' (with {tok_option})."
)
if error_count == 0:
first_failed_test = test + [tok_option]
error_count += 1
if error_count > 0:
reference_string = first_failed_test[3]
output_string = self.runner.invoke(
convert,
[
first_failed_test[4], # tok_option
first_failed_test[2], # word to convert
first_failed_test[0], # in_lang
first_failed_test[1], # out_lang
],
).stdout.strip()
self.assertEqual(
output_string,
reference_string.strip(),
f"{first_failed_test[0]}->{first_failed_test[1]} mapping error "
"for '{first_failed_test[2]}'.\n"
"Look for warnings in the log for any more mapping errors",
)
def test_doctor(self):
result = self.runner.invoke(doctor, "-m fra")
self.assertEqual(result.exit_code, 2)
result = self.runner.invoke(doctor, "-m fra-ipa")
self.assertEqual(result.exit_code, 0)
# Disable this test: it's very slow (8s, just by itself) and does not assert
# anything useful.
# Migrated to test_doctor_expensive.py so we can still run it, manually or via
# ./run.py all.
# result = self.runner.invoke(doctor)
# self.assertEqual(result.exit_code, 0)
# self.assertGreaterEqual(len(result.stdout), 10000)
result = self.runner.invoke(doctor, "-m eng-arpabet")
self.assertEqual(result.exit_code, 0)
self.assertIn("No checks implemented", result.stdout)
def test_doctor_lists(self):
result = self.runner.invoke(doctor, "--list-all")
self.assertEqual(result.exit_code, 0)
self.assertIn("eng-arpabet:", result.stdout)
self.assertIn("eng-ipa:", result.stdout)
result = self.runner.invoke(doctor, "--list-ipa")
self.assertEqual(result.exit_code, 0)
self.assertNotIn("eng-arpabet:", result.stdout)
self.assertIn("eng-ipa:", result.stdout)
def test_scan_fra(self):
result = self.runner.invoke(
scan, ["fra", os.path.join(self.data_dir, "fra_panagrams.txt")]
)
self.assertEqual(result.exit_code, 0)
self.assertLogs(level="WARNING")
diacritics = "àâéèêëîïôùûüç"
for d in diacritics:
self.assertNotIn(d, result.stdout)
unmapped_chars = ":/,'-()2"
for c in unmapped_chars:
self.assertIn(c, result.stdout)
def test_scan_fra_simple(self):
# For now, unit test g2p scan using a simpler piece of French
result = self.runner.invoke(
scan, ["fra", os.path.join(self.data_dir, "fra_simple.txt")]
)
self.assertEqual(result.exit_code, 0)
self.assertLogs(level="WARNING")
diacritics = "àâéèêëîïôùûüç"
for d in diacritics:
self.assertNotIn(d, result.stdout)
unmapped_chars = ":,"
for c in unmapped_chars:
self.assertIn(c, result.stdout)
def test_scan_str_case(self):
result = self.runner.invoke(
scan, ["str", os.path.join(self.data_dir, "str_un_human_rights.txt")]
)
returned_set = re.search("{(.*)}", result.stdout).group(1)
self.assertEqual(result.exit_code, 0)
self.assertLogs(level="WARNING")
unmapped_upper = "FGR"
for u in unmapped_upper:
self.assertIn(u, returned_set)
unmapped_lower = "abcdefghijklqrtwxyz"
for low in unmapped_lower:
self.assertIn(low, returned_set)
mapped_upper = "ABCDEHIJKLMNOPQSTUVWXYZ"
for u in mapped_upper:
self.assertNotIn(u, returned_set)
mapped_lower = "s"
self.assertNotIn(mapped_lower, returned_set)
def test_convert_option_e(self):
result = self.runner.invoke(convert, "-e est fra eng-arpabet")
for s in [
"[['e', 'ɛ'], ['s', 'ɛ'], ['t', 'ɛ']]",
"[['ɛ', 'ɛ']]",
"[['ɛ', 'E'], ['ɛ', 'H'], ['ɛ', ' ']]",
]:
self.assertIn(s, result.stdout)
def test_convert_option_d(self):
result = self.runner.invoke(convert, "-d est fra eng-arpabet")
for s in ["'input': 'est'", "'output': 'ɛ'", "'input': 'ɛ'", "'output': 'EH '"]:
self.assertIn(s, result.stdout)
def test_convert_option_t(self):
result = self.runner.invoke(convert, "-t e\\'i oji oji-ipa")
self.assertIn("eːʔi", result.stdout)
def test_convert_option_tl(self):
result = self.runner.invoke(convert, "--tok-lang fra e\\'i oji oji-ipa")
self.assertIn("eː'i", result.stdout)
def test_generate_mapping_errors(self):
"""Exercise various error situations with the g2p generate-mapping CLI command"""
# We don't exercise valid calls to generate_mapping here. The underlying
# create_mapping() function is tested in test_create_mapping.py, and
# align_to_dummy_fallback() in test_fallback.py, with less expensive
# inputs than our real g2p mappings, and with predictable results.
results = self.runner.invoke(generate_mapping)
self.assertIn("Missing argument", results.output)
results = self.runner.invoke(generate_mapping, "fra")
self.assertIn(
"Nothing to do",
results.output,
'"g2p generate-mapping fra" should say need --ipa or --dummy or --list-dummy',
)
results = self.runner.invoke(generate_mapping, "--ipa foo")
self.assertIn("Invalid value for IN_LANG", results.output)
results = self.runner.invoke(generate_mapping, "--dummy fra foo")
self.assertIn("Invalid value for OUT_LANG", results.output)
results = self.runner.invoke(generate_mapping, "--ipa crl")
self.assertIn("Cannot find IPA mapping", results.output)
results = self.runner.invoke(generate_mapping, "--ipa fra dan-ipa")
self.assertIn("Cannot find IPA mapping", results.output)
results = self.runner.invoke(generate_mapping, "--list-dummy fra")
self.assertIn("Dummy phone inventory", results.output)
results = self.runner.invoke(generate_mapping, "--ipa --dummy fra")
self.assertIn(
"Cannot do both --ipa and --dummy at the same time", results.output
)
results = self.runner.invoke(
generate_mapping, "--out-dir does-not-exist --ipa fra"
)
self.assertIn(
"does not exist",
results.output,
"Non-existent out-dir must be reported as error",
)
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from cli import banner, list_files, menu, device_monitor
from cli import access
import os
def main():
os.system('clear')
print("\n")
banner.display()
print("\n")
###
device_monitor.display()
print("\n")
# password check
# password_check = access.password_check()
# if password_check == True:
# pass
# print("\n")
# list_files.all()
print("\n")
menu.display()
###
print("\n")
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
time=[] # 'time' is Portuguese for 'team'
while True:
gols = []
dic={}
    dic['nome']=input('player name: ')
    dic['jogos']=int(input(f'number of matches played by {dic["nome"]}: '))
    for l in range(0,dic['jogos']):
        gols.append(int(input(f'how many goals in match {l+1}: ')))
dic['gols']=gols[:]
dic['total']=sum(gols)
time.append(dic)
    sn = input('do you want to continue [Y/N]? ')
if sn in 'Nn':
break
print('id    name      goals  total')
for l, n in enumerate(time):
print(f'{l:<5} {time[l]["nome"]:^9} {time[l]["gols"]} {time[l]["total"]:>3}')
while True:
    x=int(input('which player would you like to inspect (999 to exit)? '))
if x== 999:
break
    if x >= len(time):
        print('player not found!')
else:
        print(f'summary for player {time[x]["nome"]}')
        print(f'the player {time[x]["nome"]} played a total of {time[x]["jogos"]} matches')
        for l, n in enumerate(time[x]['gols']):
            print(f' => In match {l+1}, scored {n} goals')
|
nilq/baby-python
|
python
|
from torch.nn.modules.loss import _Loss
__all__ = ['JointLoss', 'WeightedLoss']
class WeightedLoss(_Loss):
"""Wrapper class around loss function that applies weighted with fixed factor.
This class helps to balance multiple losses if they have different scales
"""
def __init__(self, loss, weight=1.0):
super().__init__()
self.loss = loss
self.weight = weight
def forward(self, *input):
return self.loss(*input) * self.weight
class JointLoss(_Loss):
def __init__(self, first, second, first_weight=1.0, second_weight=1.0):
super().__init__()
self.first = WeightedLoss(first, first_weight)
self.second = WeightedLoss(second, second_weight)
def forward(self, *input):
return self.first(*input) + self.second(*input)
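# Usage sketch (illustrative only; the component losses and weights below are
# placeholders, not part of this module):
#   import torch.nn as nn
#   criterion = JointLoss(nn.BCEWithLogitsLoss(), nn.L1Loss(),
#                         first_weight=1.0, second_weight=0.5)
#   loss = criterion(predictions, targets)  # weighted sum of both losses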
|
nilq/baby-python
|
python
|
import os
import hashlib
from urllib.parse import parse_qs # stdlib query-string parsing helper
import jinja2
from DBHelper import DBHelper
from Response import *
# Handler module
# Read a file
def load_file(fileName):
    try:
        with open(fileName, 'rb') as fp:
            return fp.read() # file exists
    except Exception as e:
        return b"File not Found" # file does not exist
# Home page
def index(req):
return render(req,'index.html',{'cookie':req.cookie})
# Login page
def login(req):
path = "static/view/login.html"
html = load_file(path)
req.start_response("200 ok", [('ContentType', 'text/html')])
return [html]
# Log out
def logout(req):
response= Response(req)
response.set_cookie('uid','',expired=-1)
response.set_cookie('username','',expired=-1)
response.header.append(('ContentType','text/html'))
response.req.start_response("200 ok",response.header)
return [b"<html><head><meta http-equiv='refresh' content='0;url=/login'></head><body></body></html>"]
def do_login(req):
    # check the request method type
if req.method == 'GET':
username = req.GET.get('username')
password = req.GET.get('password')
sex = req.GET.get('sex')
print(username,password)
        # business logic
password = hashlib.sha1(password.encode('utf8')).hexdigest()
print(password)
db = DBHelper('user')
res = db.where(username=username,password=password).select()
print(db.sql)
print(res)
response = Response(req)
if res:
            # credentials verified
uid = res[0]['uid']
username = res[0]['username']
response.set_cookie('uid',uid)
response.set_cookie('username',username)
response.header.append(("ContentType",'text/html'))
response.req.start_response("200 ok",response.header)
return [b"<html><head><meta http-equiv='refresh' content='0;url=/'></head><body></body></html>"]
else:
            # redirect back to the login page
return [b"<html><head><meta http-equiv='refresh' content='0;url=/login'></head><body></body></html>"]
# return [b"<meta http-equiv='refresh' content='0;url=/login'>"]
# return [b'dologin']
else: #post
username = req.POST.get('username')
password = req.POST.get('password')
sex = req.POST.get('sex')
print(username,password,sex)
        # business logic
req.start_response("200 ok", [('ContentType', 'text/html')])
return [b'world']
def register(req):
pass
# Static assets
def load_static(req):
path = req.environ.get('PATH_INFO')
print(path)
contentType = {
'.css':'text/css',
'.js' : 'application/x-javascript',
'.png': 'image/png',
'.jpg' : 'image/jpeg',
'.jpeg' : 'image/jpeg',
'.bmp':'image/bmp'
}
rootPath = req.environ.get('root_path')
path = rootPath + path
    # check whether the path exists
if path and os.path.exists(path):
data = load_file(path)
        # get the file extension
        ext = os.path.splitext(path)[1].lower() # file extension
        # check whether the extension is in the content-type map
if ext in contentType:
req.start_response("200 ok", [('ContentType', contentType[ext])])
else:
req.start_response("200 ok", [('ContentType', 'text/html')])
else:
data = b'File Not Found'
req.start_response("200 ok", [('ContentType', 'text/html')])
return [data]
# Student list
# def student_list(req):
#     db = DBHelper('student')
#     data = db.select()
#     print(data)
#     # load the student-list template file
#     html = load_file('static/view/studentlist.html').decode('utf8')
#     stu = ""
#     # generate table rows
#     for rec in data:
#         stu += "<tr><td>"+rec['sno']+"</td><td>"+rec['sname']+"</td></tr>"
#
#     html = html.format(student=stu) # format the template string
#     print(html)
#     req.start_response("200 ok", [('ContentType', 'text/html')])
#     return [html.encode('utf8')]
# def student_list(req):
#     db = DBHelper('student')
#     data = db.select()
#     # instantiate the template loader
#     env = jinja2.Environment(loader=jinja2.FileSystemLoader("./static/view"))
#     template = env.get_template('studentlist.html') # load the template
#     # print(template)
#     # render the template file into HTML source
#     html = template.render(title='1902 student list',data=data)
#     # print(html)
#     req.start_response("200 ok", [('ContentType', 'text/html')])
#     return [html.encode('utf8')]
def student_list(req):
db = DBHelper('student')
data = db.select()
return render(req,'studentlist.html',{'title':'1902','data':data})
def student_detail(req,sno):
# sno = req.GET.get('sno')
print(sno)
db = DBHelper('student')
student = db.where(sno=sno).select()
if student:
student = student[0]
return render(req,'studentdetail.html',{'title':student['sname'],'data':student})
else:
return render(req,'404.html')
|
nilq/baby-python
|
python
|
from repl import start
def main() -> None:
print("Hello! This is the Monkey programming language!")
print("Feel free to type in commands")
start()
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : GUC
Case Name : 方式四修改参数enable_save_datachanged_timestamp为off
Description :
步骤1:查询enable_save_datachanged_timestamp默认值
show enable_save_datachanged_timestamp;
步骤2:方式四修改参数enable_save_datachanged_timestamp为off
alter system set enable_save_datachanged_timestamp to off;
show enable_save_datachanged_timestamp;
步骤3:恢复默认值
alter system set enable_save_datachanged_timestamp to on;
Expect :
步骤1:显示默认值on
步骤2:参数设置失败合理报错,校验参数值为on
步骤3:恢复默认值成功
History :
"""
import os
import time
import unittest
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Common import Common
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
from yat.test import Node
from yat.test import macro
LOGGER = Logger()
COMMONSH = CommonSH("PrimaryDbUser")
class GucTestCase(unittest.TestCase):
def setUp(self):
LOGGER.info("==Opengauss_Function_Guc_Run_Statistics_Case0069开始执行==")
self.constant = Constant()
self.common = Common()
self.com = CommonSH("PrimaryDbUser")
self.user_node = Node("PrimaryDbUser")
status = COMMONSH.get_db_cluster_status()
self.assertTrue("Degraded" in status or "Normal" in status)
self.param = "enable_save_datachanged_timestamp"
text = "备份pg_hba.conf文件"
LOGGER.info(text)
self.hba = os.path.join(macro.DB_INSTANCE_PATH, "pg_hba.conf")
result = self.common.get_sh_result(self.user_node,
f"cp {self.hba} {self.hba}backup")
self.assertNotIn("bash", result, "执行失败:" + text)
self.assertNotIn("ERROR", result, "执行失败:" + text)
def test_guc(self):
LOGGER.info(f"--修改参数 确认落盘--")
result = COMMONSH.execute_gsguc("set",
self.constant.GSGUC_SUCCESS_MSG,
f"synchronous_standby_names='*'")
self.assertTrue(result)
result = COMMONSH.execute_gsguc("set",
self.constant.GSGUC_SUCCESS_MSG,
f"synchronous_commit='remote_apply'")
self.assertTrue(result)
result = COMMONSH.execute_gsguc("set",
self.constant.GSGUC_SUCCESS_MSG,
f"hot_standby=on")
self.assertTrue(result)
result = COMMONSH.execute_gsguc("set",
self.constant.GSGUC_SUCCESS_MSG,
f"wal_level='hot_standby'")
self.assertTrue(result)
status = COMMONSH.restart_db_cluster()
LOGGER.info(status)
status = COMMONSH.get_db_cluster_status()
LOGGER.info(status)
self.assertTrue("Normal" in status or "Degraded" in status)
LOGGER.info(f"--step1:查询{self.param};expect:默认值on")
sql_cmd = COMMONSH.execut_db_sql(f"show {self.param};")
LOGGER.info(sql_cmd)
self.assertNotIn(self.constant.SQL_WRONG_MSG[1], sql_cmd)
self.assertEqual("on", sql_cmd.splitlines()[-2].strip())
LOGGER.info(f"--step2:方式四修改参数{self.param}为off;expect:失败")
sql_cmd = COMMONSH.execut_db_sql(f"alter system "
f"set {self.param} to off;show {self.param};")
self.assertIn("ERROR", sql_cmd)
self.assertIn("on\n", sql_cmd)
LOGGER.info("创建表,执行insert/update/delete, "
"exchange/truncate/drop partition,查看last_data_changed被统计")
sql_cmd = COMMONSH.execut_db_sql('''drop table if exists test;\
create table test(c_int int) PARTITION BY RANGE(c_int)\
(PARTITION P1 VALUES LESS THAN(100),\
PARTITION P2 VALUES LESS THAN(200),\
PARTITION P3 VALUES LESS THAN(300));\
insert into test values(11),(12),(285),(286);\
drop table if exists test1;create table test1(c_int int);\
insert into test1 values(201),(205),(208);
''')
LOGGER.info(sql_cmd)
self.assertNotIn("ERROR", sql_cmd)
self.assertIn(self.constant.CREATE_TABLE_SUCCESS, sql_cmd)
sql_list = ["alter table test truncate PARTITION p1;",
"insert into test values(1),(2),(255),(256);",
"update test set c_int = 5 where c_int = 1;",
"delete from test where c_int = 2;",
"alter table test EXCHANGE PARTITION (P3) "
"WITH TABLE test1;",
"alter table test DROP PARTITION P2;"]
for sql in sql_list:
LOGGER.info("查询执行前last_data_changed")
sql_cmd = COMMONSH.execut_db_sql("select last_data_changed "
"from PG_STAT_ALL_TABLES where relname='test';")
LOGGER.info(sql_cmd)
self.assertNotIn("ERROR", sql_cmd)
data1 = sql_cmd.splitlines()[-2].strip()
time.sleep(3)
LOGGER.info("--校验功能")
sql_cmd = COMMONSH.execut_db_sql(f'''{sql}''')
LOGGER.info(sql_cmd)
self.assertNotIn("ERROR", sql_cmd)
LOGGER.info("查询执行后last_data_changed")
sql_cmd = COMMONSH.execut_db_sql("select last_data_changed "
"from PG_STAT_ALL_TABLES where relname='test';")
LOGGER.info(sql_cmd)
self.assertNotIn("ERROR", sql_cmd)
data2 = sql_cmd.splitlines()[-2].strip()
LOGGER.info("判断前后时间不同 操作时间已更新")
self.assertNotEqual(data1, data2)
def tearDown(self):
LOGGER.info("--step3:恢复默认值;expect:成功")
LOGGER.info(f"恢复pg_hba.conf文件")
cmd_result = self.common.get_sh_result(self.user_node,
f"mv {self.hba}backup {self.hba}")
LOGGER.info(cmd_result)
sql_result = COMMONSH.execut_db_sql("drop table test cascade;"
"drop table test1 cascade;")
LOGGER.info(sql_result)
COMMONSH.execute_gsguc("reload",
self.constant.GSGUC_SUCCESS_MSG,
f"{self.param}='on'")
COMMONSH.restart_db_cluster()
result = COMMONSH.execut_db_sql(f"show {self.param};")
LOGGER.info(result)
status = COMMONSH.get_db_cluster_status()
self.assertTrue("on\n" in result)
self.assertNotIn("ERROR", sql_result)
self.assertTrue("Degraded" in status or "Normal" in status)
LOGGER.info("==Opengauss_Function_Guc_Run_Statistics_Case0069执行结束==")
|
nilq/baby-python
|
python
|
from tests.package.test_perl import TestPerlBase
class TestPerlDBDmysql(TestPerlBase):
"""
package:
DBD-mysql XS
direct dependencies:
DBI XS
"""
config = TestPerlBase.config + \
"""
BR2_PACKAGE_PERL=y
BR2_PACKAGE_PERL_DBD_MYSQL=y
"""
def test_run(self):
self.login()
self.module_test("DBI")
self.module_test("DBD::mysql")
|
nilq/baby-python
|
python
|
MIXNODE_CONFIG = {
'api_key': 'your api key' # available at https://www.mixnode.com/account/api
}
|
nilq/baby-python
|
python
|
from rest_framework import serializers
# from rest_framework_recursive.fields import RecursiveField
from . import models
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = models.Tag
fields = ('title', )
class ExperienceSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True)
user_name = serializers.CharField(source='user.username', read_only=True)
tags = serializers.StringRelatedField(many=True)
class Meta:
model = models.Experience
fields = ['pk', 'user', 'user_name', 'title', 'content', 'publish_date', 'tags']
class CommentSerializer(serializers.ModelSerializer):
user_name = serializers.CharField(source="user.username", read_only=True)
post_title = serializers.CharField(source="post.title", read_only=True)
class Meta:
model = models.Comment
fields = ('pk', 'user', 'user_name', 'text', 'created_date', 'post', 'post_title')
|
nilq/baby-python
|
python
|
import csv
from urllib.request import Request, urlopen
import urllib.error
import dateutil.parser
import re
from os import system
from sys import argv
from bs4 import BeautifulSoup
import scrape_util
default_sale, base_url, prefix = scrape_util.get_market(argv)
temp_raw = scrape_util.ReportRaw(argv, prefix)
#report_path = ['/market-reports.html', '/2013-market-reports-2.html', '/2013-market-reports.html', '/2012-reports.html', '/2011-reports.html']
report_path = ['/market-reports.html', '/2013-market-reports-2.html', '/2013-market-reports.html', '/2012-reports.html']
strip_char = ';,. \n\t'
def get_sale_date(date_string):
"""Return the date of the sale."""
date_string = date_string.replace('\xa0',' ')
match = re.search(r'& ?[0-9]+', date_string)
if match:
date_string = date_string.replace(match.group(),'')
sale_date = dateutil.parser.parse(date_string)
# Special Case
if sale_date.year == 201:
sale_date = sale_date.replace(year = 2014)
return sale_date
def is_sale(this_line):
"""Determine whether a given line describes a sale of cattle."""
is_not_succinct = len(re.split(r'\.{2,}', this_line)) > 2
has_number = re.search(r'[0-9]+', this_line)
start_with_number = re.match(r'[0-9]+', this_line)
return bool(has_number and is_not_succinct and not start_with_number)
def is_number(string):
"""Test whether a string is number-ish. Ignoring units like 'cwt' and 'hd'."""
if string:
string = re.sub(r'\$|[,-/()]|cwt|he?a?d?|pr?|avg\.?', '', string, flags = re.IGNORECASE)
try:
float(string)
result = True
except ValueError:
result = False
else:
result = False
return result
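# For example (hypothetical strings): is_number('152.50 cwt') is True, while
# is_number('3 black steers') is False, because letters other than the stripped
# unit abbreviations remain and the float() conversion fails.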
def get_sale(word):
"""Convert the input into a dictionary, with keys matching
the CSV column headers in the scrape_util module.
"""
number_word = [idx for idx, val in enumerate(word) if is_number(val)]
if len(number_word) == 0:
return {}
sale = {
'consignor_name': word[0].strip(strip_char).title(),
}
cattle_weight_list = word[1].split(',')
if len(cattle_weight_list) > 1:
weight_string = cattle_weight_list.pop().strip()
weight_string = weight_string.replace('#','').strip(strip_char)
try:
float(weight_string)
sale['cattle_avg_weight'] = weight_string
except ValueError:
pass
cattle_string = ','.join(cattle_weight_list).strip()
head_match = re.match(r'([0-9,]+)' ,cattle_string)
if head_match:
head_string = head_match.group(1).replace(',','').strip(strip_char)
try:
int(head_string)
sale['cattle_head'] = head_string
except ValueError:
pass
cattle_string = cattle_string.replace(head_match.group(1), '').strip(strip_char)
sale['cattle_cattle'] = cattle_string
price_string = word[number_word.pop()]
    match = re.search(r'([0-9,.]+) ?/?(he?a?d?|pr?)', price_string, re.IGNORECASE)
    key = 'cattle_price'
if not match:
match = re.search(r'([0-9,.]+) ?/?c?w?t?', price_string, re.IGNORECASE)
key = 'cattle_price_cwt'
if match:
sale[key] = match.group(1).replace(',', '').strip(strip_char)
sale = {k:v for k,v in sale.items() if v}
return sale
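# Illustrative walk-through with hypothetical data (not from a real report):
# a line split on runs of dots such as
#   word == ['John Doe', '3 black steers, 640#', '152.50 cwt']
# yields
#   {'consignor_name': 'John Doe', 'cattle_head': '3',
#    'cattle_cattle': 'black steers', 'cattle_avg_weight': '640',
#    'cattle_price_cwt': '152.50'}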
def write_sale(line, this_default_sale, writer):
"""Extract sales from a list of report lines and write them to a CSV file."""
consignor_name = ''
for this_line in line:
if is_sale(this_line):
sale = this_default_sale.copy()
word = re.split(r'\.{2,}', this_line)
if not re.match(r'\.{2,}', this_line):
match = re.match(r'(.+?)\.{2,}', this_line)
if match:
consignor_name = match.group(1)
# Assign consignor name of previous row if consignor name not found
else:
word[0] = consignor_name
sale.update(get_sale(word))
if sale != this_default_sale:
writer.writerow(sale)
def main():
for this_report_path in report_path:
# Collect individual reports into a list
request = Request(
base_url + this_report_path,
headers = scrape_util.url_header,
)
with urlopen(request) as io:
soup = BeautifulSoup(io.read(), 'lxml')
content = soup.find('div', itemprop = 'articleBody')
report = content.find_all('a')
# Locate existing CSV files
archive = scrape_util.ArchiveFolder(argv, prefix)
# Write a CSV file for each report not in the archive
for this_report in report:
sale_date = get_sale_date(this_report.get_text())
io_name = archive.new_csv(sale_date)
if not io_name:
continue
# Initialize the default sale dictionary
this_default_sale = default_sale.copy()
this_default_sale.update({
'sale_year': sale_date.year,
'sale_month': sale_date.month,
'sale_day': sale_date.day,
})
# create temporary text file from downloaded pdf
pdf_url = base_url + this_report['href']
request = Request(
pdf_url,
headers = scrape_util.url_header,
)
try:
with urlopen(request) as io:
response = io.read()
except urllib.error.HTTPError:
continue
with temp_raw.open('wb') as io:
io.write(response)
system(scrape_util.pdftotext.format(str(temp_raw)))
# read sale text into line list
temp_txt = temp_raw.with_suffix('.txt')
if scrape_util.platform=='win32':
read_errors = 'ignore'
else:
read_errors = None
with temp_txt.open('r', errors=read_errors) as io:
original_line = list(this_line.strip() for this_line in io)
temp_raw.clean()
split_index = 110
# Look for line with two sales and the index to split the line into two columns
for this_line in original_line:
match = re.search(r'(\.{2,} *[0-9,]+).+?( {3,}).+?(\.{2,} *[0-9,]+)', this_line)
if match:
split_index = this_line.find(match.group(2)) + len(match.group(2))
break
column1 = list(this_line[0:split_index].strip() for this_line in original_line)
column2 = list(this_line[split_index:].strip() for this_line in original_line)
line = column1 + column2
# Open a new CSV file and write each sale
with io_name.open('w', encoding='utf-8') as io:
writer = csv.DictWriter(io, scrape_util.header, lineterminator='\n')
writer.writeheader()
write_sale(line, this_default_sale, writer)
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-07 13:59
from __future__ import unicode_literals
from django.db import migrations, models
import sendinblue.forms
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailembeds.blocks
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
dependencies = [
('sendinblue', '0002_form_introduction'),
]
operations = [
migrations.AddField(
model_name='sendinblueform',
name='confirm_template',
field=models.IntegerField(blank=True, help_text='Send a confirmation mail to the user using this template', null=True, verbose_name='Confirmation template'),
),
migrations.AddField(
model_name='sendinblueform',
name='notify_template',
field=models.IntegerField(blank=True, help_text='Send a notification mail using this template. The notify mail should be defined in SendInBlue settings', null=True, verbose_name='Notify template'),
),
migrations.AddField(
model_name='sendinbluesettings',
name='notify_email',
field=models.EmailField(blank=True, help_text='Notification mail will be sent to this email', max_length=255, null=True, verbose_name='Notification email'),
),
migrations.AlterField(
model_name='sendinblueform',
name='definition',
field=wagtail.wagtailcore.fields.StreamField((('text_field', wagtail.wagtailcore.blocks.StructBlock((('label', wagtail.wagtailcore.blocks.CharBlock(help_text='The text displayed aside the field', label='Label', max_length=255, required=False)), ('required', wagtail.wagtailcore.blocks.BooleanBlock(default=True, label='Required', required=False)), ('attribute', sendinblue.forms.SendInBlueAttributeBlock(required=True)), ('placeholder', wagtail.wagtailcore.blocks.CharBlock(help_text='The text displayed inside the field when empty', label='Placeholder', max_length=255, required=False))))), ('textarea', wagtail.wagtailcore.blocks.StructBlock((('label', wagtail.wagtailcore.blocks.CharBlock(help_text='The text displayed aside the field', label='Label', max_length=255, required=False)), ('required', wagtail.wagtailcore.blocks.BooleanBlock(default=True, label='Required', required=False)), ('rows', wagtail.wagtailcore.blocks.IntegerBlock(default=3, label='Rows', required=True)), ('attribute', wagtail.wagtailcore.blocks.CharBlock(default='message', help_text='The attribute used for transactional template', label='Attribute', max_length=255, required=True)), ('placeholder', wagtail.wagtailcore.blocks.CharBlock(help_text='The text displayed inside the field when empty', label='Placeholder', max_length=255, required=False))))), ('text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('html', wagtail.wagtailcore.blocks.RawHTMLBlock()), ('embed', wagtail.wagtailembeds.blocks.EmbedBlock()))),
),
]
|
nilq/baby-python
|
python
|
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import pdb
def init_weights(m):
if isinstance(m, nn.Conv2d):
torch.nn.init.xavier_uniform_(m.weight)
m.bias.data.fill_(0.01)
class UNet(nn.Module):
def __init__(self, init_weights=True):
super(UNet, self).__init__() #252 * 252 * 3 - input
self.conv1 = nn.Conv2d(3, 64, kernel_size=3) #250 * 250 * 64
self.conv2 = nn.Conv2d(64, 64, kernel_size=3) #248 * 248 * 64
#after maxpool 124 * 124 * 64
self.conv3 = nn.Conv2d(64, 128, kernel_size=3) #122 * 122 * 128
self.conv4 = nn.Conv2d(128, 128, kernel_size=3) #120 * 120 * 128
#after maxpool 60 * 60 * 128
self.conv5 = nn.Conv2d(128, 256, kernel_size=3) #58 * 58 * 256
self.conv6 = nn.Conv2d(256, 256, kernel_size=3) #56 * 56 * 256
#after maxpool 28 * 28 * 256
self.conv7 = nn.Conv2d(256, 512, kernel_size=3) #26 * 26 * 512
self.conv8 = nn.Conv2d(512, 512, kernel_size=3) #24 * 24 * 512
#after maxpool 12 * 12 * 512
self.conv9 = nn.Conv2d(512, 1024, kernel_size=3) #10 * 10 * 1024
self.conv10 = nn.Conv2d(1024, 1024, kernel_size=3) #8 * 8 * 1024
self.upsample = nn.Upsample(scale_factor=2, mode='nearest') #16 * 16 * 1024
self.upConv1 = nn.Conv2d(1024, 512, kernel_size=1) #16 * 16 * 512
#after concatenation with crop of 16 * 16 * 512
#16 * 16 * 1024
self.deConv1 = nn.Conv2d(1024, 512, kernel_size = 3) #14 * 14 * 512
#self.conv8 #12 * 12 * 512
#upsample 1 #24 * 24 * 512
self.upConv2 = nn.Conv2d(512, 256, kernel_size=1) #24 * 24 * 256
#after concatenation with crop of 24 * 24 * 256
#24 * 24 * 512
self.deConv2 = nn.Conv2d(512, 256, kernel_size = 3) #22 * 22 * 256
#self.conv6 #20 * 20 * 256
#upsample 1 #40 * 40 * 256
self.upConv3 = nn.Conv2d(256, 128, kernel_size=1) #40 * 40 * 128
#after concatenation with crop of 40 * 40 * 128
#40 * 40 * 256
self.deConv3 = nn.Conv2d(256, 128, kernel_size = 3) #38 * 38 * 128
#self.conv4 #36 * 36 * 128
        #upsample 1 #72 * 72 * 128
self.upConv4 = nn.Conv2d(128, 64, kernel_size=1) #72 * 72 * 64
#after concatenation with crop of 72 * 72 * 64
#72 * 72 * 128
self.deConv4 = nn.Conv2d(128, 64, kernel_size = 3) #70 * 70 * 64
#self.conv2 #68 * 68 * 64
self.deConv5 = nn.Conv2d(64, 1, kernel_size = 1) #68 * 68 * 1
self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
if init_weights:
self._initialize_weights()
def forward(self, x):
#H * W * c
#252 * 252 * 3
out1 = F.relu(self.conv1(x)) #250 * 250 * 64
#print('out1:{}'.format(out1.shape))
out2 = F.relu(self.conv2(out1)) #248 * 248 * 64
#print('out2:{}'.format(out2.shape))
out3 = self.pool(out2) #124 * 124 * 64
#print('out3:{}'.format(out3.shape))
out4 = F.relu(self.conv3(out3)) #122 * 122 * 128
#print('out4:{}'.format(out4.shape))
out5 = F.relu(self.conv4(out4)) #120 * 120 * 128
#print('out5:{}'.format(out5.shape))
out6 = self.pool(out5) #60 * 60 * 128
#print('out6:{}'.format(out6.shape))
out7 = F.relu(self.conv5(out6)) #58 * 58 * 256
#print('out7:{}'.format(out7.shape))
out8 = F.relu(self.conv6(out7)) #56 * 56 * 256
#print('out8:{}'.format(out8.shape))
out9 = self.pool(out8) #28 * 28 * 256
#print('out9:{}'.format(out9.shape))
out10 = F.relu(self.conv7(out9)) #26 * 26 * 512
#print('out10:{}'.format(out10.shape))
out11 = F.relu(self.conv8(out10)) #24 * 24 * 512
#print('out11:{}'.format(out11.shape))
out12 = self.pool(out11) #12 * 12 * 512
#print('out12:{}'.format(out12.shape))
out13 = F.relu(self.conv9(out12)) #10 * 10 * 1024
#print('out13:{}'.format(out13.shape))
out14 = F.relu(self.conv10(out13)) #8 * 8 * 1024
#print('out14:{}'.format(out14.shape))
out15 = self.upsample(out14) #16 * 16 * 1024
#print('out15:{}'.format(out15.shape))
out16 = self.upConv1(out15) #16 * 16 * 512
#print('out16:{}'.format(out16.shape))
out16_bypass = out11[:,:,4:20,4:20]
#print('out16:{}'.format(out16.shape))
out17 = torch.cat((out16, out16_bypass), 1) #16 * 16 * 1024
#print('out17:{}'.format(out17.shape))
out18 = F.relu(self.deConv1(out17)) #14 * 14 * 512
#print('out18:{}'.format(out18.shape))
out19 = F.relu(self.conv8(out18)) #12 * 12 * 512
#print('out19:{}'.format(out19.shape))
out20 = self.upsample(out19) #24 * 24 * 512
#print('out20:{}'.format(out20.shape))
out21 = self.upConv2(out20) #24 * 24 * 256
#print('out21:{}'.format(out21.shape))
out21_bypass = out8[:, :, 16:40, 16:40] #24 * 24 * 256
#print('out21_bypass:{}'.format(out21_bypass.shape))
out22 = torch.cat((out21, out21_bypass), 1) #24 * 24 * 512
#print('out22:{}'.format(out22.shape))
out23 = F.relu(self.deConv2(out22)) #22 * 22 * 256
#print('out23:{}'.format(out23.shape))
out24 = F.relu(self.conv6(out23)) #20 * 20 * 256
#print('out24:{}'.format(out24.shape))
out25 = self.upsample(out24) #40 * 40 * 256
#print('out25:{}'.format(out25.shape))
out26 = self.upConv3(out25) #40 * 40 * 128
#print('out26:{}'.format(out26.shape))
out26_bypass = out5[:, :, 40:80, 40:80] #40 * 40 * 128
#print('out26_bypass:{}'.format(out26_bypass.shape))
out27 = torch.cat((out26, out26_bypass), 1) #40 * 40 * 256
#print('out27:{}'.format(out27.shape))
out28 = F.relu(self.deConv3(out27)) #38 * 38 * 128
#print('out28:{}'.format(out28.shape))
out29 = F.relu(self.conv4(out28)) #36 * 36 * 128
#print('out29:{}'.format(out29.shape))
out30 = self.upsample(out29) #72 * 72 * 128
#print('out30:{}'.format(out30.shape))
out31 = self.upConv4(out30) #72 * 72 * 64
#print('out31:{}'.format(out31.shape))
out31_bypass = out2[:, :, 88:160, 88:160] #72 * 72 * 64
#print('out31_bypass:{}'.format(out31_bypass.shape))
out32 = torch.cat((out31, out31_bypass), 1) #72 * 72 * 128
#print('out32:{}'.format(out32.shape))
out33 = self.deConv4(out32) #70 * 70 * 64
#print('out33:{}'.format(out33.shape))
out34 = self.conv2(out33) #68 * 68 * 64
#print('out34:{}'.format(out34.shape))
out35 = self.deConv5(out34) #68 * 68 * 1
#print('out35:{}'.format(out35.shape))
return out35
def _initialize_weights(self):
self.apply(init_weights)
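# Shape sanity check (follows from the per-layer comments above; 252x252 RGB in,
# 68x68 single-channel map out):
#   net = UNet()
#   out = net(torch.randn(1, 3, 252, 252))
#   assert out.shape == (1, 1, 68, 68)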
|
nilq/baby-python
|
python
|
from .. import ssl
from . import base
class LocalCa(base.CaManager):
"""Class implementing a certificate authority based on a private key retrieved from CA storage
"""
def __init__(
self,
ca_config,
staging=True,
storage_api=None,
ca_private_key=None,
ca_certificate=None,
certificate_validity_days=90,
**kwargs
):
super(LocalCa, self).__init__(ca_config, staging=staging, storage_api=storage_api)
self.certificate_validity_days = certificate_validity_days
self.ca_private_key = self.storage_api.retrieve_data(
name=ca_private_key,
data_type=ssl.DataType.PrivateKey,
)
self.ca_certificate = self.storage_api.retrieve_data(
name=ca_certificate,
data_type=ssl.DataType.Certificate,
)
def get_signed_certificate(self, ssl_blueprint=None, csr_path=None, servers_api=None):
return ssl.sign(
csr=csr_path.read_bytes(),
ca_key=self.ca_private_key,
ca_cert=self.ca_certificate,
validity_days=self.certificate_validity_days,
)
@property
def is_automated_renewal_supported(self):
return True
|
nilq/baby-python
|
python
|
from autode.transition_states.ts_guess import TSguess
from autode.transition_states.transition_state import TransitionState
__all__ = ['TSguess',
'TransitionState']
|
nilq/baby-python
|
python
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import torch
from detectron2.layers import batched_nms
from detectron2.utils.env import TORCH_VERSION
class TestNMS(unittest.TestCase):
def _create_tensors(self, N):
boxes = torch.rand(N, 4) * 100
# Note: the implementation of this function in torchvision is:
# boxes[:, 2:] += torch.rand(N, 2) * 100
# but it does not guarantee non-negative widths/heights constraints:
# boxes[:, 2] >= boxes[:, 0] and boxes[:, 3] >= boxes[:, 1]:
boxes[:, 2:] += boxes[:, :2]
scores = torch.rand(N)
return boxes, scores
@unittest.skipIf(TORCH_VERSION < (1, 6), "Insufficient pytorch version")
def test_nms_scriptability(self):
N = 2000
num_classes = 50
boxes, scores = self._create_tensors(N)
idxs = torch.randint(0, num_classes, (N,))
scripted_batched_nms = torch.jit.script(batched_nms)
err_msg = "NMS is incompatible with jit-scripted NMS for IoU={}"
for iou in [0.2, 0.5, 0.8]:
keep_ref = batched_nms(boxes, scores, idxs, iou)
backup = boxes.clone()
scripted_keep = scripted_batched_nms(boxes, scores, idxs, iou)
assert torch.allclose(boxes, backup), "boxes modified by jit-scripted batched_nms"
self.assertTrue(torch.equal(keep_ref, scripted_keep), err_msg.format(iou))
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
from .c_distribution_gaussian import CDistributionGaussian
from .c_density_estimation import CDensityEstimation
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import json
import sys
# https://docs.python.org/3/library/urllib.parse.html
from urllib.parse import urlparse
if len(sys.argv) < 2:
    print("Usage:", sys.argv[0], "<file.json>")
    raise SystemExit
data = open(sys.argv[1]).read()
har = json.loads(data)
domain_map = {}
for e in har['log']['entries']:
    url = e['request']['url']
    o = urlparse(url)
    # Create list at key if not already present
    domain_map[o.netloc] = domain_map.get(o.netloc, [])
    domain_map[o.netloc].append(url)
for domain, urls in domain_map.items():
    print(domain)
    for u in urls:
        print("\t", u[:30])
|
nilq/baby-python
|
python
|
"""Audio Overlay Tool
Usage:
aot.py <input_dir> <output_dir> <num_generate> <samples_per_sample> [options]
Options:
-f RGX --filter=RGX a filter for selecting the input files from the input directory.
    -o FMT --outfmt=FMT      Output filename format (capture groups of file a available as {a<n>}, of file b as {b<n>}).
For Example:
a=bass-something.wav b=bass-something_else.wav c=...
filter=(bass)-(.+)(\.wav)
outfmt=remixed-{a1}-{a2}+{b2}{a3}
then result => remixed-bass-something+something_else.wav
Note that a0 and b0 are the full match.
-h --help Show this screen.
--version Show version.
"""
import asyncio
from docopt import docopt
from driver import Remixer
async def main():
arguments = docopt(__doc__, version="0.0.1-alpha0")
driver = Remixer(args=arguments)
await driver.run()
if __name__ == "__main__":
asyncio.run(main())
|
nilq/baby-python
|
python
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from ._test_base import _SendTest
from azure_devtools.perfstress_tests import get_random_bytes
from azure.servicebus import Message
from azure.servicebus.aio import Message as AsyncMessage
class LegacySendMessageTest(_SendTest):
def __init__(self, arguments):
super().__init__(arguments)
self.data = get_random_bytes(self.args.message_size)
def run_sync(self):
message = Message(self.data)
self.sender.send(message)
async def run_async(self):
message = AsyncMessage(self.data)
await self.async_sender.send(message)
|
nilq/baby-python
|
python
|
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from perfkitbenchmarker import flags
flags.DEFINE_string('openstack_cli_path',
default='openstack',
help='The path to the OpenStack CLI binary.')
flags.DEFINE_string('openstack_nova_path',
default='nova',
help='The path to the Nova CLI binary.')
flags.DEFINE_string('openstack_neutron_path',
default='neutron',
help='The path to the Neutron CLI binary.')
flags.DEFINE_string('openstack_additional_flags',
default=[],
help='Additional flags to pass to every OpenStack CLI '
'command. See "openstack --help" for more.')
flags.DEFINE_string('openstack_public_network', None,
'(DEPRECATED: Use openstack_floating_ip_pool) '
'Name of OpenStack public network.')
flags.DEFINE_string('openstack_private_network', 'private',
'(DEPRECATED: Use openstack_network) '
'Name of OpenStack private network.')
flags.DEFINE_string('openstack_network', 'private',
'Name of OpenStack network. This network provides '
'automatically allocated fixed-IP addresses to attached '
'instances. Typically, this network is used for internal '
'communication between instances. '
'If openstack_floating_ip_pool is not '
'set then this network will be used to communicate with '
'the instance.')
flags.DEFINE_string('openstack_floating_ip_pool', None,
'Name of OpenStack floating IP-address pool. If set, '
                    'a floating-ip address from this pool will be associated '
'to each instance and will be used for communicating '
'with it. To use this flag, an internally routable network '
'must also be specified via the openstack_network flag.')
flags.DEFINE_boolean('openstack_config_drive', False,
'Add possibilities to get metadata from external drive')
flags.DEFINE_boolean('openstack_boot_from_volume', False,
'Boot from volume instead of an image')
flags.DEFINE_integer('openstack_volume_size', None,
'Size of the volume (GB)')
flags.DEFINE_string('openstack_image_username', 'ubuntu',
'Ssh username for cloud image')
NONE = 'None'
flags.DEFINE_enum('openstack_scheduler_policy', NONE,
[NONE, 'affinity', 'anti-affinity'],
'Add possibility to use affinity or anti-affinity '
'policy in scheduling process')
|
nilq/baby-python
|
python
|
import unittest
from http import HTTPStatus
from test.flask_test_app import create_app
class TestRequestArg(unittest.TestCase):
def assertInHTML(self, value, response):
HTML_text = response.data.decode("utf-8")
self.assertIn(value, HTML_text)
def setUp(self) -> None:
_app = create_app()
self.app = _app.test_client()
def test_post(self):
float_value = 123.456
int_value = 43987439
r = self.app.post(
"/post", data=dict(int_value=int_value, float_value=float_value)
)
self.assertEqual(HTTPStatus.OK, r.status_code)
self.assertInHTML(f"int_value:{int_value}", r)
self.assertInHTML(f"float_value:{float_value}", r)
def test_put_json(self):
float_value = 123.456
int_value = 43987439
r = self.app.put(
"/put_json",
json=dict(int_value=int_value, float_value=float_value),
content_type="application/json",
)
self.assertEqual(HTTPStatus.OK, r.status_code, r.data)
self.assertInHTML(f"int_value:{int_value}", r)
self.assertInHTML(f"float_value:{float_value}", r)
def test_get(self):
float_value = 123.456
int_value = 43987439
string_value = "o4iuuo34u390jsfdsf"
optional_string_value = "ooiiu43hssh"
r = self.app.get(
"/get", data=dict(int_value=int_value, float_value=float_value)
)
self.assertEqual(HTTPStatus.OK, r.status_code)
self.assertInHTML(f"int_value:{int_value}", r)
self.assertInHTML(f"float_value:{float_value}", r)
# string
r = self.app.get("/get_string", data=dict(string_value=string_value))
self.assertEqual(HTTPStatus.OK, r.status_code)
self.assertInHTML(f"string_value:{string_value}", r)
self.assertInHTML(f"<p>optional_string_value:</p>", r)
# optional value
r = self.app.get(
"/get_string",
data=dict(
string_value=string_value, optional_string_value=optional_string_value
),
)
self.assertEqual(HTTPStatus.OK, r.status_code)
self.assertInHTML(f"string_value:{string_value}", r)
self.assertInHTML(f"<p>optional_string_value:{optional_string_value}</p>", r)
r = self.app.post("/post_string", data=dict(string_value=string_value))
self.assertEqual(HTTPStatus.OK, r.status_code)
self.assertInHTML(f"string_value:{string_value}", r)
def test_argument_missing(self):
float_value = 123.456
int_value = 43987439
# GET
r = self.app.get("/get", data=dict(int_value=int_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertEqual(b"Required argument missing: float_value", r.data)
r = self.app.get("/get", data=dict(float_value=float_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertEqual(b"Required argument missing: int_value", r.data)
# POST
r = self.app.post("/post", data=dict(int_value=int_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertEqual(b"Required argument missing: float_value", r.data)
r = self.app.post("/post", data=dict(float_value=float_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertEqual(b"Required argument missing: int_value", r.data)
# PUT json
r = self.app.put(
"/put_json",
json=dict(float_value=float_value),
content_type="application/json",
)
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertEqual(b"Required argument missing: int_value", r.data)
def test_invalid_cast(self):
float_value = 123.456
int_value = 43987439
# GET
r = self.app.get(
"/get", data=dict(int_value=float_value, float_value=float_value)
)
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertIn(b"Required argument failed type conversion: int_value", r.data)
r = self.app.get("/get", data=dict(float_value="hello", int_value=int_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertIn(b"Required argument failed type conversion: float_value", r.data)
# POST
r = self.app.post(
"/post", data=dict(int_value=float_value, float_value=float_value)
)
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertIn(b"Required argument failed type conversion: int_value", r.data)
r = self.app.post("/post", data=dict(float_value="hello", int_value=int_value))
self.assertEqual(HTTPStatus.BAD_REQUEST, r.status_code)
self.assertIn(b"Required argument failed type conversion: float_value", r.data)
def test_readme_example(self):
r = self.app.get("/area_of_a_circle", data=dict(radius=1))
self.assertEqual(b"3.14", r.data)
r = self.app.put("/area_of_a_circle", json=dict(radius=1))
self.assertEqual(b"3.14", r.data)
r = self.app.post("/area_of_a_circle", data=dict(radius=1))
self.assertEqual(b"3.14", r.data)
r = self.app.put("/area_of_a_circle", data=dict(radius=1))
self.assertEqual(b"3.14", r.data)
def test_custom_arg_type(self):
r = self.app.get("/custom_arg_type", data=dict(arg_type=True))
self.assertEqual(b"yes", r.data)
r = self.app.put(
"/custom_arg_type",
json=dict(arg_type=False),
content_type="application/json",
)
self.assertEqual(b"no", r.data)
r = self.app.put("/custom_arg_type", json=dict(arg_type="False"))
self.assertEqual(b"no", r.data)
r = self.app.put("/custom_arg_type", json=dict(arg_type="True"))
self.assertEqual(b"yes", r.data)
r = self.app.put("/custom_arg_type", json=dict(arg_type="true"))
self.assertEqual(b"no", r.data)
def test_get_json_arg_form(self):
float_value = 123.456
int_value = 43987439
r = self.app.get(
f"/get?int_value={int_value}", json=dict(float_value=float_value)
)
self.assertEqual(HTTPStatus.OK, r.status_code, r.data)
self.assertInHTML(f"int_value:{int_value}", r)
self.assertInHTML(f"float_value:{float_value}", r)
|
nilq/baby-python
|
python
|
#%%
## command-line arguments
import argparse
# NOTE: add_help is disabled because '-h' is reassigned to --homooligomer below,
# which would otherwise conflict with argparse's built-in -h/--help option.
parser = argparse.ArgumentParser(description="Runner script that can take command-line arguments", add_help=False)
parser.add_argument("-i", "--input", help="Path to a FASTA file. Required.", required=True)
parser.add_argument("-o", "--output_dir", default="", type=str,
help="Path to a directory that will store the results. "
"The default name is 'prediction_<hash>'. ")
parser.add_argument("-h", "--homooligomer", default="1", type=str,
help="homooligomer: Define number of copies in a homo-oligomeric assembly. "
"For example, sequence:ABC:DEF, homooligomer: 2:1, "
"the first protein ABC will be modeled as a homodimer (2 copies) and second DEF a monomer (1 copy). Default is 1.")
parser.add_argument("-m", "--msa_method", default="mmseqs2", type=str, choices=["mmseqs2", "single_sequence"],
help="Options to generate MSA."
"mmseqs2 - FAST method from ColabFold (default) "
"single_sequence - use single sequence input."
"Default is 'mmseqs2'.")
parser.add_argument("-p", "--pair_mode", default="unpaired", choices=["unpaired", "unpaired+paired", "paired"],
help="Experimental option for protein complexes. "
"Pairing currently only supported for proteins in same operon (prokaryotic genomes). "
"unpaired - generate separate MSA for each protein. (default) "
"unpaired+paired - attempt to pair sequences from the same operon within the genome. "
"paired - only use sequences that were successfully paired. "
"Default is 'unpaired'.")
parser.add_argument("-pc", "--pair_cov", default=50, type=int,
help="Options to prefilter each MSA before pairing. It might help if there are any paralogs in the complex. "
"prefilter each MSA to minimum coverage with query (%%) before pairing. "
"Default is 50.")
parser.add_argument("-pq", "--pair_qid", default=20, type=int,
help="Options to prefilter each MSA before pairing. It might help if there are any paralogs in the complex. "
"prefilter each MSA to minimum sequence identity with query (%%) before pairing. "
"Default is 20.")
parser.add_argument("-b", "--rank_by", default="pLDDT", type=str, choices=["pLDDT", "pTMscore"],
help="specify metric to use for ranking models (For protein-protein complexes, we recommend pTMscore). "
"Default is 'pLDDT'.")
parser.add_argument("-t", "--use_turbo", action='store_true',
help="introduces a few modifications (compile once, swap params, adjust max_msa) to speedup and reduce memory requirements. "
"Disable for default behavior.")
parser.add_argument("-mm", "--max_msa", default="512:1024", type=str,
help="max_msa defines: max_msa_clusters:max_extra_msa number of sequences to use. "
"This option ignored if use_turbo is disabled. Default is '512:1024'.")
parser.add_argument("-n", "--num_models", default=5, type=int, help="specify how many model params to try. (Default is 5)")
parser.add_argument("-pt", "--use_ptm", action='store_true',
help="uses Deepmind's ptm finetuned model parameters to get PAE per structure. "
"Disable to use the original model params. (Disabling may give alternative structures.)")
parser.add_argument("-e", "--num_ensemble", default=1, type=int, choices=[1, 8],
help="the trunk of the network is run multiple times with different random choices for the MSA cluster centers. "
"(1=default, 8=casp14 setting)")
parser.add_argument("-r", "--max_recycles", default=3, type=int, help="controls the maximum number of times the structure is fed back into the neural network for refinement. (default is 3)")
parser.add_argument("--tol", default=0, type=float, help="tolerance for deciding when to stop (CA-RMS between recycles)")
parser.add_argument("--is_training", action='store_true',
help="enables the stochastic part of the model (dropout), when coupled with num_samples can be used to 'sample' a diverse set of structures. False (NOT specifying this option) is recommended at first.")
parser.add_argument("--num_samples", default=1, type=int, help="number of random_seeds to try. Default is 1.")
parser.add_argument("--num_relax", default="None", choices=["None", "Top1", "Top5", "All"],
help="num_relax is 'None' (default), 'Top1', 'Top5' or 'All'. Specify how many of the top ranked structures to relax.")
args = parser.parse_args()
## command-line arguments
### Check your OS for localcolabfold
import platform
pf = platform.system()
if pf == 'Windows':
print('ColabFold on Windows')
elif pf == 'Darwin':
print('ColabFold on Mac')
device="cpu"
elif pf == 'Linux':
print('ColabFold on Linux')
device="gpu"
#%%
### python code of AlphaFold2_advanced.ipynb
import os
import tensorflow as tf
tf.config.set_visible_devices([], 'GPU')
import jax
from IPython.utils import io
import subprocess
import tqdm.notebook
# --- Python imports ---
import colabfold as cf
import pairmsa
import sys
import pickle
from urllib import request
from concurrent import futures
import json
from matplotlib import gridspec
import matplotlib.pyplot as plt
import numpy as np
import py3Dmol
from alphafold.model import model
from alphafold.model import config
from alphafold.model import data
from alphafold.data import parsers
from alphafold.data import pipeline
from alphafold.data.tools import jackhmmer
from alphafold.common import protein
def run_jackhmmer(sequence, prefix):
fasta_path = f"{prefix}.fasta"
with open(fasta_path, 'wt') as f:
f.write(f'>query\n{sequence}')
pickled_msa_path = f"{prefix}.jackhmmer.pickle"
if os.path.isfile(pickled_msa_path):
msas_dict = pickle.load(open(pickled_msa_path,"rb"))
msas, deletion_matrices, names = (msas_dict[k] for k in ['msas', 'deletion_matrices', 'names'])
full_msa = []
for msa in msas:
full_msa += msa
else:
# --- Find the closest source ---
test_url_pattern = 'https://storage.googleapis.com/alphafold-colab{:s}/latest/uniref90_2021_03.fasta.1'
ex = futures.ThreadPoolExecutor(3)
def fetch(source):
request.urlretrieve(test_url_pattern.format(source))
return source
fs = [ex.submit(fetch, source) for source in ['', '-europe', '-asia']]
source = None
for f in futures.as_completed(fs):
source = f.result()
ex.shutdown()
break
jackhmmer_binary_path = '/usr/bin/jackhmmer'
dbs = []
num_jackhmmer_chunks = {'uniref90': 59, 'smallbfd': 17, 'mgnify': 71}
total_jackhmmer_chunks = sum(num_jackhmmer_chunks.values())
with tqdm.notebook.tqdm(total=total_jackhmmer_chunks, bar_format=TQDM_BAR_FORMAT) as pbar:
def jackhmmer_chunk_callback(i):
pbar.update(n=1)
pbar.set_description('Searching uniref90')
jackhmmer_uniref90_runner = jackhmmer.Jackhmmer(
binary_path=jackhmmer_binary_path,
database_path=f'https://storage.googleapis.com/alphafold-colab{source}/latest/uniref90_2021_03.fasta',
get_tblout=True,
num_streamed_chunks=num_jackhmmer_chunks['uniref90'],
streaming_callback=jackhmmer_chunk_callback,
z_value=135301051)
dbs.append(('uniref90', jackhmmer_uniref90_runner.query(fasta_path)))
pbar.set_description('Searching smallbfd')
jackhmmer_smallbfd_runner = jackhmmer.Jackhmmer(
binary_path=jackhmmer_binary_path,
database_path=f'https://storage.googleapis.com/alphafold-colab{source}/latest/bfd-first_non_consensus_sequences.fasta',
get_tblout=True,
num_streamed_chunks=num_jackhmmer_chunks['smallbfd'],
streaming_callback=jackhmmer_chunk_callback,
z_value=65984053)
dbs.append(('smallbfd', jackhmmer_smallbfd_runner.query(fasta_path)))
pbar.set_description('Searching mgnify')
jackhmmer_mgnify_runner = jackhmmer.Jackhmmer(
binary_path=jackhmmer_binary_path,
database_path=f'https://storage.googleapis.com/alphafold-colab{source}/latest/mgy_clusters_2019_05.fasta',
get_tblout=True,
num_streamed_chunks=num_jackhmmer_chunks['mgnify'],
streaming_callback=jackhmmer_chunk_callback,
z_value=304820129)
dbs.append(('mgnify', jackhmmer_mgnify_runner.query(fasta_path)))
# --- Extract the MSAs and visualize ---
# Extract the MSAs from the Stockholm files.
# NB: deduplication happens later in pipeline.make_msa_features.
mgnify_max_hits = 501
msas = []
deletion_matrices = []
names = []
for db_name, db_results in dbs:
unsorted_results = []
for i, result in enumerate(db_results):
msa, deletion_matrix, target_names = parsers.parse_stockholm(result['sto'])
e_values_dict = parsers.parse_e_values_from_tblout(result['tbl'])
e_values = [e_values_dict[t.split('/')[0]] for t in target_names]
zipped_results = zip(msa, deletion_matrix, target_names, e_values)
if i != 0:
# Only take query from the first chunk
zipped_results = [x for x in zipped_results if x[2] != 'query']
unsorted_results.extend(zipped_results)
sorted_by_evalue = sorted(unsorted_results, key=lambda x: x[3])
db_msas, db_deletion_matrices, db_names, _ = zip(*sorted_by_evalue)
if db_msas:
if db_name == 'mgnify':
db_msas = db_msas[:mgnify_max_hits]
db_deletion_matrices = db_deletion_matrices[:mgnify_max_hits]
db_names = db_names[:mgnify_max_hits]
msas.append(db_msas)
deletion_matrices.append(db_deletion_matrices)
names.append(db_names)
msa_size = len(set(db_msas))
print(f'{msa_size} Sequences Found in {db_name}')
pickle.dump({"msas":msas,
"deletion_matrices":deletion_matrices,
"names":names}, open(pickled_msa_path,"wb"))
return msas, deletion_matrices, names
#%%
import re
# --read sequence from input file--
from Bio import SeqIO
def readfastafile(fastafile):
records = list(SeqIO.parse(fastafile, "fasta"))
if(len(records) != 1):
raise ValueError('Input FASTA file must have a single ID/sequence.')
else:
return records[0].id, records[0].seq
print("Input ID: {}".format(readfastafile(args.input)[0]))
print("Input Sequence: {}".format(readfastafile(args.input)[1]))
sequence = str(readfastafile(args.input)[1])
# --read sequence from input file--
sequence = re.sub("[^A-Z:/]", "", sequence.upper())
sequence = re.sub(":+",":",sequence)
sequence = re.sub("/+","/",sequence)
sequence = re.sub("^[:/]+","",sequence)
sequence = re.sub("[:/]+$","",sequence)
jobname = "test" #@param {type:"string"}
jobname = re.sub(r'\W+', '', jobname)
# define number of copies
homooligomer = args.homooligomer #@param {type:"string"}
homooligomer = re.sub("[:/]+",":",homooligomer)
homooligomer = re.sub("^[:/]+","",homooligomer)
homooligomer = re.sub("[:/]+$","",homooligomer)
if len(homooligomer) == 0: homooligomer = "1"
homooligomer = re.sub("[^0-9:]", "", homooligomer)
homooligomers = [int(h) for h in homooligomer.split(":")]
#@markdown - `sequence` Specify protein sequence to be modelled.
#@markdown - Use `/` to specify intra-protein chainbreaks (for trimming regions within protein).
#@markdown - Use `:` to specify inter-protein chainbreaks (for modeling protein-protein hetero-complexes).
#@markdown - For example, sequence `AC/DE:FGH` will be modelled as polypeptides: `AC`, `DE` and `FGH`. A separate MSA will be generated for `ACDE` and `FGH`.
#@markdown If `pair_msa` is enabled, `ACDE`'s MSA will be paired with `FGH`'s MSA.
#@markdown - `homooligomer` Define number of copies in a homo-oligomeric assembly.
#@markdown - Use `:` to specify a different homooligomeric state (copy number) for each component of the complex.
#@markdown - For example, **sequence:**`ABC:DEF`, **homooligomer:** `2:1`, the first protein `ABC` will be modeled as a homodimer (2 copies) and second `DEF` a monomer (1 copy).
ori_sequence = sequence
sequence = sequence.replace("/","").replace(":","")
seqs = ori_sequence.replace("/","").split(":")
if len(seqs) != len(homooligomers):
if len(homooligomers) == 1:
homooligomers = [homooligomers[0]] * len(seqs)
homooligomer = ":".join([str(h) for h in homooligomers])
else:
while len(seqs) > len(homooligomers):
homooligomers.append(1)
homooligomers = homooligomers[:len(seqs)]
homooligomer = ":".join([str(h) for h in homooligomers])
print("WARNING: Mismatch between number of breaks ':' in 'sequence' and 'homooligomer' definition")
full_sequence = "".join([s*h for s,h in zip(seqs,homooligomers)])
# prediction directory
# --set the output directory from command-line arguments
if args.output_dir == "":
output_dir = 'prediction_' + jobname + '_' + cf.get_hash(full_sequence)[:5]
else:
output_dir = args.output_dir
# --set the output directory from command-line arguments
os.makedirs(output_dir, exist_ok=True)
# delete existing files in working directory
for f in os.listdir(output_dir):
os.remove(os.path.join(output_dir, f))
MIN_SEQUENCE_LENGTH = 16
MAX_SEQUENCE_LENGTH = 2500
aatypes = set('ACDEFGHIKLMNPQRSTVWY') # 20 standard aatypes
if not set(full_sequence).issubset(aatypes):
raise Exception(f'Input sequence contains non-amino acid letters: {set(sequence) - aatypes}. AlphaFold only supports 20 standard amino acids as inputs.')
if len(full_sequence) < MIN_SEQUENCE_LENGTH:
raise Exception(f'Input sequence is too short: {len(full_sequence)} amino acids, while the minimum is {MIN_SEQUENCE_LENGTH}')
if len(full_sequence) > MAX_SEQUENCE_LENGTH:
raise Exception(f'Input sequence is too long: {len(full_sequence)} amino acids, while the maximum is {MAX_SEQUENCE_LENGTH}. Please use the full AlphaFold system for long sequences.')
if len(full_sequence) > 1400:
print(f"WARNING: For a typical Google-Colab-GPU (16G) session, the max total length is ~1400 residues. You are at {len(full_sequence)}! Run Alphafold may crash.")
print(f"homooligomer: '{homooligomer}'")
print(f"total_length: '{len(full_sequence)}'")
print(f"working_directory: '{output_dir}'")
#%%
TQDM_BAR_FORMAT = '{l_bar}{bar}| {n_fmt}/{total_fmt} [elapsed: {elapsed} remaining: {remaining}]'
#@markdown Once this cell has been executed, you will see
#@markdown statistics about the multiple sequence alignment
#@markdown (MSA) that will be used by AlphaFold. In particular,
#@markdown you’ll see how well each residue is covered by similar
#@markdown sequences in the MSA.
#@markdown (Note that the search against databases and the actual prediction can take some time, from minutes to hours, depending on the length of the protein and what type of GPU you are allocated by Colab.)
#@markdown ---
msa_method = args.msa_method #@param ["mmseqs2","jackhmmer","single_sequence","precomputed"]
#@markdown ---
#@markdown **custom msa options**
add_custom_msa = False #@param {type:"boolean"}
msa_format = "fas" #@param ["fas","a2m","a3m","sto","psi","clu"]
#@markdown - `add_custom_msa` - If enabled, you'll get an option to upload your custom MSA in the specified `msa_format`. Note: Your MSA will be supplemented with those from 'mmseqs2' or 'jackhmmer', unless `msa_method` is set to 'single_sequence'.
# --set the output directory from command-line arguments
pair_mode = args.pair_mode #@param ["unpaired","unpaired+paired","paired"] {type:"string"}
pair_cov = args.pair_cov #@param [0,25,50,75,90] {type:"raw"}
pair_qid = args.pair_qid #@param [0,15,20,30,40,50] {type:"raw"}
# --set the output directory from command-line arguments
# --- Search against genetic databases ---
os.makedirs('tmp', exist_ok=True)
msas, deletion_matrices = [],[]
if add_custom_msa:
    print(f"upload custom msa in '{msa_format}' format")
    # files.upload() is the google.colab upload widget; this branch (and the
    # 'precomputed' branch below) therefore only works inside Colab.
    msa_dict = files.upload()
lines = msa_dict[list(msa_dict.keys())[0]].decode()
# convert to a3m
with open(f"tmp/upload.{msa_format}","w") as tmp_upload:
tmp_upload.write(lines)
os.system(f"reformat.pl {msa_format} a3m tmp/upload.{msa_format} tmp/upload.a3m")
a3m_lines = open("tmp/upload.a3m","r").read()
# parse
msa, mtx = parsers.parse_a3m(a3m_lines)
msas.append(msa)
deletion_matrices.append(mtx)
if len(msas[0][0]) != len(sequence):
raise ValueError("ERROR: the length of msa does not match input sequence")
if msa_method == "precomputed":
print("upload precomputed pickled msa from previous run")
pickled_msa_dict = files.upload()
msas_dict = pickle.loads(pickled_msa_dict[list(pickled_msa_dict.keys())[0]])
msas, deletion_matrices = (msas_dict[k] for k in ['msas', 'deletion_matrices'])
elif msa_method == "single_sequence":
if len(msas) == 0:
msas.append([sequence])
deletion_matrices.append([[0]*len(sequence)])
else:
seqs = ori_sequence.replace('/','').split(':')
_blank_seq = ["-" * len(seq) for seq in seqs]
_blank_mtx = [[0] * len(seq) for seq in seqs]
def _pad(ns,vals,mode):
if mode == "seq": _blank = _blank_seq.copy()
if mode == "mtx": _blank = _blank_mtx.copy()
if isinstance(ns, list):
for n,val in zip(ns,vals): _blank[n] = val
else: _blank[ns] = vals
if mode == "seq": return "".join(_blank)
if mode == "mtx": return sum(_blank,[])
if len(seqs) == 1 or "unpaired" in pair_mode:
# gather msas
if msa_method == "mmseqs2":
prefix = cf.get_hash("".join(seqs))
prefix = os.path.join('tmp',prefix)
print(f"running mmseqs2")
A3M_LINES = cf.run_mmseqs2(seqs, prefix, filter=True)
for n, seq in enumerate(seqs):
# tmp directory
prefix = cf.get_hash(seq)
prefix = os.path.join('tmp',prefix)
if msa_method == "mmseqs2":
# run mmseqs2
a3m_lines = A3M_LINES[n]
msa, mtx = parsers.parse_a3m(a3m_lines)
msas_, mtxs_ = [msa],[mtx]
elif msa_method == "jackhmmer":
print(f"running jackhmmer on seq_{n}")
# run jackhmmer
msas_, mtxs_, names_ = ([sum(x,())] for x in run_jackhmmer(seq, prefix))
# pad sequences
for msa_,mtx_ in zip(msas_,mtxs_):
msa,mtx = [sequence],[[0]*len(sequence)]
for s,m in zip(msa_,mtx_):
msa.append(_pad(n,s,"seq"))
mtx.append(_pad(n,m,"mtx"))
msas.append(msa)
deletion_matrices.append(mtx)
####################################################################################
# PAIR_MSA
####################################################################################
if len(seqs) > 1 and (pair_mode == "paired" or pair_mode == "unpaired+paired"):
print("attempting to pair some sequences...")
if msa_method == "mmseqs2":
prefix = cf.get_hash("".join(seqs))
prefix = os.path.join('tmp',prefix)
print(f"running mmseqs2_noenv_nofilter on all seqs")
A3M_LINES = cf.run_mmseqs2(seqs, prefix, use_env=False, use_filter=False)
_data = []
for a in range(len(seqs)):
print(f"prepping seq_{a}")
_seq = seqs[a]
_prefix = os.path.join('tmp',cf.get_hash(_seq))
if msa_method == "mmseqs2":
a3m_lines = A3M_LINES[a]
_msa, _mtx, _lab = pairmsa.parse_a3m(a3m_lines,
filter_qid=pair_qid/100,
filter_cov=pair_cov/100)
elif msa_method == "jackhmmer":
_msas, _mtxs, _names = run_jackhmmer(_seq, _prefix)
_msa, _mtx, _lab = pairmsa.get_uni_jackhmmer(_msas[0], _mtxs[0], _names[0],
filter_qid=pair_qid/100,
filter_cov=pair_cov/100)
if len(_msa) > 1:
_data.append(pairmsa.hash_it(_msa, _lab, _mtx, call_uniprot=False))
else:
_data.append(None)
Ln = len(seqs)
O = [[None for _ in seqs] for _ in seqs]
for a in range(Ln):
if _data[a] is not None:
for b in range(a+1,Ln):
if _data[b] is not None:
print(f"attempting pairwise stitch for {a} {b}")
O[a][b] = pairmsa._stitch(_data[a],_data[b])
_seq_a, _seq_b, _mtx_a, _mtx_b = (*O[a][b]["seq"],*O[a][b]["mtx"])
##############################################
# filter to remove redundant sequences
##############################################
ok = []
with open("tmp/tmp.fas","w") as fas_file:
fas_file.writelines([f">{n}\n{a+b}\n" for n,(a,b) in enumerate(zip(_seq_a,_seq_b))])
os.system("hhfilter -maxseq 1000000 -i tmp/tmp.fas -o tmp/tmp.id90.fas -id 90")
for line in open("tmp/tmp.id90.fas","r"):
if line.startswith(">"): ok.append(int(line[1:]))
##############################################
print(f"found {len(_seq_a)} pairs ({len(ok)} after filtering)")
if len(_seq_a) > 0:
msa,mtx = [sequence],[[0]*len(sequence)]
for s_a,s_b,m_a,m_b in zip(_seq_a, _seq_b, _mtx_a, _mtx_b):
msa.append(_pad([a,b],[s_a,s_b],"seq"))
mtx.append(_pad([a,b],[m_a,m_b],"mtx"))
msas.append(msa)
deletion_matrices.append(mtx)
'''
# triwise stitching (WIP)
if Ln > 2:
for a in range(Ln):
for b in range(a+1,Ln):
for c in range(b+1,Ln):
if O[a][b] is not None and O[b][c] is not None:
print(f"attempting triwise stitch for {a} {b} {c}")
list_ab = O[a][b]["lab"][1]
list_bc = O[b][c]["lab"][0]
msa,mtx = [sequence],[[0]*len(sequence)]
for i,l_b in enumerate(list_ab):
if l_b in list_bc:
j = list_bc.index(l_b)
s_a = O[a][b]["seq"][0][i]
s_b = O[a][b]["seq"][1][i]
s_c = O[b][c]["seq"][1][j]
m_a = O[a][b]["mtx"][0][i]
m_b = O[a][b]["mtx"][1][i]
m_c = O[b][c]["mtx"][1][j]
msa.append(_pad([a,b,c],[s_a,s_b,s_c],"seq"))
mtx.append(_pad([a,b,c],[m_a,m_b,m_c],"mtx"))
if len(msa) > 1:
msas.append(msa)
deletion_matrices.append(mtx)
print(f"found {len(msa)} triplets")
'''
####################################################################################
####################################################################################
# save MSA as pickle
pickle.dump({"msas":msas,"deletion_matrices":deletion_matrices},
open(os.path.join(output_dir,"msa.pickle"),"wb"))
make_msa_plot = len(msas[0]) > 1
if make_msa_plot:
plt = cf.plot_msas(msas, ori_sequence)
plt.savefig(os.path.join(output_dir,"msa_coverage.png"), bbox_inches = 'tight', dpi=300)
#%%
##@title run alphafold
# --------set parameters from command-line arguments--------
num_relax = args.num_relax
rank_by = args.rank_by
use_turbo = args.use_turbo  # action='store_true' already yields a bool
max_msa = args.max_msa
# --------set parameters from command-line arguments--------
max_msa_clusters, max_extra_msa = [int(x) for x in max_msa.split(":")]
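# e.g. max_msa "512:1024" yields max_msa_clusters = 512, max_extra_msa = 1024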
#@markdown - `rank_by` specify metric to use for ranking models (For protein-protein complexes, we recommend pTMscore)
#@markdown - `use_turbo` introduces a few modifications (compile once, swap params, adjust max_msa) to speedup and reduce memory requirements. Disable for default behavior.
#@markdown - `max_msa` defines: `max_msa_clusters:max_extra_msa` number of sequences to use. When adjusting after GPU crash, be sure to `Runtime` → `Restart runtime`. (Lowering will reduce GPU requirements, but may result in poor model quality. This option ignored if `use_turbo` is disabled)
show_images = True #@param {type:"boolean"}
#@markdown - `show_images` To make things more exciting we show images of the predicted structures as they are being generated. (WARNING: the order of images displayed does not reflect any ranking).
#@markdown ---
#@markdown #### Sampling options
#@markdown There are two stochastic parts of the pipeline. Within the feature generation (choice of cluster centers) and within the model (dropout).
#@markdown To get structure diversity, you can iterate through a fixed number of random_seeds (using `num_samples`) and/or enable dropout (using `is_training`).
# --------set parameters from command-line arguments--------
num_models = args.num_models
use_ptm = args.use_ptm  # action='store_true' already yields a bool
num_ensemble = args.num_ensemble
max_recycles = args.max_recycles
tol = args.tol
is_training = args.is_training  # action='store_true' already yields a bool
num_samples = args.num_samples
# --------set parameters from command-line arguments--------
subsample_msa = True #@param {type:"boolean"}
#@markdown - `subsample_msa` subsample large MSA to `3E7/length` sequences to avoid crashing the preprocessing protocol. (This option ignored if `use_turbo` is disabled.)
save_pae_json = True
save_tmp_pdb = True
if not use_ptm and rank_by == "pTMscore":
print("WARNING: models will be ranked by pLDDT, 'use_ptm' is needed to compute pTMscore")
rank_by = "pLDDT"
#############################
# delete old files
#############################
for f in os.listdir(output_dir):
if "rank_" in f:
os.remove(os.path.join(output_dir, f))
#############################
# homooligomerize
#############################
lengths = [len(seq) for seq in seqs]
msas_mod, deletion_matrices_mod = cf.homooligomerize_heterooligomer(msas, deletion_matrices,
lengths, homooligomers)
#############################
# define input features
#############################
def _placeholder_template_feats(num_templates_, num_res_):
return {
'template_aatype': np.zeros([num_templates_, num_res_, 22], np.float32),
      'template_all_atom_masks': np.zeros([num_templates_, num_res_, 37], np.float32),
      'template_all_atom_positions': np.zeros([num_templates_, num_res_, 37, 3], np.float32),
'template_domain_names': np.zeros([num_templates_], np.float32),
'template_sum_probs': np.zeros([num_templates_], np.float32),
}
num_res = len(full_sequence)
feature_dict = {}
feature_dict.update(pipeline.make_sequence_features(full_sequence, 'test', num_res))
feature_dict.update(pipeline.make_msa_features(msas_mod, deletion_matrices=deletion_matrices_mod))
if not use_turbo:
feature_dict.update(_placeholder_template_feats(0, num_res))
def do_subsample_msa(F, random_seed=0):
'''subsample msa to avoid running out of memory'''
N = len(F["msa"])
L = len(F["residue_index"])
N_ = int(3E7/L)
if N > N_:
print(f"whhhaaa... too many sequences ({N}) subsampling to {N_}")
np.random.seed(random_seed)
idx = np.append(0,np.random.permutation(np.arange(1,N)))[:N_]
F_ = {}
F_["msa"] = F["msa"][idx]
F_["deletion_matrix_int"] = F["deletion_matrix_int"][idx]
F_["num_alignments"] = np.full_like(F["num_alignments"],N_)
for k in ['aatype', 'between_segment_residues',
'domain_name', 'residue_index',
'seq_length', 'sequence']:
F_[k] = F[k]
return F_
else:
return F
################################
# set chain breaks
################################
Ls = []
for seq,h in zip(ori_sequence.split(":"),homooligomers):
Ls += [len(s) for s in seq.split("/")] * h
Ls_plot = sum([[len(seq)]*h for seq,h in zip(seqs,homooligomers)],[])
feature_dict['residue_index'] = cf.chain_break(feature_dict['residue_index'], Ls)
###########################
# run alphafold
###########################
def parse_results(prediction_result, processed_feature_dict):
b_factors = prediction_result['plddt'][:,None] * prediction_result['structure_module']['final_atom_mask']
dist_bins = jax.numpy.append(0,prediction_result["distogram"]["bin_edges"])
dist_mtx = dist_bins[prediction_result["distogram"]["logits"].argmax(-1)]
contact_mtx = jax.nn.softmax(prediction_result["distogram"]["logits"])[:,:,dist_bins < 8].sum(-1)
out = {"unrelaxed_protein": protein.from_prediction(processed_feature_dict, prediction_result, b_factors=b_factors),
"plddt": prediction_result['plddt'],
"pLDDT": prediction_result['plddt'].mean(),
"dists": dist_mtx,
"adj": contact_mtx}
if "ptm" in prediction_result:
out.update({"pae": prediction_result['predicted_aligned_error'],
"pTMscore": prediction_result['ptm']})
return out
model_names = ['model_1', 'model_2', 'model_3', 'model_4', 'model_5'][:num_models]
total = len(model_names) * num_samples
with tqdm.notebook.tqdm(total=total, bar_format=TQDM_BAR_FORMAT) as pbar:
#######################################################################
# precompile model and recompile only if length changes
#######################################################################
if use_turbo:
name = "model_5_ptm" if use_ptm else "model_5"
N = len(feature_dict["msa"])
L = len(feature_dict["residue_index"])
compiled = (N, L, use_ptm, max_recycles, tol, num_ensemble, max_msa, is_training)
if "COMPILED" in dir():
if COMPILED != compiled: recompile = True
else: recompile = True
if recompile:
cf.clear_mem("gpu")
cfg = config.model_config(name)
# set size of msa (to reduce memory requirements)
msa_clusters = min(N, max_msa_clusters)
cfg.data.eval.max_msa_clusters = msa_clusters
cfg.data.common.max_extra_msa = max(min(N-msa_clusters,max_extra_msa),1)
cfg.data.common.num_recycle = max_recycles
cfg.model.num_recycle = max_recycles
cfg.model.recycle_tol = tol
cfg.data.eval.num_ensemble = num_ensemble
params = data.get_model_haiku_params(name,'./alphafold/data')
model_runner = model.RunModel(cfg, params, is_training=is_training)
COMPILED = compiled
recompile = False
else:
cf.clear_mem("gpu")
recompile = True
# cleanup
if "outs" in dir(): del outs
outs = {}
cf.clear_mem("cpu")
#######################################################################
def report(key):
pbar.update(n=1)
o = outs[key]
line = f"{key} recycles:{o['recycles']} tol:{o['tol']:.2f} pLDDT:{o['pLDDT']:.2f}"
if use_ptm: line += f" pTMscore:{o['pTMscore']:.2f}"
print(line)
if show_images:
fig = cf.plot_protein(o['unrelaxed_protein'], Ls=Ls_plot, dpi=100)
# plt.show()
plt.ion()
if save_tmp_pdb:
tmp_pdb_path = os.path.join(output_dir,f'unranked_{key}_unrelaxed.pdb')
pdb_lines = protein.to_pdb(o['unrelaxed_protein'])
with open(tmp_pdb_path, 'w') as f: f.write(pdb_lines)
if use_turbo:
# go through each random_seed
for seed in range(num_samples):
# prep input features
if subsample_msa:
sampled_feats_dict = do_subsample_msa(feature_dict, random_seed=seed)
processed_feature_dict = model_runner.process_features(sampled_feats_dict, random_seed=seed)
else:
processed_feature_dict = model_runner.process_features(feature_dict, random_seed=seed)
# go through each model
for num, model_name in enumerate(model_names):
name = model_name+"_ptm" if use_ptm else model_name
key = f"{name}_seed_{seed}"
pbar.set_description(f'Running {key}')
# replace model parameters
params = data.get_model_haiku_params(name, './alphafold/data')
for k in model_runner.params.keys():
model_runner.params[k] = params[k]
# predict
prediction_result, (r, t) = cf.to(model_runner.predict(processed_feature_dict, random_seed=seed),"cpu")
# save results
outs[key] = parse_results(prediction_result, processed_feature_dict)
outs[key].update({"recycles":r, "tol":t})
report(key)
del prediction_result, params
            del processed_feature_dict
            if subsample_msa: del sampled_feats_dict
else:
# go through each model
for num, model_name in enumerate(model_names):
name = model_name+"_ptm" if use_ptm else model_name
params = data.get_model_haiku_params(name, './alphafold/data')
cfg = config.model_config(name)
cfg.data.common.num_recycle = cfg.model.num_recycle = max_recycles
cfg.model.recycle_tol = tol
cfg.data.eval.num_ensemble = num_ensemble
model_runner = model.RunModel(cfg, params, is_training=is_training)
# go through each random_seed
for seed in range(num_samples):
key = f"{name}_seed_{seed}"
pbar.set_description(f'Running {key}')
processed_feature_dict = model_runner.process_features(feature_dict, random_seed=seed)
prediction_result, (r, t) = cf.to(model_runner.predict(processed_feature_dict, random_seed=seed),"cpu")
outs[key] = parse_results(prediction_result, processed_feature_dict)
outs[key].update({"recycles":r, "tol":t})
report(key)
# cleanup
del processed_feature_dict, prediction_result
del params, model_runner, cfg
cf.clear_mem("gpu")
# delete old files
for f in os.listdir(output_dir):
if "rank" in f:
os.remove(os.path.join(output_dir, f))
    # Rank models by the chosen metric (mean pLDDT or pTMscore).
model_rank = list(outs.keys())
model_rank = [model_rank[i] for i in np.argsort([outs[x][rank_by] for x in model_rank])[::-1]]
# Write out the prediction
for n,key in enumerate(model_rank):
prefix = f"rank_{n+1}_{key}"
pred_output_path = os.path.join(output_dir,f'{prefix}_unrelaxed.pdb')
fig = cf.plot_protein(outs[key]["unrelaxed_protein"], Ls=Ls_plot, dpi=200)
plt.savefig(os.path.join(output_dir,f'{prefix}.png'), bbox_inches = 'tight')
plt.close(fig)
pdb_lines = protein.to_pdb(outs[key]["unrelaxed_protein"])
with open(pred_output_path, 'w') as f:
f.write(pdb_lines)
############################################################
print(f"model rank based on {rank_by}")
for n,key in enumerate(model_rank):
print(f"rank_{n+1}_{key} {rank_by}:{outs[key][rank_by]:.2f}")
#%%
#@title Refine structures with Amber-Relax (Optional)
# --------set parameters from command-line arguments--------
num_relax = args.num_relax
# --------set parameters from command-line arguments--------
if num_relax == "None":
num_relax = 0
elif num_relax == "Top1":
num_relax = 1
elif num_relax == "Top5":
num_relax = 5
else:
num_relax = len(model_names) * num_samples
if num_relax > 0:
if "relax" not in dir():
# add conda environment to path
sys.path.append('./colabfold-conda/lib/python3.7/site-packages')
# import libraries
from alphafold.relax import relax
from alphafold.relax import utils
with tqdm.notebook.tqdm(total=num_relax, bar_format=TQDM_BAR_FORMAT) as pbar:
pbar.set_description(f'AMBER relaxation')
for n,key in enumerate(model_rank):
if n < num_relax:
prefix = f"rank_{n+1}_{key}"
pred_output_path = os.path.join(output_dir,f'{prefix}_relaxed.pdb')
if not os.path.isfile(pred_output_path):
amber_relaxer = relax.AmberRelaxation(
max_iterations=0,
tolerance=2.39,
stiffness=10.0,
exclude_residues=[],
max_outer_iterations=20)
relaxed_pdb_lines, _, _ = amber_relaxer.process(prot=outs[key]["unrelaxed_protein"])
with open(pred_output_path, 'w') as f:
f.write(relaxed_pdb_lines)
pbar.update(n=1)
#%%
#@title Display 3D structure {run: "auto"}
rank_num = 1 #@param ["1", "2", "3", "4", "5"] {type:"raw"}
color = "lDDT" #@param ["chain", "lDDT", "rainbow"]
show_sidechains = False #@param {type:"boolean"}
show_mainchains = False #@param {type:"boolean"}
key = model_rank[rank_num-1]
prefix = f"rank_{rank_num}_{key}"
pred_output_path = os.path.join(output_dir,f'{prefix}_relaxed.pdb')
if not os.path.isfile(pred_output_path):
pred_output_path = os.path.join(output_dir,f'{prefix}_unrelaxed.pdb')
cf.show_pdb(pred_output_path, show_sidechains, show_mainchains, color, Ls=Ls_plot).show()
if color == "lDDT": cf.plot_plddt_legend().show()
if use_ptm:
cf.plot_confidence(outs[key]["plddt"], outs[key]["pae"], Ls=Ls_plot).show()
else:
cf.plot_confidence(outs[key]["plddt"], Ls=Ls_plot).show()
#%%
#@title Extra outputs
dpi = 300 #@param {type:"integer"}
save_to_txt = True #@param {type:"boolean"}
save_pae_json = True #@param {type:"boolean"}
#@markdown - save data used to generate contact and distogram plots below to text file (pae values can be found in json file if `use_ptm` is enabled)
if use_ptm:
print("predicted alignment error")
cf.plot_paes([outs[k]["pae"] for k in model_rank], Ls=Ls_plot, dpi=dpi)
plt.savefig(os.path.join(output_dir,f'predicted_alignment_error.png'), bbox_inches = 'tight', dpi=np.maximum(200,dpi))
# plt.show()
print("predicted contacts")
cf.plot_adjs([outs[k]["adj"] for k in model_rank], Ls=Ls_plot, dpi=dpi)
plt.savefig(os.path.join(output_dir,f'predicted_contacts.png'), bbox_inches = 'tight', dpi=np.maximum(200,dpi))
# plt.show()
print("predicted distogram")
cf.plot_dists([outs[k]["dists"] for k in model_rank], Ls=Ls_plot, dpi=dpi)
plt.savefig(os.path.join(output_dir,f'predicted_distogram.png'), bbox_inches = 'tight', dpi=np.maximum(200,dpi))
# plt.show()
print("predicted LDDT")
cf.plot_plddts([outs[k]["plddt"] for k in model_rank], Ls=Ls_plot, dpi=dpi)
plt.savefig(os.path.join(output_dir,f'predicted_LDDT.png'), bbox_inches = 'tight', dpi=np.maximum(200,dpi))
# plt.show()
def do_save_to_txt(filename, adj, dists):
adj = np.asarray(adj)
dists = np.asarray(dists)
L = len(adj)
with open(filename,"w") as out:
out.write("i\tj\taa_i\taa_j\tp(cbcb<8)\tmaxdistbin\n")
for i in range(L):
for j in range(i+1,L):
if dists[i][j] < 21.68 or adj[i][j] >= 0.001:
line = f"{i+1}\t{j+1}\t{full_sequence[i]}\t{full_sequence[j]}\t{adj[i][j]:.3f}"
line += f"\t>{dists[i][j]:.2f}" if dists[i][j] == 21.6875 else f"\t{dists[i][j]:.2f}"
out.write(f"{line}\n")
for n,key in enumerate(model_rank):
if save_to_txt:
txt_filename = os.path.join(output_dir,f'rank_{n+1}_{key}.raw.txt')
do_save_to_txt(txt_filename,adj=outs[key]["adj"],dists=outs[key]["dists"])
if use_ptm and save_pae_json:
pae = outs[key]["pae"]
max_pae = pae.max()
# Save pLDDT and predicted aligned error (if it exists)
pae_output_path = os.path.join(output_dir,f'rank_{n+1}_{key}_pae.json')
# Save predicted aligned error in the same format as the AF EMBL DB
rounded_errors = np.round(np.asarray(pae), decimals=1)
indices = np.indices((len(rounded_errors), len(rounded_errors))) + 1
indices_1 = indices[0].flatten().tolist()
indices_2 = indices[1].flatten().tolist()
pae_data = json.dumps([{
'residue1': indices_1,
'residue2': indices_2,
'distance': rounded_errors.flatten().tolist(),
'max_predicted_aligned_error': max_pae.item()
}],
indent=None,
separators=(',', ':'))
with open(pae_output_path, 'w') as f:
f.write(pae_data)
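        # The written JSON follows the AlphaFold DB layout (values illustrative,
        # two-residue case):
        # [{"residue1":[1,1,2,2],"residue2":[1,2,1,2],
        #   "distance":[0.2,3.1,3.0,0.2],"max_predicted_aligned_error":31.8}]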
#%%
|
nilq/baby-python
|
python
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="migeo", # Replace with your own username
version="v0.0.1",
author="Projeto EM UFPA/Petrobrás",
author_email="isadora.s.macedo@gmail.com",
description="Modelagem e inversão eletromagnética por volumes finitos: MT e MCSEM.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/projetoemBR/migeo-master",
download_url="https://github.com/projetoemBR/migeo-master/archive/v0.0.1.tar.gz",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: Unix",
],
install_requires=[
"numpy>=1.7",
"scipy>=1.4.0",
"pymatsolver>=0.1.1",
"matplotlib",
"properties>=0.5.2",
"vectormath>=0.2.0",
"discretize>=0.4.0",
"geoana>=0.0.4",
"empymod>=2.0.0",
"pandas",
"numba>=0.45.0",
"pyvista",
],
python_requires='>=3.7',
)
|
nilq/baby-python
|
python
|
# Author: Mohammad Samani
# Date: 1.12.2021
# Place: Basel, Switzerland
import time, platform, struct, asyncio
from config import conf
from error import RecordError
from bleak import BleakClient
# The WinRT backend only exists on Windows; importing it elsewhere raises.
if platform.system() == "Windows":
    from bleak.backends.winrt.client import BleakClientWinRT
import db
ADDRESS = conf['mac_address']
TEMP_UUID = conf['Sensors']['temperature']['uuid']
HUM_UUID = conf['Sensors']['humidity']['uuid']
async def getdata_windows():
"""
This function runs only on Windows. I have only tested this on Windows 10.
"""
async with BleakClientWinRT(ADDRESS, address_type="random", timeout=100) as client:
try:
tmp_bytearr = await client.read_gatt_char(TEMP_UUID)
temperature = struct.unpack('<f',tmp_bytearr)[0]
hum_bytearr = await client.read_gatt_char(HUM_UUID)
humidity = struct.unpack('<f',hum_bytearr)[0]
print(temperature, humidity)
return temperature, humidity
        except Exception as ex:
            print(ex)
            RecordError(f"{ex}")
            return None, None
async def getdata_linux():
"""
This function runs only on Linux. I have only tested this on Raspbian running on a Raspberry Pi.
"""
async with BleakClient(ADDRESS, address_type="random", timeout=100) as client:
try:
tmp_bytearr = await client.read_gatt_char(TEMP_UUID)
temperature = struct.unpack('<f',tmp_bytearr)[0]
hum_bytearr = await client.read_gatt_char(HUM_UUID)
humidity = struct.unpack('<f',hum_bytearr)[0]
return temperature, humidity
        except Exception as ex:
            print(ex)
            RecordError(f"{ex}")
            return None, None
system = platform.system()
if system not in ["Windows", "Linux"]:
print("You need either Windows or Linux to run this.")
exit(1)
while True:
    time.sleep(conf['Logging']['SleepInLoop'])
    if system == "Windows":
        temperature, humidity = asyncio.run(getdata_windows())
    if system == "Linux":
        temperature, humidity = asyncio.run(getdata_linux())
    if temperature is None or humidity is None:
        continue  # the read failed and was already recorded; skip logging
    db.record_logs(conf['Machine'], [[conf['Sensors']['humidity']['name'], humidity], [conf['Sensors']['temperature']['name'], temperature]])
|
nilq/baby-python
|
python
|
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime
from app.db.session import Session
from app.models import Base
class Captcha(Base):
__tablename__ = 'covid_captcha'
    id = Column(Integer, autoincrement=True, primary_key=True, comment="primary key")
    create_time = Column(DateTime, default=datetime.now, nullable=False, comment="created at")
    update_time = Column(DateTime, default=datetime.now, nullable=False, comment="updated at")
    captcha = Column(String(255), comment="captcha code")
    session_id = Column(String(255), comment="session id")
    expiration = Column(String(255), comment="expiration time")
@staticmethod
def get_captcha_by_session(*, db: Session, session: str, ):
try:
result = db.query(Captcha).filter_by(
session_id=session
).order_by(
Captcha.id.desc()
).first()
return result
except Exception as _:
db.rollback()
raise
finally:
db.close()
@staticmethod
def add_captcha(*, db: Session, captcha: str, session_id: str, expiration: str):
try:
new_captcha = Captcha(captcha=captcha, session_id=session_id, expiration=expiration)
db.add(new_captcha)
db.commit()
except Exception as _:
db.rollback()
raise
finally:
db.close()
# Create the table:
# Base.metadata.create_all(engine)
|
nilq/baby-python
|
python
|
from migen import *
from photonsdi.constants import *
class FrameExtractor(Module):
def __init__(self, elementary_stream_count=2):
assert elementary_stream_count in [2]
datapath_width = elementary_stream_count * SDI_ELEMENTARY_STREAM_DATA_WIDTH
self.i_data = Signal(datapath_width)
# TODO
|
nilq/baby-python
|
python
|
from typing import NamedTuple, Dict, Generator
import re
CommandArgs = NamedTuple('CommandArgs',
[('command', str), ('args', list[str])])
def variable_expansion(word: str, environment: Dict[str, str]) -> str:
"""Подставляет значения из окружения вместо переменных."""
return re.sub(r'\$([^$\s\'"]+)',
lambda s: environment.get(s.group(1), ''),
word)
def get_words(string: str,
environment: Dict[str, str]) -> Generator[str, None, None]:
"""Разбивает строку на слова и подставляет переменные."""
for match in re.findall(r'([^\s"\']+)|"([^"]*)"|\'([^\']*)\'', string):
single_quoted_word = match[2]
if single_quoted_word:
yield single_quoted_word
else:
word = max(match)
yield variable_expansion(word, environment)
def name_args(command: str, environment: Dict[str, str]) -> CommandArgs:
"""Достаёт имя команды и её аргументы."""
match = re.fullmatch(r'(\S+)=([^\s"\']+|"[^"]*"|\'[^\']*\')', command)
if match:
variable = match.group(1)
value = match.group(2)
value = next(get_words(value, environment))
return CommandArgs('=', [variable, value])
name, *args = list(get_words(command, environment))
return CommandArgs(name, args)
def parse(line: str, environment: Dict[str, str]) -> list[CommandArgs]:
"""Парсит строку в список команд и их аргументов."""
commands = filter(None, re.split(r'\s+\|\s+', line.strip()))
return [name_args(command, environment) for command in commands]
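# Usage sketch (hypothetical environment and input):
# parse('echo $USER | grep "a b"', {'USER': 'alice'})
# -> [CommandArgs(command='echo', args=['alice']),
#     CommandArgs(command='grep', args=['a b'])]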
|
nilq/baby-python
|
python
|
# coding=utf-8
from contracts import contract, describe_value, describe_type
from geometry import logger
import numpy as np
from .manifolds import DifferentiableManifold
#
# def array_to_lists(x):
# return x.tolist()
#
# def packet(space, rep, value):
# return {'space': space, 'repr': rep, 'value': value}
#
# @contract(x='SE3')
# def yaml_from_SE3(x):
# return packet('SE3', 'matrix', array_to_lists(x))
#
# @contract(x='se3')
# def yaml_from_se3(x):
# return packet('se3', 'matrix', array_to_lists(x))
#
# # what about user-centered?
# def yaml_from_TSE3(x):
# pose, vel = x
# return packet('TSE3', 'base-tangent',
# [yaml_from_SE3(pose), yaml_from_se3(vel)])
converters = {}
default_representation = {}
def register_yaml_converter(manifold_name, representation, converter):
    if manifold_name not in default_representation:
        default_representation[manifold_name] = representation
    key = (manifold_name, representation)
    assert key not in converters
    converters[key] = converter
def get_default_representation(manifold):
if isinstance(manifold, DifferentiableManifold):
key = str(manifold)
else:
key = manifold
    if key not in default_representation:
raise Exception('Cannot find representation for %s.' % manifold)
return default_representation[key]
@contract(returns='list[2]')
def to_yaml(manifold, value, representation=None):
if representation is None:
representation = get_default_representation(manifold)
key = (manifold, representation)
    if key not in converters:
raise ValueError('Unknown format %s; I know %s.' %
(key, converters.keys()))
conv = converters[key]
    try:
        x = conv.to_yaml(value)
    except Exception:
        msg = 'Error while trying to convert %s' % describe_value(value)
        logger.error(msg)
        raise
return ['%s:%s' % (manifold, representation), x]
@contract(x='list[2]')
def from_yaml(x):
if not isinstance(x, list):
raise ValueError('I expect a list with two elements.')
form = x[0]
if not isinstance(form, str):
raise ValueError('I expect a string describing the format,'
' not %s, while decoding %s' %
(describe_type(form), describe_value(x)))
value = x[1]
space, representation = form.split(':')
key = (space, representation)
    if key not in converters:
raise ValueError('Unknown format %s; I know %s.' %
(key, converters.keys()))
conv = converters[key]
return conv.from_yaml(value)
class Representation(object):
def to_yaml(self, x):
pass
def from_yaml(self, y):
pass
class SE3_m44(Representation):
@staticmethod
@contract(x='SE3', returns='list[4](list[4](float))')
def to_yaml(x):
return x.tolist()
@staticmethod
@contract(y='list[4](list[4](float))', returns='SE3')
def from_yaml(y):
return np.array(y)
register_yaml_converter('SE3', 'm44', SE3_m44)
class se3_m44(Representation):
@staticmethod
def to_yaml(x):
return x.tolist()
@staticmethod
def from_yaml(y):
return np.array(y)
class TSE3_bt(Representation):
@staticmethod
def to_yaml(x):
a, b = x
return [SE3_m44.to_yaml(a), se3_m44.to_yaml(b)]
@staticmethod
def from_yaml(y):
return (SE3_m44.from_yaml(y[0]),
se3_m44.from_yaml(y[1]))
register_yaml_converter('TSE3', 'bt', TSE3_bt)
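# Minimal usage sketch (illustrative values; numpy is imported above):
# y = to_yaml('SE3', np.eye(4))   # -> ['SE3:m44', [[1.0, 0.0, 0.0, 0.0], ...]]
# x = from_yaml(y)                # -> the 4x4 identity as a numpy array again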
|
nilq/baby-python
|
python
|
from manga_py.provider import Provider
from .helpers import tapas_io
from .helpers.std import Std
class TapasIo(Provider, Std): # TODO: Login\Password
helper = None
def get_archive_name(self) -> str:
ch = self.chapter
return self.normal_arc_name([
ch['scene'],
ch['title']
])
def get_chapter_index(self) -> str:
return str(self.chapter['scene'])
def get_main_content(self):
content = self._storage.get('main_content', False)
return content if content else self.http_get(self.get_url())
def get_manga_name(self) -> str:
return self.re.search(r'seriesTitle\s*:\s*\'(.+)\',', self.content).group(1)
def get_chapters(self):
items = self.re.search(r'episodeList\s*:\s*(\[.+\]),', self.content).group(1)
return [i for i in self.json.loads(items)[::-1] if not i['locked']]
def get_files(self):
return self.helper.parse_chapter_content()
def get_cover(self) -> str:
return self._cover_from_content('#series-thumb img')
def prepare_cookies(self):
self.helper = tapas_io.TapasIo(self)
def book_meta(self) -> dict:
# todo meta
pass
def chapter_for_json(self):
return self.helper.chapter_url()
main = TapasIo
|
nilq/baby-python
|
python
|
import logging
from collections import namedtuple
from io import StringIO
from typing import List
from urllib.parse import quote_plus
from aiohttp import ClientSession, ClientTimeout
from lxml import etree
from lxml.html import HtmlElement
CourtInfo = namedtuple("CourtAddress", ["name", "address", "note"])
def to_url(param: str) -> str:
return quote_plus(param.lower().encode("cp1251"))
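# e.g. to_url("Москва") -> '%EC%EE%F1%EA%E2%E0' (cp1251 bytes, URL-quoted),
# which is the encoding sudrf.ru expects for its search parameters.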
def parse_address(raw_data: List[str], city: str) -> List[str]:
result: List[str] = []
for row in raw_data:
if city.lower() in row.lower():
result.append(row)
return result
def parse_court_data(raw_data: str, city: str) -> List[CourtInfo]:
parser = etree.HTMLParser()
tree = etree.parse(StringIO(raw_data), parser)
names: List[HtmlElement] = tree.xpath("//table[@class='msSearchResultTbl msFullSearchResultTbl']/tr/td/a")
names = [n.text for n in names]
raw_addresses: List[str] = tree.xpath("//table[@class='msSearchResultTbl msFullSearchResultTbl']/tr/td/"
"div[@class='courtInfoCont']/text()")
addresses = parse_address(raw_addresses, city)
notes: List[HtmlElement] = tree.xpath("//table[@class='msSearchResultTbl msFullSearchResultTbl']/"
"tr[not(@class='firstRow')]/td[last()]")
notes = [ai.text for ai in notes]
result: List[CourtInfo] = []
for name, address, note in zip(names, addresses, notes):
result.append(CourtInfo(name, address, note))
return result
async def resolve_court_address(city: str, court_subj: str, street: str) -> List[CourtInfo]:
url: str = f"https://sudrf.ru/index.php?id=300&&act=go_search&searchtype=fs&court_type=RS&" \
f"fs_city={to_url(city)}" \
f"&fs_street={to_url(street)}" \
f"&court_subj={court_subj}"
headers = {
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/94.0.4606.81 Safari/537.36"
}
try:
async with ClientSession() as session:
            # Cap the request at a 15-second timeout for responsiveness
timeout = ClientTimeout(total=15)
async with session.get(url, headers=headers, ssl=False, timeout=timeout) as resp:
body: str = await resp.text()
result = parse_court_data(body, city)
return result
except Exception:
logger = logging.getLogger()
logger.exception("Error occurred during court address resolving")
return []
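# Usage sketch (hypothetical values; court_subj is the region code on sudrf.ru):
# import asyncio
# courts = asyncio.run(resolve_court_address("Москва", "77", "Тверская"))
# for court in courts:
#     print(court.name, court.address, court.note)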
|
nilq/baby-python
|
python
|
import json
import logging
import os
import sys
import boto3
import domovoi
from botocore.exceptions import ClientError
pkg_root = os.path.abspath(os.path.join(os.path.dirname(__file__), 'domovoilib')) # noqa
sys.path.insert(0, pkg_root) # noqa
from dss import stepfunctions
from dss.stepfunctions import SFN_TEMPLATE_KEY, SFN_EXECUTION_KEY, SFN_INPUT_KEY, sfn_sns_topic
from dss.util import tracing
from dss.logging import configure_lambda_logging
logger = logging.getLogger(__name__)
configure_lambda_logging()
app = domovoi.Domovoi(configure_logs=False)
sqs = boto3.resource('sqs')
@app.sns_topic_subscriber(sfn_sns_topic)
def launch_sfn_run(event, context):
sns_msg = event["Records"][0]["Sns"]
logger.debug(f'sns_message: {sns_msg}')
msg = json.loads(sns_msg["Message"])
attrs = sns_msg["MessageAttributes"]
if 'DSS-REAPER-RETRY-COUNT' in attrs:
logger.info("Reprocessing attempts so far %s", attrs['DSS-REAPER-RETRY-COUNT']['Value'])
sfn_name_template = msg[SFN_TEMPLATE_KEY]
sfn_execution = msg[SFN_EXECUTION_KEY]
sfn_input = msg[SFN_INPUT_KEY]
logger.debug("Launching Step Function %s execution: %s input: %s}", sfn_name_template, sfn_execution, sfn_input)
try:
response = stepfunctions._step_functions_start_execution(sfn_name_template, sfn_execution, sfn_input)
logger.debug(f"Started step function execution: %s", str(response))
except ClientError as e:
if e.response.get('Error'):
if e.response['Error'].get('Code') == 'ExecutionAlreadyExists':
logger.warning("Execution id %s already exists for %s. Not retrying.", sfn_execution, sfn_name_template)
else:
logger.warning("Failed to start step function execution id %s: due to %s", sfn_execution, str(e))
raise e
|
nilq/baby-python
|
python
|
import py
from pypy.lang.prolog.interpreter.parsing import parse_file, TermBuilder
from pypy.lang.prolog.interpreter import engine, helper, term, error
from pypy.lang.prolog.builtin import builtins, builtins_list
from pypy.rlib.objectmodel import we_are_translated
class Builtin(object):
_immutable_ = True
def __init__(self, function, name, numargs, signature):
self.function = function
self.name = name
self.numargs = numargs
self.signature = signature
def call(self, engine, query, continuation):
return self.function(engine, query, continuation)
def _freeze_(self):
return True
def expose_builtin(func, name, unwrap_spec=None, handles_continuation=False,
translatable=True):
if isinstance(name, list):
expose_as = name
name = name[0]
else:
expose_as = [name]
if not name.isalnum():
name = func.func_name
funcname = "wrap_%s_%s" % (name, len(unwrap_spec))
code = ["def %s(engine, query, continuation):" % (funcname, )]
if not translatable:
code.append(" if we_are_translated():")
code.append(" raise error.UncatchableError('%s does not work in translated version')" % (name, ))
subargs = ["engine"]
if len(unwrap_spec):
code.append(" assert isinstance(query, term.Term)")
else:
code.append(" assert isinstance(query, term.Atom)")
for i, spec in enumerate(unwrap_spec):
varname = "var%s" % (i, )
subargs.append(varname)
if spec in ("obj", "callable", "int", "atom", "arithmetic"):
code.append(" %s = query.args[%s].dereference(engine.heap)" %
(varname, i))
elif spec in ("concrete", "list"):
code.append(" %s = query.args[%s].getvalue(engine.heap)" %
(varname, i))
if spec in ("int", "atom", "arithmetic", "list"):
code.append(
" if isinstance(%s, term.Var):" % (varname,))
code.append(
" error.throw_instantiation_error()")
if spec == "obj":
pass
elif spec == "concrete":
pass
elif spec == "callable":
code.append(
" if not isinstance(%s, term.Callable):" % (varname,))
code.append(
" error.throw_type_error('callable', %s)" % (varname,))
elif spec == "raw":
code.append(" %s = query.args[%s]" % (varname, i))
elif spec == "int":
code.append(" %s = helper.unwrap_int(%s)" % (varname, varname))
elif spec == "atom":
code.append(" %s = helper.unwrap_atom(%s)" % (varname, varname))
elif spec == "arithmetic":
code.append(" %s = %s.eval_arithmetic(engine)" %
(varname, varname))
elif spec == "list":
code.append(" %s = helper.unwrap_list(%s)" % (varname, varname))
else:
assert 0, "not implemented " + spec
if handles_continuation:
subargs.append("continuation")
call = " result = %s(%s)" % (func.func_name, ", ".join(subargs))
code.append(call)
if not handles_continuation:
code.append(" return continuation.call(engine, choice_point=False)")
else:
code.append(" return result")
miniglobals = globals().copy()
miniglobals[func.func_name] = func
exec py.code.Source("\n".join(code)).compile() in miniglobals
for name in expose_as:
signature = "%s/%s" % (name, len(unwrap_spec))
b = Builtin(miniglobals[funcname], funcname, len(unwrap_spec),
signature)
builtins[signature] = b
if signature in [",/2", "is/2"]:
builtins_list.insert(0, (signature, b))
else:
builtins_list.append((signature, b))
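# Usage sketch (hypothetical builtin): exposing a zero-argument 'true/0'
# def impl_true(engine):
#     pass
# expose_builtin(impl_true, "true", unwrap_spec=[])
# generates wrap_true_0(engine, query, continuation), wraps it in a Builtin,
# and registers it in the builtins table under the signature "true/0".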
|
nilq/baby-python
|
python
|
import argparse
import errno
import json
import os
import shutil
import sys
import tempfile
import bdbag.bdbag_api
from galaxy.datatypes import sniff
from galaxy.datatypes.registry import Registry
from galaxy.datatypes.upload_util import (
handle_sniffable_binary_check,
handle_unsniffable_binary_check,
UploadProblemException,
)
from galaxy.util import in_directory
from galaxy.util.checkers import (
check_binary,
check_html,
)
from galaxy.util.compression_utils import CompressedFile
DESCRIPTION = """Data Import Script"""
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
args = _arg_parser().parse_args(argv)
registry = Registry()
registry.load_datatypes(root_dir=args.galaxy_root, config=args.datatypes_registry)
request_path = args.request
assert os.path.exists(request_path)
with open(request_path) as f:
request = json.load(f)
upload_config = UploadConfig(request, registry)
galaxy_json = _request_to_galaxy_json(upload_config, request)
with open("galaxy.json", "w") as f:
json.dump(galaxy_json, f)
def _request_to_galaxy_json(upload_config, request):
targets = request.get("targets", [])
fetched_targets = []
for target in targets:
fetched_target = _fetch_target(upload_config, target)
fetched_targets.append(fetched_target)
return {"__unnamed_outputs": fetched_targets}
def _fetch_target(upload_config, target):
destination = target.get("destination", None)
assert destination, "No destination defined."
def expand_elements_from(target_or_item):
elements_from = target_or_item.get("elements_from", None)
items = None
if elements_from:
if elements_from == "archive":
decompressed_directory = _decompress_target(target_or_item)
items = _directory_to_items(decompressed_directory)
elif elements_from == "bagit":
_, elements_from_path = _has_src_to_path(target_or_item)
items = _bagit_to_items(elements_from_path)
elif elements_from == "bagit_archive":
decompressed_directory = _decompress_target(target_or_item)
items = _bagit_to_items(decompressed_directory)
elif elements_from == "directory":
_, elements_from_path = _has_src_to_path(target_or_item)
items = _directory_to_items(elements_from_path)
else:
raise Exception("Unknown elements from type encountered [%s]" % elements_from)
if items:
del target_or_item["elements_from"]
target_or_item["elements"] = items
_for_each_src(expand_elements_from, target)
items = target.get("elements", None)
assert items is not None, "No element definition found for destination [%s]" % destination
fetched_target = {}
fetched_target["destination"] = destination
if "collection_type" in target:
fetched_target["collection_type"] = target["collection_type"]
if "name" in target:
fetched_target["name"] = target["name"]
def _resolve_src(item):
converted_path = None
name, path = _has_src_to_path(item)
dbkey = item.get("dbkey", "?")
requested_ext = item.get("ext", "auto")
info = item.get("info", None)
object_id = item.get("object_id", None)
link_data_only = upload_config.link_data_only
if "link_data_only" in item:
# Allow overriding this on a per file basis.
link_data_only = _link_data_only(item)
to_posix_lines = upload_config.get_option(item, "to_posix_lines")
space_to_tab = upload_config.get_option(item, "space_to_tab")
in_place = item.get("in_place", False)
purge_source = item.get("purge_source", True)
# Follow upload.py logic but without the auto-decompress logic.
registry = upload_config.registry
check_content = upload_config.check_content
data_type, ext = None, requested_ext
is_binary = check_binary(path)
if is_binary:
data_type, ext = handle_sniffable_binary_check(data_type, ext, path, registry)
if data_type is None:
root_datatype = registry.get_datatype_by_extension(ext)
            if getattr(root_datatype, 'compressed', False):
                data_type = 'compressed archive'
elif is_binary:
data_type, ext = handle_unsniffable_binary_check(
data_type, ext, path, name, is_binary, requested_ext, check_content, registry
)
if not data_type and check_content and check_html(path):
raise UploadProblemException('The uploaded file contains inappropriate HTML content')
if data_type != 'binary':
if not link_data_only:
if to_posix_lines:
if space_to_tab:
line_count, converted_path = sniff.convert_newlines_sep2tabs(path, in_place=in_place, tmp_dir=".")
else:
line_count, converted_path = sniff.convert_newlines(path, in_place=in_place, tmp_dir=".")
else:
if space_to_tab:
line_count, converted_path = sniff.sep2tabs(path, in_place=in_place, tmp_dir=".")
if requested_ext == 'auto':
ext = sniff.guess_ext(converted_path or path, registry.sniff_order)
else:
ext = requested_ext
data_type = ext
if ext == 'auto' and data_type == 'binary':
ext = 'data'
if ext == 'auto' and requested_ext:
ext = requested_ext
if ext == 'auto':
ext = 'data'
datatype = registry.get_datatype_by_extension(ext)
if link_data_only:
# Never alter a file that will not be copied to Galaxy's local file store.
if datatype.dataset_content_needs_grooming(path):
err_msg = 'The uploaded files need grooming, so change your <b>Copy data into Galaxy?</b> selection to be ' + \
'<b>Copy files into Galaxy</b> instead of <b>Link to files without copying into Galaxy</b> so grooming can be performed.'
raise UploadProblemException(err_msg)
# If this file is not in the workdir make sure it gets there.
if not link_data_only and converted_path:
path = upload_config.ensure_in_working_directory(converted_path, purge_source, in_place)
elif not link_data_only:
path = upload_config.ensure_in_working_directory(path, purge_source, in_place)
if not link_data_only and datatype and datatype.dataset_content_needs_grooming(path):
# Groom the dataset content if necessary
datatype.groom_dataset_content(path)
rval = {"name": name, "filename": path, "dbkey": dbkey, "ext": ext, "link_data_only": link_data_only}
if info is not None:
rval["info"] = info
if object_id is not None:
rval["object_id"] = object_id
return rval
elements = elements_tree_map(_resolve_src, items)
fetched_target["elements"] = elements
return fetched_target
def _bagit_to_items(directory):
bdbag.bdbag_api.resolve_fetch(directory)
bdbag.bdbag_api.validate_bag(directory)
items = _directory_to_items(os.path.join(directory, "data"))
return items
def _decompress_target(target):
elements_from_name, elements_from_path = _has_src_to_path(target)
temp_directory = tempfile.mkdtemp(prefix=elements_from_name, dir=".")
decompressed_directory = CompressedFile(elements_from_path).extract(temp_directory)
return decompressed_directory
def elements_tree_map(f, items):
new_items = []
for item in items:
if "elements" in item:
new_item = item.copy()
new_item["elements"] = elements_tree_map(f, item["elements"])
new_items.append(new_item)
else:
new_items.append(f(item))
return new_items
def _directory_to_items(directory):
items = []
dir_elements = {}
for root, dirs, files in os.walk(directory):
if root in dir_elements:
target = dir_elements[root]
else:
target = items
for dir in sorted(dirs):
dir_dict = {"name": dir, "elements": []}
dir_elements[os.path.join(root, dir)] = dir_dict["elements"]
target.append(dir_dict)
for file in sorted(files):
target.append({"src": "path", "path": os.path.join(root, file)})
return items
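# Example (hypothetical layout): for data/a.txt and data/sub/b.txt,
# _directory_to_items("data") returns
#   [{"name": "sub", "elements": [{"src": "path", "path": "data/sub/b.txt"}]},
#    {"src": "path", "path": "data/a.txt"}]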
def _has_src_to_path(item):
assert "src" in item, item
src = item.get("src")
name = item.get("name")
if src == "url":
url = item.get("url")
path = sniff.stream_url_to_file(url)
if name is None:
name = url.split("/")[-1]
else:
assert src == "path"
path = item["path"]
if name is None:
name = os.path.basename(path)
return name, path
def _arg_parser():
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument("--galaxy-root")
parser.add_argument("--datatypes-registry")
parser.add_argument("--request-version")
parser.add_argument("--request")
return parser
class UploadConfig(object):
def __init__(self, request, registry):
self.registry = registry
self.check_content = request.get("check_content", True)
self.to_posix_lines = request.get("to_posix_lines", False)
self.space_to_tab = request.get("space_to_tab", False)
self.link_data_only = _link_data_only(request)
self.__workdir = os.path.abspath(".")
self.__upload_count = 0
def get_option(self, item, key):
"""Return item[key] if specified otherwise use default from UploadConfig.
This default represents the default for the whole request instead item which
is the option for individual files.
"""
if key in item:
return item[key]
else:
return getattr(self, key)
def __new_dataset_path(self):
path = "gxupload_%d" % self.__upload_count
self.__upload_count += 1
return path
def ensure_in_working_directory(self, path, purge_source, in_place):
if in_directory(path, self.__workdir):
return path
new_path = self.__new_dataset_path()
if purge_source:
try:
shutil.move(path, new_path)
except OSError as e:
# We may not have permission to remove the source path
if e.errno != errno.EACCES:
raise
else:
shutil.copy(path, new_path)
return new_path
def _link_data_only(has_config_dict):
link_data_only = has_config_dict.get("link_data_only", False)
if not isinstance(link_data_only, bool):
# Allow the older string values of 'copy_files' and 'link_to_files'
link_data_only = link_data_only == "link_to_files"
return link_data_only
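# Illustration (hypothetical inputs) of the coercion above:
# _link_data_only({"link_data_only": True}) -> True
# _link_data_only({"link_data_only": "link_to_files"}) -> True
# _link_data_only({"link_data_only": "copy_files"}) -> False
# _link_data_only({}) -> False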
def _for_each_src(f, obj):
if isinstance(obj, list):
for item in obj:
_for_each_src(f, item)
if isinstance(obj, dict):
if "src" in obj:
f(obj)
for key, value in obj.items():
_for_each_src(f, value)
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from concurrent.futures import ThreadPoolExecutor
import lib.HackRequests as HackRequests
task_status = 0
def uploadfile(data):
global task_status
if task_status==1:
return 'Success'
hack = HackRequests.hackRequests()
hack.httpraw(data)
def requestfile(url):
global task_status
if task_status==1:
return 'Success'
hack = HackRequests.hackRequests()
req = hack.http(url)
if req.status_code == 200:
print('[+] Success!')
task_status = 1
def race(data, url):
# Flood the server with upload requests while polling the target URL,
# trying to hit the window before the uploaded file is removed.
with ThreadPoolExecutor(20) as pool:
for i in range(1000):
pool.submit(uploadfile, data)
pool.submit(requestfile, url)
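# Example driver (sketch; the raw request payload and the target URL below are
# hypothetical placeholders, not from the original script):
# raw_upload = open("upload_request.txt").read()  # a full raw HTTP upload request
# race(raw_upload, "http://target.example/uploads/shell.php")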
|
nilq/baby-python
|
python
|
#In this assignment you will write two functions. Your functions should not make any print statements.
#Any printing should be done by driver code outside the functions.
#Problem 1:
#Write a function that will take two parameters: city and country. You can name it whatever you want.
#The function should return a formatted string in the form 'City, Country'.
#For example, if city='sanTiago' and country='ChiLE', then the function should return the string
# 'Santiago, Chile'.
#Remember, any printing should be done from the driver code, not within the function.
#YOUR CODE GOES HERE:
#Problem 2:
#Write a function called double_sum which will take two parameters num1 and num2 and will return
# twice the sum of the numbers. For example, double_sum(3,5) should return 16.
#As in the first problem, the function should not do any printing.
#YOUR CODE GOES HERE:
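#A possible solution sketch for both problems (illustrative only; the name
#city_country for Problem 1 is my own choice, the prompt leaves it open):
def city_country(city, country):
    # .title() capitalizes each word, normalizing inputs like 'sanTiago'/'ChiLE'
    return f"{city.title()}, {country.title()}"
def double_sum(num1, num2):
    # twice the sum: double_sum(3, 5) -> 2 * (3 + 5) = 16
    return 2 * (num1 + num2)
#Driver code outside the functions would then do the printing, e.g.
#print(city_country('sanTiago', 'ChiLE'))  # Santiago, Chile
#print(double_sum(3, 5))                   # 16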
|
nilq/baby-python
|
python
|
from infi.clickhouse_orm import migrations # type: ignore
from ee.clickhouse.sql.events import (
EVENTS_WITH_PROPS_TABLE_SQL,
MAT_EVENT_PROP_TABLE_SQL,
MAT_EVENTS_WITH_PROPS_TABLE_SQL,
)
operations = [
migrations.RunSQL(EVENTS_WITH_PROPS_TABLE_SQL),
migrations.RunSQL(MAT_EVENTS_WITH_PROPS_TABLE_SQL),
migrations.RunSQL(MAT_EVENT_PROP_TABLE_SQL),
]
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# coding=utf-8
# This file is copied from torchvision.models
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import tensorboardX as tbx
import libs.configs.config as cfg
from .focal_loss import FocalLoss
from .smooth_l1_loss import smooth_l1_loss
from libs.layers.box import decoding_box, apply_nms
from libs.nets.utils import everything2numpy, everything2cuda
class detection_model(nn.Module):
"""
This module applies the backbone network, builds a feature pyramid, and then adds RPNs for all layers in the pyramid.
"""
def __init__(self, backbone, num_classes, num_anchors, is_training=True, maxpool5=True):
super(detection_model, self).__init__()
self.backbone = backbone
self.num_classes = num_classes # number of classes for rpn
self.num_anchors = num_anchors
self.is_training = is_training
self.rpn_activation = cfg.class_activation
self.rpn_outs = []
self.loss_dict = []
self.with_segment = cfg.with_segment
self._score_summaries = {}
self._hist_summaries = {}
self.global_step = 0
self.anchors = None # must be set via running setup()
self.maxpool5 = maxpool5
if is_training:
self.rpn_cls_loss_func = FocalLoss(gamma=2, alpha=0.25, activation=self.rpn_activation) \
if cfg.use_focal_loss else nn.CrossEntropyLoss()
def forward(self, input, gt_boxes_list, anchors_np):
pass
def _objectness(self, probs, activation=None):
activation = self.rpn_activation if activation is None else activation
if activation == 'softmax':
return 1. - probs[:, 0]
elif activation == 'sigmoid':
return probs.max(dim=1)[0]
else:
raise ValueError('Unknown activation function %s' % activation)
def _rerange(self, rpn_outs, last_dimension=None):
"""rerange (Pyramid, N, C, H, W) outputs to (NxLxHxW, C)"""
last_dimension = self.num_classes if last_dimension is None else last_dimension
n = rpn_outs[0][0].size()[0]
c = rpn_outs[0][0].size()[1]
cb = rpn_outs[0][1].size()[1]
rpn_logit = [rpn[0].view(n, c, -1) for rpn in rpn_outs]
rpn_box = [rpn[1].view(n, cb, -1) for rpn in rpn_outs]
rpn_logit = torch.cat(rpn_logit, dim=2)
rpn_box = torch.cat(rpn_box, dim=2)
rpn_logit = rpn_logit.permute(0, 2, 1).contiguous().view(-1, last_dimension)
num_endpoints = rpn_logit.size()[0]
rpn_box = rpn_box.permute(0, 2, 1).contiguous().view(num_endpoints, -1)
return rpn_logit, rpn_box
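# Shape walkthrough for _rerange (illustrative numbers, not from the source):
# with 2 pyramid levels, N=1 image, C=2 classes and feature maps of 4x4 and 2x2
# (one anchor per cell), the concatenated logits become (1*(16+4), 2) = (20, 2)
# and the boxes (20, 4), i.e. (N x L x H x W, C) as the docstring states.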
def _stage_one_results(self, rpn_box, rpn_prob, anchors, top_n=2000,
overlap_threshold=0.7,
top_n_post_nms=None):
boxes, probs, img_ids, anchors = \
self._decode_and_choose_top_n_stage1(rpn_box, rpn_prob, anchors, top_n=top_n)
boxes, probs, img_ids, anchors = \
self._apply_nms_in_batch(boxes, probs, img_ids, anchors,
activation=self.rpn_activation,
overlap_threshold=overlap_threshold)
if top_n_post_nms is not None:
return boxes[:top_n_post_nms], probs[:top_n_post_nms], img_ids[:top_n_post_nms]
return boxes, probs, img_ids
def _thresholding(self, boxes, probs, batch_ids, score_threshold=0.1):
objness = self._objectness(probs)
inds = objness.data.ge(score_threshold).nonzero().view(-1)
if inds.numel() == 0:
_, inds = objness.sort(dim=0, descending=True)
inds = inds[:10]
boxes = boxes[inds]
probs = probs[inds]
batch_ids = batch_ids[inds]
return boxes, probs, batch_ids
def build_losses_rpn(self, rpn_logits, rpn_box, rpn_prob,
rpn_labels, rpn_bboxes, rpn_bbwghts):
"""With OHEM (Online Hard Example Mining)"""
rpn_labels = rpn_labels.view(-1).long()
assert rpn_logits.size()[0] == rpn_box.size()[0] == rpn_labels.size()[0], \
'Dimensions do not match: %d vs %d vs %d' % (rpn_logits.size()[0], rpn_box.size()[0], rpn_labels.size()[0])
if cfg.use_focal_loss:
rpn_logits, rpn_labels = self._sample_valid(rpn_logits, rpn_labels)
else:
rpn_logits, rpn_labels = self._sample_OHEM(rpn_logits, rpn_labels, rpn_prob, rpn_box,
bg_fg_ratio=3)
rpn_cls_loss = self.rpn_cls_loss_func(rpn_logits, rpn_labels)
# build box loss
rpn_bbwghts = rpn_bbwghts.view(-1, 4)
rpn_bboxes = rpn_bboxes.view(-1, 4)
bb_nums = torch.sum(rpn_bbwghts.data.gt(0).float())
bbwght_outside = (rpn_bbwghts > 0.0001).float() / max(bb_nums, 1.0)
rpn_box_loss = smooth_l1_loss(rpn_box, rpn_bboxes, rpn_bbwghts, bbwght_outside, sigma=1.0)
return rpn_cls_loss, rpn_box_loss
def build_losses_rpn_faster_rcnn(self, rpn_logits, rpn_box, rpn_prob,
rpn_labels, rpn_bboxes, rpn_bbwghts):
"""No OHEM (Online Hard Example Mining)"""
rpn_labels = rpn_labels.view(-1).long()
assert rpn_logits.size()[0] == rpn_box.size()[0] == rpn_labels.size()[0], \
'Dimensions do not match: %d vs %d vs %d' % (rpn_logits.size()[0], rpn_box.size()[0], rpn_labels.size()[0])
rpn_logits, rpn_labels, all_rpn_labels = \
self._sample_faster_rcnn(rpn_logits, rpn_labels, rpn_prob, rpn_box,
rpn_batch_size=256, rpn_fg_fraction=0.5)
rpn_cls_loss = F.cross_entropy(rpn_logits, rpn_labels, ignore_index=-1)
# build box loss
rpn_bbwghts = rpn_bbwghts.view(-1, 4)
rpn_bboxes = rpn_bboxes.view(-1, 4)
bb_nums = all_rpn_labels.eq(1).sum().item()
bbwght_outside = all_rpn_labels.eq(1).float() / max(bb_nums * 4, 4.0)
bbwght_outside = bbwght_outside.view(-1, 1)
rpn_box_loss = smooth_l1_loss(rpn_box, rpn_bboxes, rpn_bbwghts, bbwght_outside, sigma=1.0)
return rpn_cls_loss, rpn_box_loss
def build_losses(self, outputs, targets):
pass
def loss(self):
pass
def cls_loss(self):
return self.loss_dict['rpn_cls_loss']
def box_loss(self):
return self.loss_dict['rpn_box_loss']
def _gather_fg(self, labels, boxes, logits):
"""choose all bgs, sort them, pick top_n bgs"""
fg_inds = labels.data.ge(1).nonzero().view(-1)
if fg_inds.numel() > 0:
return labels[fg_inds], boxes[fg_inds], logits[fg_inds], fg_inds
else:
return None, None, None, fg_inds
def _gather_bg(self, labels, probs, logits, top_n=2000):
"""choose all bgs, sort them, pick top_n bgs"""
bg_inds = labels.data.eq(0).nonzero().view(-1)
probs = probs[bg_inds]
logits = logits[bg_inds]
# objness = 1. - probs[:, 0]
objness = self._objectness(probs)
_, inds = objness.sort(dim=0, descending=True)
top_n = min(top_n, inds.size(0))
inds = inds[:top_n]
return probs[inds], logits[inds], bg_inds[inds.data]
def _sample_OHEM(self, rpn_logits, rpn_label, rpn_prob, rpn_boxes, bg_fg_ratio=3):
rpn_prob = rpn_prob.detach()  # detach() is not in-place; rebind to block gradients
fg_labels, fg_boxes, fg_logits, fg_inds = self._gather_fg(rpn_label, rpn_boxes, rpn_logits)
fg_num = fg_inds.numel()
top_n = max(fg_num * bg_fg_ratio, 16)
bg_probs, bg_logits, bg_inds = self._gather_bg(rpn_label, rpn_prob, rpn_logits, top_n=top_n)
bg_num = bg_inds.numel()
# bg_objness = 1 - bg_probs[:, 0]
bg_objness = self._objectness(bg_probs)
if fg_inds is not None:
chosen_inds = torch.cat((fg_inds, bg_inds), dim=0)
else:
chosen_inds = bg_inds
labels = rpn_label[chosen_inds]
if self.global_step % cfg.log_image == 0 and fg_num > 1:
c = rpn_logits.size(1)
sampled_fg_losses = 0.5 * torch.abs(self._to_one_hot(fg_labels, c) - rpn_prob[fg_inds]).sum(dim=1)
self._score_summaries['Sample/PosLoss'] = sampled_fg_losses
self._score_summaries['Sample/PosLossMax'] = sampled_fg_losses.max()
bg_probs_all, _, _ = self._gather_bg(rpn_label, rpn_prob, rpn_logits, top_n=float('inf'))
bg_objness_all = 1. - bg_probs_all[:, 0]
self._score_summaries['Sample/NegLoss'] = bg_objness_all
self._score_summaries['Sample/NegLoss_SampledMax'] = bg_objness.max()
self._score_summaries['Sample/NegLoss_Sampled'] = bg_objness
self._score_summaries['Sample/FG_nums'] = fg_num
self._score_summaries['Sample/BG_nums'] = bg_num
self.global_step += 1
logits = rpn_logits[chosen_inds]
return logits.contiguous(), labels.contiguous()
def _sample_faster_rcnn_OHEM(self, rpn_logits, rpn_label, rpn_prob, rpn_boxes,
rpn_batch_size=256, rpn_fg_fraction=0.5):
"""Always sample rpn_batch_size examples. Even negative ones may dominate examples.
Hopefully this is moderate than OHEM (FocalLoss > OHEM > this-sampler > _sample_faster_rcnn)
"""
rpn_prob = rpn_prob.detach()  # detach() is not in-place; rebind to block gradients
fg_inds = rpn_label.data.ge(1).nonzero().view(-1)
fg_num = fg_inds.numel()
fg_num_ = min(int(rpn_batch_size * rpn_fg_fraction), fg_num)
if fg_num_ > 0:
inds = torch.randperm(fg_num)[:fg_num_]
fg_inds = fg_inds[inds]
bg_inds = rpn_label.data.eq(0).nonzero().view(-1)
bg_num = bg_inds.numel()
bg_num_ = min(rpn_batch_size - fg_num_, bg_num)
bg_probs, bg_logits, bg_inds = self._gather_bg(rpn_label, rpn_prob, rpn_logits, top_n=bg_num_)
chosen_inds = torch.cat((fg_inds, bg_inds), dim=0)
labels = rpn_label[chosen_inds]
logits = rpn_logits[chosen_inds]
all_labels = torch.zeros_like(rpn_label) - 1
all_labels[fg_inds] = 1
all_labels[bg_inds] = 0
if self.global_step % cfg.log_image == 0 and fg_num > 1:
self._score_summaries['Sample/FG_nums_total'] = fg_num
self._score_summaries['Sample/BG_nums_total'] = bg_num
self._score_summaries['Sample/FG_nums_train'] = fg_num_
self._score_summaries['Sample/BG_nums_train'] = bg_num_
self.global_step += 1
return logits.contiguous(), labels.contiguous(), all_labels
def _sample_faster_rcnn(self, rpn_logits, rpn_label, rpn_prob, rpn_boxes,
rpn_batch_size=256, rpn_fg_fraction=0.5):
rpn_prob = rpn_prob.detach()  # detach() is not in-place; rebind to block gradients
fg_inds = rpn_label.data.ge(1).nonzero().view(-1)
fg_num = fg_inds.numel()
fg_num_ = min(int(rpn_batch_size * rpn_fg_fraction), fg_num)
if fg_num_ > 0:
inds = torch.randperm(fg_num)[:fg_num_]
fg_inds = fg_inds[inds]
bg_inds = rpn_label.data.eq(0).nonzero().view(-1)
bg_num = bg_inds.numel()
bg_num_ = min(rpn_batch_size - fg_num_, bg_num)
if bg_num_ > 0:
inds = torch.randperm(bg_num)[:bg_num_]
bg_inds = bg_inds[inds]
chosen_inds = torch.cat((fg_inds, bg_inds), dim=0)
labels = rpn_label[chosen_inds]
logits = rpn_logits[chosen_inds]
all_labels = torch.zeros_like(rpn_label) - 1
all_labels[fg_inds] = 1
all_labels[bg_inds] = 0
if self.global_step % cfg.log_image == 0 and fg_num > 1:
self._score_summaries['Sample/FG_nums_total'] = fg_num
self._score_summaries['Sample/BG_nums_total'] = bg_num
self._score_summaries['Sample/FG_nums_train'] = fg_num_
self._score_summaries['Sample/BG_nums_train'] = bg_num_
self.global_step += 1
return logits.contiguous(), labels.contiguous(), all_labels
def _sample_valid(self, rpn_logits, rpn_labels):
# rpn_prob.detach()
valid_inds = rpn_labels.data.ge(0).nonzero().view(-1)
logits, labels = rpn_logits[valid_inds], rpn_labels[valid_inds]
return logits.contiguous(), labels.contiguous()
def _decode_and_choose_top_n_stage1(self, rpn_box, rpn_prob, anchors, top_n=1000):
objness = self._objectness(rpn_prob)
_, inds = objness.sort(dim=0, descending=True)
inds = inds[:top_n]
selected_boxes = rpn_box[inds]
selected_probs = rpn_prob[inds]
anchor_ids = inds % anchors.size(0)
selected_anchors = anchors[anchor_ids]
selected_boxes = decoding_box(selected_boxes, selected_anchors, box_encoding=cfg.rpn_box_encoding)
selected_img_ids = inds // anchors.size(0)  # integer division maps flat indices back to image ids
return selected_boxes, selected_probs, selected_img_ids, selected_anchors
def _decoding_and_thresholding_stage1(self, rpn_box, rpn_prob, anchors, score_threshold=0.3, max_dets=100):
selected_boxes, selected_probs, selected_img_ids, selected_anchors = \
self._decode_and_choose_top_n_stage1(rpn_box, rpn_prob, anchors, top_n=max_dets * 3)
objness = self._objectness(selected_probs)
inds = objness.data.ge(score_threshold).nonzero().view(-1)
if inds.numel() == 0:
_, inds = objness.sort(dim=0, descending=True)
inds = inds[:1]
selected_boxes = selected_boxes[inds]
selected_probs = selected_probs[inds]
selected_img_ids = selected_img_ids[inds]
selected_anchors = selected_anchors[inds]
return selected_boxes, selected_probs, selected_img_ids, selected_anchors
@staticmethod
def _apply_nms_in_batch(boxes, probs, img_ids, anchors, activation, overlap_threshold=0.5):
"""apply non-maximum suppression for multiple images in a mini-batch"""
objness = probs.max(dim=1)[0] if activation == 'sigmoid' else 1. - probs[:, 0]
nmax = img_ids.max().cpu().data.numpy()
nmin = img_ids.min().cpu().data.numpy()
all_keeps = []
for i in range(nmin, nmax + 1):
inds = img_ids.data.eq(i).nonzero().view(-1)
if inds.numel() > 0:
keeps = apply_nms(boxes[inds][:, :4], objness[inds], overlap_threshold=overlap_threshold)
all_keeps.append(inds[keeps])
all_keeps = torch.cat(all_keeps, dim=0) if len(all_keeps) > 1 else all_keeps[0]
return boxes[all_keeps], probs[all_keeps], img_ids[all_keeps], anchors[all_keeps]
@staticmethod
def to_Dets(boxes, probs, img_ids):
"""for each bbox, assign the class with the max prob"""
boxes, probs, img_ids = everything2numpy([boxes, probs, img_ids])
Dets = []
for i in range(0, cfg.batch_size):
inds = np.where(img_ids == i)[0]
probs_ = probs[inds]
boxes_ = boxes[inds]
if probs_.shape[1] == 2:
cls_ids = np.ones((probs_.shape[0], ), dtype=np.int32)
cls_probs = probs_[:, 1]
else:
cls_ids = probs_[:, 1:].argmax(axis=1) + 1
cls_probs = probs_[np.arange(probs_.shape[0]), cls_ids]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
Dets.append(dets)
return Dets
@staticmethod
def to_Dets_sigmoid(boxes, probs, img_ids):
"""for each bbox, assign the class with the max prob,
NOTE: there is no background class, so the implementation is slightly different"""
boxes, probs, img_ids = everything2numpy([boxes, probs, img_ids])
Dets = []
for i in range(0, cfg.batch_size):
inds = np.where(img_ids == i)[0]
probs_ = probs[inds]
boxes_ = boxes[inds]
if probs_.ndim == 1 or probs_.shape[1] == 1:
cls_ids = np.ones((probs_.shape[0], ), dtype=np.int32)
cls_probs = probs_.reshape(-1)  # numpy arrays use reshape, not Tensor.view
else:
cls_ids = probs_.argmax(axis=1) + 1
cls_probs = probs_.max(axis=1)
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
Dets.append(dets)
return Dets
@staticmethod
def to_Dets2(boxes, probs, img_ids, score_threshold=0.1):
"""for each box, there may be more than one class labels"""
boxes, probs, img_ids = everything2numpy([boxes, probs, img_ids])
Dets = []
for i in range(0, cfg.batch_size):
inds = np.where(img_ids == i)[0]
probs_ = probs[inds]
boxes_ = boxes[inds]
if probs_.shape[1] == 2:
cls_ids = np.ones((probs_.shape[0], ), dtype=np.int32)
cls_probs = probs_[:, 1]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
else:
d0_inds, d1_inds = np.where(probs_[:, 1:] > score_threshold)
if d0_inds.size > 0:
cls_ids = d1_inds + 1
cls_probs = probs_[d0_inds, cls_ids]
boxes_ = boxes_[d0_inds, :]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
else:
cls_ids = probs_[:, 1:].argmax(axis=1) + 1
cls_probs = probs_[np.arange(probs_.shape[0]), cls_ids]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
Dets.append(dets)
return Dets
@staticmethod
def to_Dets2_sigmoid(boxes, probs, img_ids, score_threshold=0.1):
boxes, probs, img_ids = everything2numpy([boxes, probs, img_ids])
Dets = []
for i in range(0, cfg.batch_size):
inds = np.where(img_ids == i)[0]
probs_ = probs[inds]
boxes_ = boxes[inds]
if probs_.ndim == 1 or probs_.shape[1] == 1:
cls_ids = np.ones((probs_.shape[0], ), dtype=np.int32)
cls_probs = probs_.reshape(-1)  # numpy arrays use reshape, not Tensor.view
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
else:
d0_inds, d1_inds = np.where(probs_ > score_threshold)
if d0_inds.size > 0:
cls_ids = d1_inds + 1
cls_probs = probs_[d0_inds, d1_inds]
boxes_ = boxes_[d0_inds, :]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
else:
cls_ids = probs_.argmax(axis=1) + 1
cls_probs = probs_[np.arange(probs_.shape[0]), cls_ids - 1]
dets = np.concatenate((boxes_.reshape(-1, 4),
cls_probs[:, np.newaxis],
cls_ids[:, np.newaxis]), axis=1)
Dets.append(dets)
return Dets
def get_final_results(self, outputs, anchors, **kwargs):
pass
def get_final_results_stage1(self, rpn_box, rpn_prob, anchors,
score_threshold=0.1,
max_dets=100,
overlap_threshold=0.5):
selected_boxes, selected_probs, selected_img_ids, selected_anchors = \
self._decoding_and_thresholding_stage1(rpn_box, rpn_prob, anchors,
score_threshold=score_threshold,
max_dets=max_dets * 3)
selected_boxes, selected_probs, selected_img_ids, selected_anchors = \
self._apply_nms_in_batch(selected_boxes, selected_probs,
selected_img_ids, selected_anchors,
activation=self.rpn_activation,
overlap_threshold=overlap_threshold)
if self.rpn_activation == 'softmax':
Dets = self.to_Dets2(selected_boxes, selected_probs, selected_img_ids, score_threshold)
elif self.rpn_activation == 'sigmoid':
Dets = self.to_Dets2_sigmoid(selected_boxes, selected_probs, selected_img_ids, score_threshold)
else:
raise ValueError('Unknown activation function %s' % self.rpn_activation)
return Dets
def get_pos_anchors(self, rpn_box, rpn_prob, anchors, score_threshold=0.1, max_dets=100):
_, selected_probs, selected_img_ids, selected_anchors = \
self._decoding_and_thresholding_stage1(rpn_box, rpn_prob, anchors, score_threshold=score_threshold, max_dets=max_dets)
if self.rpn_activation == 'softmax':
Dets = self.to_Dets(selected_anchors, selected_probs, selected_img_ids)
elif self.rpn_activation == 'sigmoid':
Dets = self.to_Dets_sigmoid(selected_anchors, selected_probs, selected_img_ids)
else:
raise ValueError('Unknown activation function %s' % self.rpn_activation)
return Dets
def _to_one_hot(self, y, num_classes):
c = num_classes + 1 if self.rpn_activation == 'sigmoid' else num_classes
y_ = torch.FloatTensor(y.size()[0], c).zero_()
y_ = y_.scatter_(1, y.view(-1, 1).data.cpu(), 1.0)  # keep on CPU; moved to CUDA below only if y is
if self.rpn_activation == 'sigmoid':
y_ = y_[:, 1:]
if y.is_cuda:
y_ = y_.cuda()
return y_
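# Example of _to_one_hot (illustrative): with num_classes=3 and
# rpn_activation='sigmoid', y=[2] first scatters into 4 columns as
# [0., 0., 1., 0.] and the background column is then dropped, giving [0., 1., 0.].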
def de_frozen_backbone(self):
self.backbone.de_frozen()
def _add_scalar_summary(self, key, tensor):
if isinstance(tensor, torch.Tensor):
return tbx.summary.scalar(key + '/L1', torch.abs(tensor).mean().data.cpu().numpy())
elif isinstance(tensor, float) or isinstance(tensor, int):
return tbx.summary.scalar(key, tensor)
def _add_hist_summary(self, key, tensor):
return tbx.summary.histogram(key, tensor.data.cpu().numpy(), bins='auto')
def get_summaries(self, is_training=True):
"""
Run the summary operator: feed the placeholders with the corresponding network outputs (activations)
"""
summaries = []
for key, var in self._score_summaries.items():
summaries.append(self._add_scalar_summary(key, var))
self._score_summaries = {}
# Add act summaries
# for key, var in self._hist_summaries.items():
# summaries += self._add_hist_summary(key, var)
self._hist_summaries = {}
# Add train summaries
if is_training:
for k, var in dict(self.named_parameters()).items():
if var.requires_grad:
# summaries.append(self._add_hist_summary(k, var))
summaries.append(self._add_scalar_summary('Params/' + k, var))
summaries.append(self._add_scalar_summary('Grads/' + k, var.grad))
return summaries
|
nilq/baby-python
|
python
|
from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
import json
import os
import random
import requests
# import the provider modules so they can be referenced by their short names below
from src.services.providers.python import fetch_git_data
from src.services.providers.python import fetch_lingo_data
from src.services.providers.python import fetch_lab_data
def create_app():
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
@app.route('/data', methods=["GET"])
def check():
data = json.loads(request.get_data().decode('UTF-8'))
extension = data["extension"]
if extension == "github":
return fetch_git_data.github_data(data["account"])
elif extension == "duolingo":
return fetch_lingo_data.duolingo_data(data["account"])
elif extension == "gitlab":
return fetch_lab_data.gitlab_data(data["account"])
return app
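# A minimal way to run and exercise the app (sketch; the port and payload are
# illustrative assumptions, not from the original file):
# app = create_app()
# app.run(port=5000)
# and from a shell:
# curl -X GET http://localhost:5000/data -d '{"extension": "github", "account": "octocat"}'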
|
nilq/baby-python
|
python
|
import StockAnalysisSystem.core.api as sasApi
from StockAnalysisSystem.core.SubServiceManager import SubServiceContext
from StockAnalysisSystem.core.Utility.relative_import import RelativeImport
from StockAnalysisSystem.core.Utility.event_queue import Event, EventDispatcher
with RelativeImport(__file__):
from WebServiceProvider.service_provider import ServiceProvider
SERVICE_ID = '0ea2afb5-3350-46e8-af1b-2e7ff246a1ff'
# ----------------------------------------------------------------------------------------------------------------------
class TerminalService:
def __init__(self):
self.__service_provider: ServiceProvider = None
self.__subService_context: SubServiceContext = None
def init(self, sub_service_context: SubServiceContext):
self.__service_provider = ServiceProvider()
self.__subService_context = sub_service_context
self.__service_provider.check_init(sub_service_context.sas_if,
sub_service_context.sas_api)
return self.__service_provider.is_inited()
def interact(self, text: str, **kwargs) -> any:
return self.__service_provider.terminal_interact(text, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
def plugin_prob() -> dict:
return {
'plugin_id': SERVICE_ID,
'plugin_name': 'terminal_service',
'plugin_version': '0.0.0.1',
'tags': ['Terminal', 'Sleepy'],
}
def plugin_adapt(service: str) -> bool:
return service == SERVICE_ID
def plugin_capacities() -> list:
return [
'api', # Provides functions like sys call
# 'thread', # SubService manager will create a thread for this service
# 'polling', # polling() function will be invoked while event processing thread is free
'event_handler' # SubService can handle events that dispatch to it
]
# ----------------------------------------------------------------------------------------------------------------------
eventDispatcher = EventDispatcher(in_private_thread=False, name=SERVICE_ID)
terminalService = TerminalService()
def init(sub_service_context: SubServiceContext) -> bool:
try:
return terminalService.init(sub_service_context)
except Exception as e:
import traceback
print('Plug-in init error: ' + str(e))
print(traceback.format_exc())
return False
def startup() -> bool:
eventDispatcher.register_invoke_handler('interact', terminalService.interact)
return True
def teardown() -> bool:
if eventDispatcher is not None:
eventDispatcher.teardown()
return True
# def thread(context: dict):
# pass
# def polling(interval_ns: int):
# pass
def event_handler(event: Event, sync: bool, **kwargs):
eventDispatcher.dispatch_event(event, sync)
|
nilq/baby-python
|
python
|
"""'On-fly' avatar changer.
This script allows to change avatar of bot while it's running.
Script gets randomly choosen avatar data to replace current avatar.
This file can also be imported as a module and contains the following functions:
* get_avatar_bytes - gets bytes from avatar picture
"""
import pathlib
import random
import time
import src.lib.database as database
CHANGE_COOLDOWN = 900
def get_avatar_bytes(avatar_cooldown=None):
"""Get bytes from avatar picture.
This function has built-in check for
avatar change cooldown
Args:
avatar_cooldown (Union[int, None]): Unix timestamp after which a new avatar may be set
Returns:
Union[int, list[bytes, int]]:
Current cooldown time or bytes of PNG w/ new cooldown time
"""
if not avatar_cooldown:
avatar_cooldown = database.get_data(
"mainDB",
True,
"SELECT avatar_cooldown FROM variables",
)
curr_time = int(time.time())
curr_cooldown = avatar_cooldown - curr_time
if curr_cooldown > 0:
return {
"avatar_cooldown": avatar_cooldown,
"curr_cooldown": int(curr_cooldown),
"avatar_bytes": None,
}
new_avatar_cooldown = curr_time + CHANGE_COOLDOWN  # schedule the next allowed change
database.modify_data("mainDB", "UPDATE variables SET avatar_cooldown = ?",
new_avatar_cooldown)
avatar_path = (f"{pathlib.Path().absolute()}/src/avatars/"
f"Avatar_{random.randint(1, 16)}.png")
with open(avatar_path, "rb") as f:
avatar_bytes = f.read()
f.close()
return {
"avatar_cooldown": new_avatar_cooldown,
"curr_cooldown": None,
"avatar_bytes": avatar_bytes,
}
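# Example usage (sketch; the discord.py edit call is an assumption about the
# surrounding bot, which is not shown in this module):
# result = get_avatar_bytes()
# if result["avatar_bytes"] is not None:
#     await bot.user.edit(avatar=result["avatar_bytes"])
# else:
#     print(f"On cooldown for another {result['curr_cooldown']} seconds")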
|
nilq/baby-python
|
python
|
import queue
from typing import Tuple
from agents.abstract_agent import AbstractAgent
from games.game import Game
from utils import print_turn
from utils import print_board
from utils import print_move
from utils import print_visit_count
from utils import print_winner
from games.game_types import UIEvent
class CUI:
def __init__(self, game_type: str, rule_type: str, players: Tuple[AbstractAgent, AbstractAgent]):
"""[summary]
Play game on CUI.
Args:
game_type ([type]): [description]
rule_type ([type]): [description]
players ([type]): [description]
"""
self.event_queue = queue.Queue()
self.game = Game(game_type, rule_type, players, self.event_queue)
self.board_size = self.game.get_board_size()
def run(self) -> None:
self.game.start()
game_over = False
while not game_over:
event, val = self.event_queue.get()
self.event_queue.task_done()
if event == UIEvent.BOARD:
print_board(val)
elif event == UIEvent.VISIT_COUNTS:
print_visit_count(val)
elif event == UIEvent.LAST_MOVE:
print_move(val)
print_turn(self.game.get_game_state())
elif event == UIEvent.GAME_OVER:
print_winner(val)
game_over = True
def get_game(self) -> Game:
return self.game
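# Example usage (sketch; the agent class and the game/rule names are
# hypothetical -- they depend on what the agents/games packages provide):
# players = (SomeAgent(), SomeAgent())
# CUI("<game_type>", "<rule_type>", players).run()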
|
nilq/baby-python
|
python
|
import numpy as np
import threading
#from federatedml.ftl.encryption import mulmatOT
import mulmatOT
import sys,getopt
import socket
import pickle
from time import *
BITS_LEN=16
ROW_A=6
COL_A=6
ROW_B=6
COL_B=6
# a=np.array([[ 0.00514600],
# [ 0.02252000],
# [-0.01941000],
# [ 0.04263000],
# [-0.01234000],
# [ 0.00293600]])
b=np.array([[9.11100000e-02,9.75300000e-01,2.48500000e-02,9.99900000e-01,9.99900000e-01,9.22400000e-02],
[9.97800000e-01,1.06200000e-02,2.26100000e-01,1.81900000e-03,4.35400000e-01,2.93100000e-01],
[9.93200000e-01,7.00700000e-01,4.09000000e-03,5.53100000e-01,1.94200000e-01,4.91300000e-01],
[2.14800000e-01,6.46900000e-01,3.31300000e-01,6.51400000e-01,6.66900000e-01,4.70700000e-01],
[9.95100000e-01,9.54300000e-01,7.41300000e-01,9.96100000e-01,5.02900000e-02,7.40100000e-02],
[2.47100000e-01,9.99900000e-01,9.83900000e-03,6.57200000e-01,7.32900000e-02,8.37100000e-04]])
# a=np.array([[-100.1,2087,378.1],[-408.1,560.1,600.8]])
# b=np.array([[9.11100000e-02,9.75300000e-01],[2.48500000e-02,9.99900000e-01]])
# a=np.array([[-100.1,2087,378.1,200.-1,376.6,5801],[-408.1,560.1,600.8,5007,-108.6,250.8]])
# b=np.array([[145.5,27.71],[307.1,-49.01],[550.1,652.1],[-30.01,400.1],[370.1,-405.1],[308.9,470.1]])
# a=np.array([[9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07]])
# a=a.transpose()
# b=np.array([[9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07],
# [9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01],
# [9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01],
# [2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01],
# [9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02],
# [2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04],
# [9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07],
# [9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01],
# [9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01],
# [2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01],
# [9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02],
# [2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04],
# [9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07],
# [9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01],
# [9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01],
# [2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01],
# [9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02],
# [2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04],
# [9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07],
# [9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01],
# [9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01],
# [2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01],
# [9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02],
# [2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04],
# [9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.11183649e-09,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07,9.75335508e-01,2.48583458e-09,9.99946741e-01,9.99953763e-01,9.22442917e-07],
# [9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,9.97868177e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01,1.06257031e-02,2.26146074e-01,1.81942443e-03,4.35402792e-01,2.93155487e-01],
# [9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,9.93293351e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01,7.00731607e-01,4.09086882e-03,5.53100605e-01,1.94269353e-01,4.91364907e-01],
# [2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,2.14898568e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01,6.46915583e-01,3.31391350e-01,6.51451235e-01,6.66928566e-01,4.70721486e-01],
# [9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.95129221e-01,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02,9.54346714e-01,7.41353410e-01,9.96145008e-01,5.02976012e-02,7.40109476e-02],
# [2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,2.47149265e-01,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04,9.99999985e-01,9.83987704e-03,6.57201027e-01,7.32935665e-02,8.37128700e-04]])
a=b
# a=np.array([[1,2,-3],[4,5,6]])
# b=np.array([[1,-2],[3,4],[5,6]])
# a=np.array([[ 0.00514600],
# [ 0.02252000],
# [-0.01941000],
# [ 0.04263000],
# [-0.01234000],
# [ 0.00514600],
# [ 0.02252000],
# [-0.01941000],
# [ 0.04263000],
# [-0.01234000],
# [ 0.00514600],
# [ 0.00514600]])
# b=np.array([[9.11100000e-02,9.75300000e-01,2.48500000e-02,9.99900000e-01,9.99900000e-01,9.22400000e-02,9.75300000e-01,2.48500000e-02,9.99900000e-01,9.99900000e-01,9.22400000e-02,9.75300000e-01],
# [9.97800000e-01,1.06200000e-02,2.26100000e-01,1.81900000e-03,4.35400000e-01,2.93100000e-01,1.06200000e-02,2.26100000e-01,1.81900000e-03,4.35400000e-01,2.93100000e-01,1.06200000e-02],
# [9.93200000e-01,7.00700000e-01,4.09000000e-03,5.53100000e-01,1.94200000e-01,4.91300000e-01,7.00700000e-01,4.09000000e-03,5.53100000e-01,1.94200000e-01,4.91300000e-01,7.00700000e-01],
# [2.47100000e-01,9.99900000e-01,9.83900000e-03,6.57200000e-01,7.32900000e-02,8.37100000e-04,9.99900000e-01,9.83900000e-03,6.57200000e-01,7.32900000e-02,8.37100000e-04,9.99900000e-01]])
print(a.shape)
print(b.shape)
def send_expand_matrix(ROW,COL,matrix):
matrixInt,matrixIntTimes=mulmatOT.ChaneToInt(ROW,COL,matrix)
return [ROW,COL,matrixIntTimes]
def receive_expand_matrix(msg):
return msg
def mainx(argv):
try:
opts, args = getopt.getopt(argv,"hr:")
except getopt.GetoptError:
print ('xxx.py -r (0 or 1)')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print ('xxx.py -r (0 or 1)')
sys.exit()
elif opt in ("-r"):
role = int(arg)
begin_time=time()
msgfromA=send_expand_matrix(ROW_A,COL_A,a)
expandsA=receive_expand_matrix(msgfromA)
msgfromB=send_expand_matrix(ROW_B,COL_B,b)
expandsB=receive_expand_matrix(msgfromB)
#fff=np.matmul(b,a)
fff=b*a
print(fff)
if role==0:
res1=mulmatOT.mulmatOT_wise(0,7891,b,expandsA,ROW_B,COL_B)
print("------------------result------------------")
print(res1)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('127.0.0.1', 9999))
message, address = s.recvfrom(1024)
res2=pickle.loads(message)
print("------------------res2------------------")
print(res2)
res3=res1+res2
print("------------------res3------------------")
print(res3)
res4=res3-fff
print("------------------res4------------------")
print(res4)
else:
res2=mulmatOT.mulmatOT_wise(1,7891,a,expandsB,ROW_A,COL_A)
print("------------------result------------------")
print(res2)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('127.0.0.1', 9998))
host = '127.0.0.1' # local host address
port = 9999 # destination port
message=pickle.dumps(res2)
Addr=(host,port)
s.sendto(message,Addr)
end_time=time()
run_time=end_time-begin_time
print(run_time)
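# Sanity check implied by mainx above: the two parties' additive shares res1
# and res2 should satisfy res1 + res2 == b * a (element-wise, fff), so the
# printed difference res4 = res3 - fff is expected to be (numerically) zero.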
def handle1(sid):
msgfromA=send_expand_matrix(ROW_A,COL_A,a)
expandsA=receive_expand_matrix(msgfromA)
print("Thread %d run"%sid)
res1=mulmatOT.mulmatOT(0,b,expandsA,ROW_B,COL_B)
print("------------------result------------------")
print(res1)
def handle2(sid):
msgfromB=send_expand_matrix(ROW_B,COL_B,b)
expandsB=receive_expand_matrix(msgfromB)
print("Thread %d run"%sid)
res2=mulmatOT.mulmatOT(1,a,expandsB,ROW_A,COL_A)
print("------------------result------------------")
print(res2)
# Thread-based variant (method 1), kept for reference
if __name__ == "__main__":
# t1 = threading.Thread(target=handle1, args=(1,))
# t1.start()
# t2 = threading.Thread(target=handle2, args=(2,))
# t2.start()
mainx(sys.argv[1:])
|
nilq/baby-python
|
python
|
import poplib
import getpass
import sys
import mailconfig
# Configure the server
mailserver = mailconfig.servidor_pop
# The user name
mailuser = mailconfig.usuário_pop
# Ask for the password
mailpasswd = getpass.getpass('Password for %s?' % mailserver)
# Start connecting to the server
print('Connecting...')
# For lack of a practical example we connect to the gmail server. For
# that we use a special object, POP3_SSL, instead of the classic POP3
# object. The difference exists because Google's server is implemented
# on top of an SSL-encrypted socket. Note that the default port for
# servers of this kind is 995 instead of POP3's 110. The port can be
# configured per server by passing it as a string when constructing
# the object
server = poplib.POP3_SSL(mailserver)
server.user(mailuser)
server.pass_(mailpasswd)
# Once connected, we start receiving data
try:
    # If the server has a welcome message, we fetch it with the
    # "getwelcome" method
    print(server.getwelcome())
    # We can query the server's mailbox stats using the stat method
    msgCount, msgBytes = server.stat()
    # Print the information we received
    print('There are', msgCount, 'email messages in', msgBytes, 'bytes')
    # Print the message list with the byte count of each message
    print(server.list())
    # Wait for the user before reading the messages' contents
    print('-' * 80)
    input('[Press Enter to continue]')
    # Iterate over every message in the mailbox
    for i in range(msgCount):
        # We can read a message's content with the retr method,
        # passing the number of the message we want to fetch
        hdr, mensagem, octets = server.retr(i+1)
        # Print the whole content of the message we just read;
        # it must be decoded first, since in Python 3.x all email
        # text arrives as bytes
        for linha in mensagem: print(linha.decode())
        # Print a content separator
        print('-' * 80)
        # If we have not read all the messages yet
        if i < msgCount - 1:
            # wait for the user to press Enter before reading
            # the next message
            input('[Press Enter to continue]')
finally:
    # Close the mail connection with the quit method
    server.quit()
|
nilq/baby-python
|
python
|
import os
import pathlib
# Clone the tensorflow models repository if it doesn't already exist
if "models" in pathlib.Path.cwd().parts:
while "models" in pathlib.Path.cwd().parts:
os.chdir('..')
elif not pathlib.Path('models').exists():
!git clone --depth 1 https://github.com/tensorflow/models
import matplotlib
import matplotlib.pyplot as plt
import os
import random
import io
import imageio
import glob
import scipy.misc
import numpy as np
from six import BytesIO
from PIL import Image, ImageDraw, ImageFont
from IPython.display import display, Javascript
from IPython.display import Image as IPyImage
import tensorflow as tf
from object_detection.utils import label_map_util
from object_detection.utils import config_util
from object_detection.utils import visualization_utils as viz_utils
from object_detection.utils import colab_utils
from object_detection.builders import model_builder
%matplotlib inline
def load_image_into_numpy_array(path):
"""Load an image from file into a numpy array.
Puts image into numpy array to feed into tensorflow graph.
Note that by convention we put it into a numpy array with shape
(height, width, channels), where channels=3 for RGB.
Args:
path: a file path.
Returns:
uint8 numpy array with shape (img_height, img_width, 3)
"""
img_data = tf.io.gfile.GFile(path, 'rb').read()
image = Image.open(BytesIO(img_data))
(im_width, im_height) = image.size
return np.array(image.getdata()).reshape(
(im_height, im_width, 3)).astype(np.uint8)
def plot_detections(image_np,
boxes,
classes,
scores,
category_index,
figsize=(12, 16),
image_name=None):
"""Wrapper function to visualize detections.
Args:
image_np: uint8 numpy array with shape (img_height, img_width, 3)
boxes: a numpy array of shape [N, 4]
classes: a numpy array of shape [N]. Note that class indices are 1-based,
and match the keys in the label map.
scores: a numpy array of shape [N] or None. If scores=None, then
this function assumes that the boxes to be plotted are groundtruth
boxes and plot all boxes as black with no classes or scores.
category_index: a dict containing category dictionaries (each holding
category index `id` and category name `name`) keyed by category indices.
figsize: size for the figure.
image_name: a name for the image file.
"""
image_np_with_annotations = image_np.copy()
viz_utils.visualize_boxes_and_labels_on_image_array(
image_np_with_annotations,
boxes,
classes,
scores,
category_index,
use_normalized_coordinates=True,
min_score_thresh=0.8)
if image_name:
plt.imsave(image_name, image_np_with_annotations)
else:
plt.imshow(image_np_with_annotations)
# Load images and visualize
train_image_dir = 'models/research/object_detection/test_images/ducky/train/'
train_images_np = []
for i in range(1, 6):
image_path = os.path.join(train_image_dir, 'robertducky' + str(i) + '.jpg')
train_images_np.append(load_image_into_numpy_array(image_path))
plt.rcParams['axes.grid'] = False
plt.rcParams['xtick.labelsize'] = False
plt.rcParams['ytick.labelsize'] = False
plt.rcParams['xtick.top'] = False
plt.rcParams['xtick.bottom'] = False
plt.rcParams['ytick.left'] = False
plt.rcParams['ytick.right'] = False
plt.rcParams['figure.figsize'] = [14, 7]
for idx, train_image_np in enumerate(train_images_np):
plt.subplot(2, 3, idx+1)
plt.imshow(train_image_np)
plt.show()
gt_boxes = [
np.array([[0.436, 0.591, 0.629, 0.712]], dtype=np.float32),
np.array([[0.539, 0.583, 0.73, 0.71]], dtype=np.float32),
np.array([[0.464, 0.414, 0.626, 0.548]], dtype=np.float32),
np.array([[0.313, 0.308, 0.648, 0.526]], dtype=np.float32),
np.array([[0.256, 0.444, 0.484, 0.629]], dtype=np.float32)
]
# By convention, our non-background classes start counting at 1. Given
# that we will be predicting just one class, we will therefore assign it a
# `class id` of 1.
duck_class_id = 1
num_classes = 1
category_index = {duck_class_id: {'id': duck_class_id, 'name': 'rubber_ducky'}}
# Convert class labels to one-hot; convert everything to tensors.
# The `label_id_offset` here shifts all classes by a certain number of indices;
# we do this here so that the model receives one-hot labels where non-background
# classes start counting at the zeroth index. This is ordinarily just handled
# automatically in our training binaries, but we need to reproduce it here.
label_id_offset = 1
train_image_tensors = []
gt_classes_one_hot_tensors = []
gt_box_tensors = []
for (train_image_np, gt_box_np) in zip(
train_images_np, gt_boxes):
train_image_tensors.append(tf.expand_dims(tf.convert_to_tensor(
train_image_np, dtype=tf.float32), axis=0))
gt_box_tensors.append(tf.convert_to_tensor(gt_box_np, dtype=tf.float32))
zero_indexed_groundtruth_classes = tf.convert_to_tensor(
np.ones(shape=[gt_box_np.shape[0]], dtype=np.int32) - label_id_offset)
gt_classes_one_hot_tensors.append(tf.one_hot(
zero_indexed_groundtruth_classes, num_classes))
print('Done prepping data.')
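# Quick check of the label conversion above (illustrative): each image has one
# ground-truth box of class 1, so the zero-indexed class is 0 and, with
# num_classes=1, tf.one_hot yields [[1.]] per image.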
"""
Visualize as a sanity check
"""
dummy_scores = np.array([1.0], dtype=np.float32) # give boxes a score of 100%
plt.figure(figsize=(30, 15))
for idx in range(5):
plt.subplot(2, 3, idx+1)
plot_detections(
train_images_np[idx],
gt_boxes[idx],
np.ones(shape=[gt_boxes[idx].shape[0]], dtype=np.int32),
dummy_scores, category_index)
plt.show()
# Download the checkpoint and put it into models/research/object_detection/test_data/
!wget http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_resnet50_v1_fpn_640x640_coco17_tpu-8.tar.gz
!tar -xf ssd_resnet50_v1_fpn_640x640_coco17_tpu-8.tar.gz
!mv ssd_resnet50_v1_fpn_640x640_coco17_tpu-8/checkpoint models/research/object_detection/test_data/
tf.keras.backend.clear_session()
print('Building model and restoring weights for fine-tuning...', flush=True)
num_classes = 1
pipeline_config = 'models/research/object_detection/configs/tf2/ssd_resnet50_v1_fpn_640x640_coco17_tpu-8.config'
checkpoint_path = 'models/research/object_detection/test_data/checkpoint/ckpt-0'
# Load pipeline config and build a detection model.
#
# Since we are working off of a COCO architecture which predicts 90
# class slots by default, we override the `num_classes` field here to be just
# one (for our new rubber ducky class).
configs = config_util.get_configs_from_pipeline_file(pipeline_config)
model_config = configs['model']
model_config.ssd.num_classes = num_classes
model_config.ssd.freeze_batchnorm = True
detection_model = model_builder.build(
model_config=model_config, is_training=True)
# Set up object-based checkpoint restore --- RetinaNet has two prediction
# `heads` --- one for classification, the other for box regression. We will
# restore the box regression head but initialize the classification head
# from scratch (we show the omission below by commenting out the line that
# we would add if we wanted to restore both heads)
fake_box_predictor = tf.compat.v2.train.Checkpoint(
_base_tower_layers_for_heads=detection_model._box_predictor._base_tower_layers_for_heads,
# _prediction_heads=detection_model._box_predictor._prediction_heads,
# (i.e., the classification head that we *will not* restore)
_box_prediction_head=detection_model._box_predictor._box_prediction_head,
)
fake_model = tf.compat.v2.train.Checkpoint(
_feature_extractor=detection_model._feature_extractor,
_box_predictor=fake_box_predictor)
ckpt = tf.compat.v2.train.Checkpoint(model=fake_model)
ckpt.restore(checkpoint_path).expect_partial()
# Run model through a dummy image so that variables are created
image, shapes = detection_model.preprocess(tf.zeros([1, 640, 640, 3]))
prediction_dict = detection_model.predict(image, shapes)
_ = detection_model.postprocess(prediction_dict, shapes)
print('Weights restored!')
tf.keras.backend.set_learning_phase(True)
# These parameters can be tuned; since our training set has 5 images
# it doesn't make sense to have a much larger batch size, though we could
# fit more examples in memory if we wanted to.
batch_size = 4
learning_rate = 0.01
num_batches = 100
# Select variables in top layers to fine-tune.
trainable_variables = detection_model.trainable_variables
to_fine_tune = []
prefixes_to_train = [
'WeightSharedConvolutionalBoxPredictor/WeightSharedConvolutionalBoxHead',
'WeightSharedConvolutionalBoxPredictor/WeightSharedConvolutionalClassHead']
for var in trainable_variables:
if any([var.name.startswith(prefix) for prefix in prefixes_to_train]):
to_fine_tune.append(var)
# Set up forward + backward pass for a single train step.
def get_model_train_step_function(model, optimizer, vars_to_fine_tune):
"""Get a tf.function for training step."""
# Use tf.function for a bit of speed.
# Comment out the tf.function decorator if you want the inside of the
# function to run eagerly.
@tf.function
def train_step_fn(image_tensors,
groundtruth_boxes_list,
groundtruth_classes_list):
"""A single training iteration.
Args:
image_tensors: A list of [1, height, width, 3] Tensor of type tf.float32.
Note that the height and width can vary across images, as they are
reshaped within this function to be 640x640.
groundtruth_boxes_list: A list of Tensors of shape [N_i, 4] with type
tf.float32 representing groundtruth boxes for each image in the batch.
      groundtruth_classes_list: A list of Tensors of shape [N_i, num_classes]
        with type tf.float32 representing one-hot groundtruth classes for each
        image in the batch.
Returns:
A scalar tensor representing the total loss for the input batch.
"""
shapes = tf.constant(batch_size * [[640, 640, 3]], dtype=tf.int32)
model.provide_groundtruth(
groundtruth_boxes_list=groundtruth_boxes_list,
groundtruth_classes_list=groundtruth_classes_list)
with tf.GradientTape() as tape:
preprocessed_images = tf.concat(
[detection_model.preprocess(image_tensor)[0]
for image_tensor in image_tensors], axis=0)
prediction_dict = model.predict(preprocessed_images, shapes)
losses_dict = model.loss(prediction_dict, shapes)
total_loss = losses_dict['Loss/localization_loss'] + losses_dict['Loss/classification_loss']
gradients = tape.gradient(total_loss, vars_to_fine_tune)
optimizer.apply_gradients(zip(gradients, vars_to_fine_tune))
return total_loss
return train_step_fn
optimizer = tf.keras.optimizers.SGD(learning_rate=learning_rate, momentum=0.9)
train_step_fn = get_model_train_step_function(
detection_model, optimizer, to_fine_tune)
print('Start fine-tuning!', flush=True)
for idx in range(num_batches):
# Grab keys for a random subset of examples
all_keys = list(range(len(train_images_np)))
random.shuffle(all_keys)
example_keys = all_keys[:batch_size]
  # Note that we do not do data augmentation in this demo. If you want
  # a fun exercise, we recommend experimenting with random horizontal flipping
# and random cropping :)
gt_boxes_list = [gt_box_tensors[key] for key in example_keys]
gt_classes_list = [gt_classes_one_hot_tensors[key] for key in example_keys]
image_tensors = [train_image_tensors[key] for key in example_keys]
# Training step (forward pass + backwards pass)
total_loss = train_step_fn(image_tensors, gt_boxes_list, gt_classes_list)
if idx % 10 == 0:
print('batch ' + str(idx) + ' of ' + str(num_batches)
+ ', loss=' + str(total_loss.numpy()), flush=True)
print('Done fine-tuning!')
""" Testing """
test_image_dir = 'models/research/object_detection/test_images/ducky/test/'
test_images_np = []
for i in range(1, 50):
image_path = os.path.join(test_image_dir, 'out' + str(i) + '.jpg')
test_images_np.append(np.expand_dims(
load_image_into_numpy_array(image_path), axis=0))
# Again, comment out this decorator if you want to run inference eagerly
@tf.function
def detect(input_tensor):
"""Run detection on an input image.
Args:
input_tensor: A [1, height, width, 3] Tensor of type tf.float32.
Note that height and width can be anything since the image will be
immediately resized according to the needs of the model within this
function.
Returns:
A dict containing 3 Tensors (`detection_boxes`, `detection_classes`,
and `detection_scores`).
"""
preprocessed_image, shapes = detection_model.preprocess(input_tensor)
prediction_dict = detection_model.predict(preprocessed_image, shapes)
return detection_model.postprocess(prediction_dict, shapes)
# Note that the first frame will trigger tracing of the tf.function, which will
# take some time, after which inference should be fast.
label_id_offset = 1
for i in range(len(test_images_np)):
input_tensor = tf.convert_to_tensor(test_images_np[i], dtype=tf.float32)
detections = detect(input_tensor)
plot_detections(
test_images_np[i][0],
detections['detection_boxes'][0].numpy(),
detections['detection_classes'][0].numpy().astype(np.uint32)
+ label_id_offset,
detections['detection_scores'][0].numpy(),
category_index, figsize=(15, 20), image_name="gif_frame_" + ('%02d' % i) + ".jpg")
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Author: Keurfon Luu <keurfon.luu@mines-paristech.fr>
License: MIT
"""
__all__ = [ "progress_bar", "progress_perc", "progress" ]
def progress_bar(i, imax, n = 50):
bar = list("[" + n * " " + "]")
perc = (i+1) / imax
bar[1:int(perc*n)+1] = int(perc*n) * "="
imid = (n+2) // 2
if perc < 0.1:
pstr = " %.2f%%" % (perc*100.)
elif 0.1 <= perc < 1.:
pstr = "%.2f%%" % (perc*100.)
else:
pstr = "100.0%"
bar[imid-3:imid+3] = pstr
print("\r" + "".join(bar), end = "", flush = True)
def progress_perc(i, imax, prefix = None):
perc = (i+1) / imax
if perc < 0.1:
pstr = " %.2f%% " % (perc*100.)
elif 0.1 <= perc < 1.:
pstr = "%.2f%% " % (perc*100.)
else:
pstr = "100.0%"
if prefix is None:
prefix = "Progression: "
print("\r%s%s" % (prefix, pstr), end = "", flush = True)
def progress(i, imax, ptype = "bar", n = 50, prefix = None):
if ptype == "bar":
progress_bar(i, imax, n)
elif ptype == "perc":
progress_perc(i, imax, prefix)
else:
raise ValueError("unknown progression type '%s'" % ptype)
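# Minimal usage sketch (not part of the original module): animate the bar
# over a dummy loop to see the console output update in place.
if __name__ == "__main__":
    import time
    imax = 200
    for i in range(imax):
        progress(i, imax, ptype = "bar")
        time.sleep(0.01)
    print()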
|
nilq/baby-python
|
python
|
import pytz
from pytz import timezone, common_timezones
from datetime import datetime
def local_to_utc(local_time, local_tz, aware=True):
if local_tz not in common_timezones:
raise ValueError('Timezone: %s is not in common list' % (local_tz))
utc = pytz.utc
tz = timezone(local_tz)
if aware:
time = tz.localize(datetime.strptime(local_time, '%d/%m/%Y %H:%M'))
return time.astimezone(utc)
else:
time = tz.localize(datetime.strptime(local_time, '%d/%m/%Y %H:%M'))
time = time.astimezone(utc)
return time.replace(tzinfo=None)
def utc_to_local(utc_time, local_tz):
if local_tz not in common_timezones:
raise ValueError('Timezone: %s is not in common list' % (local_tz))
fmt = '%d/%m/%Y %H:%M'
local_tz = timezone(local_tz)
utc_time = pytz.utc.localize(utc_time)
local_time = utc_time.astimezone(local_tz)
return local_time.strftime(fmt)
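# Minimal usage sketch (not part of the original module). Paris is UTC+1 in
# December, so 18:30 local time corresponds to 17:30 UTC:
if __name__ == "__main__":
    print(local_to_utc('24/12/2021 18:30', 'Europe/Paris'))  # 2021-12-24 17:30:00+00:00
    naive = local_to_utc('24/12/2021 18:30', 'Europe/Paris', aware=False)
    print(utc_to_local(naive, 'Europe/Paris'))               # 24/12/2021 18:30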
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# @uthor: Makram Jandar
# ____ __ ___ __ ______ ___ ___
# | | | | \| | | | / _] \
# |__ | | | o ) | | |/ [_| D )
# __| | | | _/| ~ |_| |_| _] /
# / | | : | | |___, | | | | [_| \
# \ ` | | | | | | | | | . \
# \____j\__,_|__| |____/ |__| |_____|__|\_|
# © Jupyter Helper Functions & more
""" Several helper functions for interactive use. """
import time, sys
from IPython.core.display import HTML
""" Reloading Jupyter from cell """
def reloadJupyter():
return HTML("<script>Jupyter.notebook.kernel.restart()</script>")
""" Progress Bar Generator """
def updateProgress(progress):
# update_progress() : Displays or updates a console progress bar
# Accepts a float between 0 and 1. Any int will be converted to a float.
# A value under 0 represents a 'halt'.
# A value at 1 or bigger represents 100%
barLength = 10 # Modify this to change the length of the progress bar
status = ""
if isinstance(progress, int):
progress = float(progress)
if not isinstance(progress, float):
progress = 0
status = "error: progress var must be float\r\n"
if progress < 0:
progress = 0
status = "Halt...\r\n"
if progress >= 1:
progress = 1
status = "Done...\r\n"
block = int(round(barLength*progress))
text = "\rPercent: [{0}] {1}% {2}".format( "#"*block + "-"*(barLength-block), progress*100, status)
sys.stdout.write(text)
sys.stdout.flush()
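""" Minimal usage sketch (not part of the original helpers) """
if __name__ == "__main__":
    # Animate the console progress bar from 0% to 100%.
    for i in range(101):
        updateProgress(i / 100)
        time.sleep(0.02)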
|
nilq/baby-python
|
python
|
import sys, re
try:
from Bio import Entrez
except ImportError as exc:
print(f"### Error: {exc}", file=sys.stderr)
print(f"### This program requires biopython", file=sys.stderr)
print(f"### Install: conda install -y biopython>=1.79", file=sys.stderr)
sys.exit(-1)
from biorun.libs import placlib as plac
from tqdm import tqdm
from biorun import utils
Entrez.email = 'foo@foo.com'
ncbi_patt = r'(?P<letters>[a-zA-Z]+)(?P<under>_?)(?P<digits>\d+)(\.(?P<version>\d+))?'
ncbi_patt = re.compile(ncbi_patt)
def detect_format(text):
# Allow integer (gi numbers)
try:
int(text)
return text
except ValueError as exc:
pass
m = ncbi_patt.search(text)
if not m:
utils.error(f"accession number format not recognized: {text}")
# Unused at this time.
letters, digits, under, version = m.group("letters"), m.group("digits"), m.group("under"), m.group("version")
return text
def efetch(ids, db='nuccore', rettype='gbwithparts', retmode='text'):
    stream = Entrez.efetch(db=db, id=ids, rettype=rettype, retmode=retmode)
stream = tqdm(stream, unit='B', unit_divisor=1024, desc='# downloaded', unit_scale=True, delay=5, leave=False)
for line in stream:
print(line, end='')
stream.update(len(line))
stream.close()
@plac.pos("acc", "accession numbers")
@plac.opt("db", "database", choices=["nuccore", "protein"])
@plac.opt("format_", "return format", choices=["gbwithparts", "fasta", "gb"])
@plac.opt("alias", "remap sequence ids")
def run(db="nuccore", format_="gbwithparts", alias='', *acc):
ids = []
for num in acc:
ids.extend(num.split(","))
if not sys.stdin.isatty():
lines = utils.read_lines(sys.stdin, sep='')
ids.extend(lines)
ids = map(detect_format, ids)
ids = ",".join(ids)
if ids:
efetch(db=db, rettype=format_, ids=ids)
else:
utils.error("no accession numbers were specified")
if __name__ == '__main__':
# id = "AY851612",
run()
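# Minimal usage sketch (not part of the original script). This performs a live
# NCBI query; the accession number is only illustrative:
#
#   run("nuccore", "fasta", "", "AF086833")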
|
nilq/baby-python
|
python
|
'''
Agilent 33220A
Created on October 11, 2009
@author: bennett
'''
# Future functions GetLoad and SetLoad, GetUnits and SetUnits
import gpib_instrument
class Agilent33220A(gpib_instrument.Gpib_Instrument):
'''
The Agilent 33220A Arbitrary Function Generator GPIB communication class (Incomplete)
'''
def __init__(self, pad, board_number = 0, name = '', sad = 0, timeout = 17, send_eoi = 1, eos_mode = 0):
'''
Constructor The PAD (Primary GPIB Address) is the only required parameter
'''
super(Agilent33220A, self).__init__(board_number, name, pad, sad, timeout, send_eoi, eos_mode)
# GPIB identity string of the instrument
self.id_string = "Agilent Technologies,33220A,MY44036372,2.02-2.02-22-2"
self.manufacturer = 'Agilent'
self.model_number = '33220A'
self.description = 'Arbitrary Function Generator'
#self.compare_identity()
self.vmax = 5.0 #assumes 50 Ohm load setting
def SetFunction(self, function = 'sine'):
'''
Set Output Function Type
'''
if function == 'sine':
functionstring = 'SINusoid'
elif function == 'square':
functionstring = 'SQUare'
elif function == 'ramp':
functionstring = 'RAMP'
elif function == 'pulse':
functionstring = 'PULSe'
elif function == 'noise':
functionstring = 'NOISe'
elif function == 'dc':
functionstring = 'DC'
elif function == 'user':
functionstring = 'USER'
else:
            print 'Invalid type of function'
functionstring = ''
commandstring = 'FUNCtion ' + functionstring
self.write(commandstring)
def GetFunction(self):
'''Get the current function type'''
commandstring = 'FUNCtion?'
result = self.ask(commandstring)
function = result
return function
def SetFrequency(self, frequency = 100):
'''
Set the output frequency in Hz
'''
function = self.GetFunction()
if function == 'SIN':
            if frequency > 20e6:
                print 'Greater than max frequency'
                frequency = 20e6
            if frequency < 1e-6:
                print 'Smaller than min frequency'
                frequency = 1e-6
        if function == 'SQU':
            if frequency > 20e6:
                print 'Greater than max frequency'
                frequency = 20e6
            if frequency < 1e-6:
                print 'Smaller than min frequency'
                frequency = 1e-6
        if function == 'RAMP':
            if frequency > 200e3:
                print 'Greater than max frequency'
                frequency = 200e3
            if frequency < 1e-6:
                print 'Smaller than min frequency'
                frequency = 1e-6
        if function == 'PULS':
            if frequency > 5e6:
                print 'Greater than max frequency'
                frequency = 5e6
            if frequency < 500e-6:
                print 'Smaller than min frequency'
                frequency = 500e-6
        if function == 'NOIS':
            print 'Frequency not applicable for Noise'
        if function == 'DC':
            print 'Frequency not applicable for DC'
        if function == 'USER':
            if frequency > 6e6:
                print 'Greater than max frequency'
                frequency = 6e6
            if frequency < 1e-6:
                print 'Smaller than min frequency'
                frequency = 1e-6
frequencystring = str(frequency)
commandstring = 'FREQuency ' + frequencystring
self.write(commandstring)
def GetFrequency(self):
'''Get the current frequencye'''
commandstring = 'FREQuency?'
result = self.ask(commandstring)
frequency = float(result)
return frequency
def SetAmplitude(self, amplitude = 0.1):
'''
Set the output amplitude in volts
'''
vmax = 5.0 #assumes 50 Ohm load setting
offset = self.GetOffset()
if amplitude < 0.010:
print 'Amplitude is below minimum'
amplitude = 0.010
if amplitude > 5.0:
print 'Amplitude greater than Max Voltage'
elif amplitude/2.0+abs(offset) > vmax:
print 'Combination of amplitude and offset greater than 5V. Offset will be modified.'
amplitudestring = str(amplitude)
commandstring = 'VOLTage ' + amplitudestring
self.write(commandstring)
def GetAmplitude(self):
'''Get the current amplitude'''
commandstring = 'VOLTage?'
result = self.ask(commandstring)
amplitude = float(result)
return amplitude
def SetOffset(self, offset):
'''Set the offset voltage'''
amplitude = self.GetAmplitude()
if amplitude/2.0+abs(offset) > self.vmax:
print 'Combination of amplitude and offset greater than 5V. Amplitude will be modified.'
offsetstring = str(offset)
commandstring = 'VOLTage:OFFSet ' + offsetstring
self.write(commandstring)
def GetOffset(self):
'''Get the current offset voltage'''
commandstring = 'VOLTage:OFFSet?'
result = self.ask(commandstring)
offset = float(result)
return offset
def SetVoltageHigh(self, vhigh):
'''Set the high voltage'''
vlow = self.GetVoltageLow()
if vhigh > self.vmax:
print 'Requested voltage is greater than vmax'
vhigh = self.vmax
if vhigh < vlow:
            print 'Requested voltage is less than low voltage'
voltagestring = str(vhigh)
commandstring = 'VOLTage:HIGH ' + voltagestring
self.write(commandstring)
def GetVoltageHigh(self):
'''Get the current high voltage'''
commandstring = 'VOLTage:HIGH?'
result = self.ask(commandstring)
vhigh = float(result)
return vhigh
def SetVoltageLow(self, vlow):
'''Set the low voltage'''
vhigh = self.GetVoltageHigh()
if vlow < -1*self.vmax:
print 'Requested voltage is less than vmin'
vlow = -1*self.vmax
if vlow > vhigh:
            print 'Requested voltage is greater than high voltage'
voltagestring = str(vlow)
commandstring = 'VOLTage:LOW ' + voltagestring
self.write(commandstring)
def GetVoltageLow(self):
'''Get the current low voltage'''
commandstring = 'VOLTage:LOW?'
result = self.ask(commandstring)
vlow = float(result)
return vlow
def SetOutput(self, outputstate):
'''Set the state of the output 'off' or 'on' '''
if outputstate != 'on' and outputstate != 'off':
print 'Invalid output state, setting to off'
outputstate = 'off'
commandstring = 'OUTPut ' + outputstate
self.write(commandstring)
def SetPulsePeriod(self, period):
'''Set the pulse period'''
periodstring = str(period)
commandstring = 'PULSe:PERiod ' + periodstring
self.write(commandstring)
def GetPulsePeriod(self):
'''Get the pulse period'''
commandstring = 'PULSe:PERiod?'
result = self.ask(commandstring)
period = float(result)
return period
def SetPulseWidth(self, width):
'''Set the pulse width'''
widthstring = str(width)
commandstring = 'FUNCtion:PULSe:WIDTh ' + widthstring
self.write(commandstring)
def GetPulseWidth(self):
'''Get the pulse width'''
commandstring = 'FUNCtion:PULSe:WIDTh?'
result = self.ask(commandstring)
width = float(result)
return width
def SetPulseEdgeTime(self, edgetime):
'''Set the pulse edge time'''
edgetimestring = str(edgetime)
commandstring = 'FUNCtion:PULSe:TRANsition ' + edgetimestring
self.write(commandstring)
def GetPulseEdgeTime(self):
'''Get the pulse width'''
commandstring = 'FUNCtion:PULSe:TRANsition?'
result = self.ask(commandstring)
edgetime = float(result)
return edgetime
def GetOutput(self):
'''Get the state of the output 'off' or 'on' '''
commandstring = 'OUTPut?'
result = self.ask(commandstring)
if result == '0':
state = 'off'
elif result == '1':
state = 'on'
else:
            print 'Error querying state'
state = 'error'
return state
def GetListOfArbWaveform(self):
''' Return a list of stings that are the names of the waveforms in memory'''
commandstring = 'DATA:CATalog?'
result = self.ask(commandstring)
catalog = result.split(',') # split into a list
for k in range(len(catalog)): # loop over list
catalog[k]=catalog[k][1:-1] # strip leading and trailing quotes
return catalog
def SelectArbWaveform(self, waveform_name = 'VOLATILE'):
'''Select the arbitrary waveform to output '''
catalog = self.GetListOfArbWaveform()
if waveform_name not in catalog:
            print 'Waveform does not exist. Setting to VOLATILE'
waveform_name = 'VOLATILE'
if waveform_name not in catalog:
print 'VOLATILE does not exist. Setting to EXP_RISE'
waveform_name = 'EXP_RISE'
commandstring = 'FUNCtion:USER ' + waveform_name
self.write(commandstring)
def GetSelectedArbWaveform(self):
        '''Get the currently selected arb waveform '''
commandstring = 'FUNCtion:USER?'
result = self.ask(commandstring)
waveform_name = result
return waveform_name
def SendArbWaveform(self, waveform):
        '''Send the arbitrary waveform to volatile memory '''
        waveliststring = str(list(waveform)) #turn array or whatever to a list and then string
datastring = waveliststring[1:-1] # strip off the brackets on the end
commandstring = 'DATA VOLATILE, ' + datastring
self.write(commandstring)
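# Minimal usage sketch (not part of the original driver; assumes an instrument
# at GPIB primary address 10 and a working gpib_instrument setup):
#
#   fg = Agilent33220A(10)
#   fg.SetFunction('square')
#   fg.SetFrequency(1000)    # 1 kHz
#   fg.SetAmplitude(0.5)     # 0.5 Vpp into a 50 Ohm load
#   fg.SetOutput('on')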
|
nilq/baby-python
|
python
|
from collections import Counter
import json
import math
from pprint import pprint
import re
import sys
import urllib.request
import glob
files = glob.glob(
"/Users/nakamura/git/d_umesao/umesao_images/docs/iiif/item/*/manifest.json")
files = sorted(files)
selections = []
prefix = "https://nakamura196.github.io/vis_umesao"
for i in range(len(files)):
file = files[i]
    # Main processing
if i % 1000 == 0:
print(str(i+1)+"/"+str(len(files))+"\t"+file)
with open(file) as f:
manifest = json.load(f)
manifest_uri = manifest["@id"]
id = manifest_uri.split("/")[-2]
metadata = []
if "metadata" in manifest:
metadata_old = manifest["metadata"]
for obj in metadata_old:
if obj["label"] == "資料種別":
metadata.append(obj)
canvases = manifest["sequences"][0]["canvases"]
if len(canvases) == 0:
continue
member = {
"@id": canvases[0]["@id"],
"id" : id,
"@type": "sc:Canvas",
"label": manifest["label"],
"metadata": metadata,
"thumbnail": manifest["thumbnail"]["@id"],
"related": "https://umesao.cultural.jp/item/"+id
}
members = [member]
selection = {
"@id": prefix + "/iiif/curation/"+id+"/range1",
"@type": "sc:Range",
"label": "Characters",
"members": members,
"within": {
"@id": manifest_uri,
"@type": "sc:Manifest",
"label": manifest["label"]
}
}
selections.append(selection)
OUTPUT_FILE = "../data/src/curation.json"
curation = {
"@context": [
"http://iiif.io/api/presentation/2/context.json",
"http://codh.rois.ac.jp/iiif/curation/1/context.json"
],
"@id": prefix + "/iiif/curation/curation.json",
"@type": "cr:Curation",
"label": "Character List",
"selections": selections
}
fw = open(OUTPUT_FILE, 'w')
json.dump(curation, fw, ensure_ascii=False, indent=4,
sort_keys=True, separators=(',', ': '))
|
nilq/baby-python
|
python
|
# Generated by Django 2.0.6 on 2018-06-29 16:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('grafik', '0004_auto_20180629_2115'),
]
operations = [
migrations.RemoveField(
model_name='passenger',
name='kecamatan',
),
migrations.AlterField(
model_name='passenger',
name='sex',
field=models.CharField(choices=[('M', 'male'), ('F', 'female')], max_length=10),
),
]
|
nilq/baby-python
|
python
|
# Generates simulation data for time-series count data with decoupled mean and variance
import numpy as np
import pandas as pd
from scipy.stats import norm
import statsmodels.api as sm
from datetime import datetime as dt
from dateutil.relativedelta import relativedelta
from dataclasses import dataclass
def generate_data(ar1_corr=None):
days, x_df = get_predictors()
beta, alpha = get_coefs()
mu, sigma = get_derived_params(x_df, beta, alpha)
z, y = sample_data(mu, sigma, ar1_corr)
# Return data in dataclass
@dataclass(frozen=True)
class Data:
days: pd.Series
x_df: pd.DataFrame
x: np.ndarray
beta: np.ndarray
alpha: np.ndarray
mu: np.ndarray
sigma: np.ndarray
z: np.ndarray
y: np.ndarray
data = Data(
days=days,
x_df=x_df,
x=x_df.values,
beta=beta,
alpha=alpha,
mu=mu,
sigma=sigma,
z=z,
y=y)
return data
def get_predictors():
start_date = dt.strptime("2017-01-01", "%Y-%m-%d").date()
num_years = 3
max_date = start_date + relativedelta(years=num_years, days=-1)
days = [start_date]
while days[-1] < max_date:
days.append(
days[-1] + relativedelta(days=1))
# Put date in data frame
df = pd.DataFrame({"day": days})
# Simple transformations
df = df.assign(
intercept=1.,
day_of_week=df.day.apply(lambda d: d.weekday()),
days_since_start=df.day.apply(lambda d: (d - start_date).days),
day_of_year=df.day.apply(lambda d: d.timetuple().tm_yday),
)
# Small modifications to transformations
df = df.assign(
days_since_start=(df.days_since_start - df.days_since_start.mean()) /
df.days_since_start.std(), # Rescaling
year_radians=df.day_of_year*2*np.pi/365,
)
# Long-term trends
days_since_start_squared_raw = df.days_since_start**2
trends = (df
.assign(days_since_start_squared=(
days_since_start_squared_raw - days_since_start_squared_raw.mean()
) / days_since_start_squared_raw.std())
.loc[:, ["days_since_start", "days_since_start_squared"]]
)
# Day of week
day_of_week = pd.get_dummies(df.day_of_week, prefix="day_of_week", drop_first=True)
# Seasonality
seasonality = df.assign(
seasonality_cos=np.cos(df.year_radians),
seasonality_sin=np.sin(df.year_radians)).loc[
:, ["seasonality_cos", "seasonality_sin"]
]
# Create design matrix
df_list = [
df.loc[:, ["intercept"]],
trends,
day_of_week,
seasonality,
]
x_df = pd.concat(df_list, axis=1)
return df.day, x_df
def get_coefs():
# Set beta
beta_intercept = [5.]
beta_trends = [0.4, -0.17]
beta_day_of_week = [0.33, 0.36, 0.4, 0.39, 0.26, 0.07] # Sunday is omitted
beta_seasonality = [0.2, -0.1]
beta = np.array(
beta_intercept +
beta_trends +
beta_day_of_week +
beta_seasonality)
# Set alpha
alpha_intercept = [-1.]
alpha_trends = [-0.2, -0.03]
alpha_day_of_week = [0.33, 0.36, 0.4, 0.39, 0.26, 0.07] # Sunday is omitted
alpha_seasonality = [0.16, -0.05]
alpha = np.array(
alpha_intercept +
alpha_trends +
alpha_day_of_week +
alpha_seasonality)
return beta, alpha
def get_derived_params(x_df, beta, alpha):
x = x_df.values
mu = x @ beta
exp_mu = np.exp(mu)
sigma = np.exp(x @ alpha)
return mu, sigma
def sample_data(mu, sigma, ar1_corr):
z = None
if ar1_corr is None:
z = norm.rvs(loc=mu, scale=sigma)
else:
arma_process = sm.tsa.ArmaProcess(np.array([1., -ar1_corr]))
epsilon_raw = arma_process.generate_sample(mu.size)
epsilon = epsilon_raw * np.sqrt((1 - ar1_corr**2))
z = mu + (sigma * epsilon)
y = np.floor(np.exp(z))
return z, y
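# Minimal usage sketch (not part of the original module): generate one dataset
# with AR(1)-correlated noise and inspect the design matrix and counts.
if __name__ == "__main__":
    data = generate_data(ar1_corr=0.5)
    print(data.x_df.head())  # intercept, trends, day-of-week dummies, seasonality
    print(data.y[:10])       # simulated daily counts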
|
nilq/baby-python
|
python
|
import machine
import utime
class KitronikPicoMotor:
#Pins 4 and 5 motor 1
#Pins 9 and 10 motor 2
#'Forward' is P5 or P9 driven high, with P4 or P10 held low.
#'Reverse' is P4 or P10 driven high, with P5 or P9 held low
#Driving the motor is simpler than the servo - just convert 0-100% to 0-4095 and push it to the correct registers.
#each motor has 4 writes - low and high bytes for a pair of registers.
def motorOn(self,motor, direction, speed):
#cap speed to 0-100%
if (speed<0):
speed = 0
elif (speed>100):
speed=100
#convert 0-100 to 0-65535
PWM = int(speed*655.35)
if motor == 1:
if direction == "f":
self.motor1Forward.duty_u16(PWM)
self.motor1Reverse.duty_u16(0)
elif direction == "r":
self.motor1Forward.duty_u16(0)
self.motor1Reverse.duty_u16(PWM)
else:
raise Exception("INVALID DIRECTION") #harsh, but at least you'll know
elif motor == 2:
if direction == "f":
self.motor2Forward.duty_u16(PWM)
self.motor2Reverse.duty_u16(0)
elif direction == "r":
self.motor2Forward.duty_u16(0)
self.motor2Reverse.duty_u16(PWM)
else:
raise Exception("INVALID DIRECTION") #harsh, but at least you'll know
else:
raise Exception("INVALID MOTOR") #harsh, but at least you'll know
#To turn off set the speed to 0...
def motorOff(self,motor):
self.motorOn(motor,"f",0)
#################
#Stepper Motors
#################
    #this is only a basic full-stepping implementation.
    #speed sets the length of the pulses (and hence the speed...)
    #so is 'backwards' - the fastest that works reliably with the motors I have to hand is 20ms, but slower than that is good. Tested to 2000 (2 seconds per step).
# motor should be 1 or 2 - 1 is terminals for motor 1 and 2 on PCB, 2 is terminals for motor 3 and 4 on PCB
def step(self,direction, steps, speed =20, holdPosition=False):
if(direction =="f"):
directions = ["f", "r"]
coils = [1,2]
elif (direction == "r"):
directions = ["r", "f"]
coils = [2,1]
else:
raise Exception("INVALID DIRECTION") #harsh, but at least you'll know
while steps > 0:
for direction in directions:
if(steps == 0):
break
for coil in coils:
self.motorOn(coil,direction,100)
utime.sleep_ms(speed)
steps -=1
if(steps == 0):
break
#to save power turn off the coils once we have finished.
#this means the motor wont hold position.
if(holdPosition == False):
for coil in coils:
self.motorOff(coil)
#Step an angle. this is limited by the step resolution - so 200 steps is 1.8 degrees per step for instance.
# a request for 20 degrees with 200 steps/rev will result in 11 steps - or 19.8 rather than 20.
def stepAngle(self, direction, angle, speed =20, holdPosition=False, stepsPerRev=200):
steps = int(angle/(360/stepsPerRev))
print (steps)
self.step(direction, steps, speed, holdPosition)
#initialisation code for using:
    #defaults to the standard pins and freq for the kitronik board, but could be overridden
def __init__(self,Motor1ForwardPin = machine.Pin(3),Motor1ReversePin = machine.Pin(2),Motor2ForwardPin = machine.Pin(6),Motor2ReversePin = machine.Pin(7),PWMFreq = 10000):
self.motor1Forward=machine.PWM(Motor1ForwardPin)
self.motor1Reverse=machine.PWM(Motor1ReversePin)
self.motor2Forward=machine.PWM(Motor2ForwardPin)
self.motor2Reverse=machine.PWM(Motor2ReversePin)
self.motor1Forward.freq(PWMFreq)
self.motor1Reverse.freq(PWMFreq)
self.motor2Forward.freq(PWMFreq)
self.motor2Reverse.freq(PWMFreq)
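# Minimal usage sketch (not part of the original driver; assumes MicroPython on
# a Pico with the Kitronik motor board and the default pins above):
#
#   board = KitronikPicoMotor()
#   board.motorOn(1, "f", 50)    # motor 1 forward at 50% speed
#   utime.sleep_ms(2000)
#   board.motorOff(1)
#   board.step("f", 200)         # 200 full steps = one rev of a 1.8 deg/step motor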
|
nilq/baby-python
|
python
|
from django import test
import actrack
from actrack.managers.inst import get_user_model
from actrack.actions import save_queue
__test__ = False
__unittest = True
class TestCase(test.TestCase):
@property
def user_model(self):
return get_user_model()
def log(self, *args, **kwargs):
commit = kwargs.pop('commit', False)
actrack.log(*args, **kwargs)
if commit:
self.save_queue()
@staticmethod
def save_queue():
save_queue(None)
|
nilq/baby-python
|
python
|
# DmrSmashTools by Dreamer
# Github Link: https://github.com/Dreamer13sq/DmrSmashTools/tree/main/DmrSmashTools_Blender
bl_info = {
"name": "Dmr Smash Tools",
"description": 'Some tools used to make modelling more efficient.',
"author": "Dreamer",
"version": (1, 0),
"blender": (2, 90, 0),
"category": "3D View",
"warning": 'To have addon operators appear in a search in Blender 2.9, Enable "Developer Extras" in Edit > Preferences > Interface > Display'
}
import bpy
# Utility Functions =====================================================
def lastLetters(name):
i = len(name)
char = 0;
for i in range(1, len(name)):
char = name[-i];
if (char >= "0" and char <= "9") or char == ".":
continue;
return name[-i:];
return 0;
# Returns last letter in string
def lastLetter(name):
for i in range(1, len(name)):
if name[-i].isalpha():
return name[-i];
return 0;
# Changes left name to right and vice-versa ("ShoulderL" -> "ShoulderR")
def switchName(name):
i = len(name)
char = 0;
for i in range(1, len(name)):
char = name[-i];
if char.isdigit() or char == ".":
continue;
name = list(name)
if name[-i] == "L":
name[-i] = "R";
else:
name[-i] = "L";
name = "".join(name);
return name;
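# Example (sketch): switchName("ShoulderL") returns "ShoulderR", and
# switchName("ShoulderL.001") returns "ShoulderR.001".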
# Returns list of vertices in vertex group
def FindVertexGroupVertices(mesh_object, groupname_or_index):
vert = [];
vertexGroups = mesh_object.vertex_groups;
targetGroupIndex = None;
# Given a name
if isinstance(groupname_or_index, str):
for vgroup in vertexGroups:
if vgroup.name == groupname_or_index:
targetGroupIndex = vgroup.index;
break;
# Given an index
elif isinstance(groupname_or_index, int):
for vgroup in vertexGroups:
if vgroup.index == groupname_or_index:
targetGroupIndex = vgroup.index;
break;
# Find vertices of group
for v in mesh_object.data.vertices:
for vge in v.groups:
if vge.group == targetGroupIndex:
vert.append(v);
break;
return vert;
# Returns true if distance between vertices is within given distance
def VertexInDist(v1, v2, dist):
x = v1.co[0] - v2.co[0];
y = v1.co[1] - v2.co[1];
z = v1.co[2] - v2.co[2];
return (x*x + y*y + z*z) <= dist;
# Returns closest vertex in vertex data. None if none is found under dist
def FindClosestVertex(sourceVertex, other_vertices, dist):
dist *= dist;
lastdist = dist;
lastVertex = None;
for v in other_vertices:
x = v.co[0] - sourceVertex.co[0];
y = v.co[1] - sourceVertex.co[1];
z = v.co[2] - sourceVertex.co[2];
dist = x*x + y*y + z*z;
if dist <= lastdist:
lastVertex = v;
lastdist = dist;
print(dist)
return lastVertex;
# Returns list of closest vertices in vertex data. Empty if none is found under dist
def FindClosestVertices(sourceVertex, other_vertices, dist):
dist *= dist;
vertexList = [];
for v in other_vertices:
x = v.co[0] - sourceVertex.co[0];
y = v.co[1] - sourceVertex.co[1];
z = v.co[2] - sourceVertex.co[2];
if (x*x + y*y + z*z) <= dist:
vertexList.append(v);
return vertexList;
# Clear weights from vertex
def ClearVertexWeights(v, vertexGroups):
for vge in v.groups:
vertexGroups[vge.group].remove([v.index]);
# Set Vertex Weight. Creates groups where necessary
def SetVertexWeight(v, weight_value, groupname, vertexGroups):
# Group exists
if groupname in vertexGroups.keys():
vertexGroups[groupname].add([v.index], weight_value, 'REPLACE');
# Create new group and add
else:
vertexGroups.new(name = groupname).add([v.index], weight_value, 'ADD');
# Get object Mode
def GetViewMode():
return bpy.context.active_object.mode;
# Set object Mode. Returns previously set mode
def SetViewMode(mode):
previous_mode = bpy.context.active_object.mode;
bpy.ops.object.mode_set(mode = mode);
return previous_mode;
# Sets Active Object
def SetActiveObject(object):
bpy.context.view_layer.objects.active = object;
return object;
# Returns Active Object
def GetActiveObject():
return bpy.context.view_layer.objects.active;
# Returns currently selected objects
def GetSelectedObjects(context):
return context.selected_objects;
def PanelInEditMode():
if bpy.context.active_object == None:
return False;
return (bpy.context.active_object.mode == 'EDIT') or (bpy.context.active_object.mode == 'WEIGHT_PAINT')
# Panels & Operators
classlist = [];
# Home Panel
class DmrSmashToolsPanel(bpy.types.Panel):
bl_label = "Dmr Smash Tools"
bl_idname = "DMR_SMASH_PT_MAINPANEL"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
#bl_parent_id = 'DmrSmashToolsPanel' # Nested Panels
def draw(self, context):
layout = self.layout
# Sub-Layouts: row(), column(), column_flow(), grid_flow(), box(), split(), menu_pie()
#section = layout.box()
#section.emboss = 'NORMAL'
return;
classlist.append(DmrSmashToolsPanel);
# Vertex =============================================================================
class DMR_SMASH_DISPLAY_SEAMS_(bpy.types.PropertyGroup):
def activeUpdate(self, context):
bpy.context.space_data.overlay.show_edge_seams = self.isActive;
    isActive : bpy.props.BoolProperty(
        name = 'Seams',
        description = '',
        default = False,
        update = activeUpdate)
class DmrSmashToolsPanel_Vertex(bpy.types.Panel):
bl_label = "Vertex"
bl_idname = "DMR_SMASH_PT_VERTEX"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
bl_parent_id = 'DMR_SMASH_PT_MAINPANEL' # Nested Panels
def draw(self, context):
layout = self.layout
section = layout.column();
subsection = section.column();
if PanelInEditMode():
subsection.operator('dmr_smash.snap_to_active')
subsection.operator('dmr_smash.snap_vertices')
subsection.operator('dmr_smash.match_vertex_inner')
subsection.operator('dmr_smash.dup_and_mirror')
subsection.operator('dmr_smash.reset_vertex_shape_keys')
else:
subsection.operator('dmr_smash.bake_shape_keys');
section.label(text = 'Object Vertex Match', icon = 'ARROW_LEFTRIGHT');
subsection = section.row(align = True);
subsection.operator('dmr_smash.match_vertex', text = "Vertex")
subsection.operator('dmr_smash.match_normals', text = "Normal")
subsection.operator('dmr_smash.match_weights', text = "Weight")
if PanelInEditMode():
section.label(text = 'Display', icon = 'RESTRICT_VIEW_ON');
subsection = section.row(align = True);
#subsection.prop(DMR_SMASH_DISPLAY_SEAMS, "isActive", toggle = True)
subsection.operator('dmr_smash.display_edge_seams', text = "Seams")
subsection.operator('dmr_smash.display_edge_sharps', text = "Sharps")
subsection.operator('dmr_smash.display_edge_creases', text = "Creases")
subsection.operator('dmr_smash.display_edge_normals', icon = "NORMALS_VERTEX_FACE")
classlist.append(DmrSmashToolsPanel_Vertex);
if 1: # Operators ("if" statement used for code-folding)
class DMR_SMASH_SNAPSELECTIONTOACTIVE(bpy.types.Operator):
bl_label = "Snap Selection To Active"
bl_idname = 'dmr_smash.snap_to_active'
bl_description = 'Moves Selected Vertices to Active Element';
def execute(self, context):
bpy.ops.view3d.snap_selected_to_active();
return {'FINISHED'}
classlist.append(DMR_SMASH_SNAPSELECTIONTOACTIVE);
class DMR_SMASH_SNAPVERTEX(bpy.types.Operator): # ------------------------------
bl_label = "Snap Unselected Vertices"
bl_idname = 'dmr_smash.snap_vertices'
bl_description = 'Snaps unselected vertices to selected based on closeness' + \
'\nNOTE: May not work for objects with Shape Keys';
testRange : bpy.props.FloatProperty(name = "", default = 0.001, precision = 4);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
row = layout.row();
row.label(text = "Match Distance");
row.prop(self, "testRange");
sourcename = "";
column = layout.column();
def execute(self, context):
print("-" * 80)
activeObject = context.object;
if activeObject.type != 'MESH':
self.report({'WARNING'}, 'Active object "%s" is not a mesh' % activeObject.name);
return {'FINISHED'}
rangeAmount = self.testRange;
hits = 0;
mode = GetViewMode();
SetViewMode('OBJECT'); # Update Selected
selectedVertices = [];
unselectedVertices = [];
# Sort selected and unselected
for v in activeObject.data.vertices:
if v.select:
selectedVertices.append(v);
else:
unselectedVertices.append(v);
# Find and snap
for v in selectedVertices:
closestVertices = FindClosestVertices(v, unselectedVertices, rangeAmount);
for v2 in closestVertices:
unselectedVertices.remove(v2);
v2.co = (v.co[0], v.co[1], v.co[2]);
hits += 1;
self.report({'INFO'}, 'Snapped %d Vertices(s)' % hits);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_SNAPVERTEX);
class DMR_SMASH_DUPLICATEANDMIRROR(bpy.types.Operator): # ------------------------------
bl_label = "Duplicate and Mirror"
bl_idname = 'dmr_smash.dup_and_mirror'
        bl_description = 'Duplicates selected vertices and mirrors to other side' + \
            "\nPerforms a Duplication, X Mirror, Remove Doubles, Flip Normals, and Mirror UV" + \
            "\nNOTE: UVs will be incorrect for overlapping geometry";
def execute(self, context):
selectedObject = context.active_object;
if selectedObject.type == 'MESH':
bpy.ops.mesh.duplicate_move(MESH_OT_duplicate={"mode":1}, TRANSFORM_OT_translate={"value":(0, 0, 0), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":False, "use_proportional_edit":False, "proportional_edit_falloff":'LINEAR', "proportional_size":0.00813916, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False})
mode = GetViewMode();
SetViewMode('OBJECT'); # Update Selected
SetViewMode(mode);
mesh = selectedObject.data;
mesh.update();
pivot = bpy.context.scene.tool_settings.transform_pivot_point;
cursorLoc = (
context.scene.cursor.location[0],
context.scene.cursor.location[1],
context.scene.cursor.location[2]
);
bpy.context.scene.tool_settings.transform_pivot_point = 'CURSOR';
context.scene.cursor.location = (selectedObject.location[0], selectedObject.location[1], selectedObject.location[2]);
bpy.ops.transform.mirror(orient_type='GLOBAL', orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), orient_matrix_type='GLOBAL', constraint_axis=(True, False, False), use_proportional_edit=False, proportional_edit_falloff='LINEAR', proportional_size=0.00813916, use_proportional_connected=True, use_proportional_projected=False)
#bpy.ops.mesh.normals_make_consistent(inside=False)
bpy.ops.mesh.flip_normals();
mode = GetViewMode();
SetViewMode('OBJECT'); # Update Selected
SetViewMode(mode);
bpy.ops.mesh.remove_doubles(threshold=0.00001, use_unselected=True);
bpy.ops.uv.muv_mirror_uv(axis='X')
bpy.context.scene.tool_settings.transform_pivot_point = pivot;
context.scene.cursor.location = cursorLoc;
return {'FINISHED'}
classlist.append(DMR_SMASH_DUPLICATEANDMIRROR);
class DMR_SMASH_DISPLAY_SEAMS(bpy.types.Operator):
bl_label = ""; bl_idname = 'dmr_smash.display_edge_seams'
def execute(self, context):
bpy.context.space_data.overlay.show_edge_seams = \
not bpy.context.space_data.overlay.show_edge_seams;
return {'FINISHED'}
classlist.append(DMR_SMASH_DISPLAY_SEAMS);
class DMR_SMASH_DISPLAY_SHARP(bpy.types.Operator):
bl_label = ""; bl_idname = 'dmr_smash.display_edge_sharps'
def execute(self, context):
bpy.context.space_data.overlay.show_edge_sharp = \
not bpy.context.space_data.overlay.show_edge_sharp;
return {'FINISHED'}
classlist.append(DMR_SMASH_DISPLAY_SHARP);
class DMR_SMASH_DISPLAY_CREASE(bpy.types.Operator):
bl_label = ""; bl_idname = 'dmr_smash.display_edge_creases'
def execute(self, context):
bpy.context.space_data.overlay.show_edge_crease = \
not bpy.context.space_data.overlay.show_edge_crease;
return {'FINISHED'}
classlist.append(DMR_SMASH_DISPLAY_CREASE);
class DMR_SMASH_DISPLAY_NORMALS(bpy.types.Operator):
bl_label = ""; bl_idname = 'dmr_smash.display_edge_normals'
def execute(self, context):
bpy.context.space_data.overlay.show_split_normals = \
not bpy.context.space_data.overlay.show_split_normals;
return {'FINISHED'}
classlist.append(DMR_SMASH_DISPLAY_NORMALS);
class DMR_SMASH_BAKESHAPEKEYS(bpy.types.Operator):
bl_label = "Bake Shape Keys"
bl_idname = 'dmr_smash.bake_shape_keys'
bl_description = 'Bakes Shape Keys of selected Objects';
keepFinalKey : bpy.props.BoolProperty(name = "Keep Final Key", default = True);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
layout.prop(self, "keepFinalKey");
layout.label(text = 'If enabled, result is kept as "Baked"');
def execute(self, context):
hits = 0;
oldactive = context.active_object;
if len(context.selected_objects) == 0:
self.report({'WARNING'}, "No objects selected");
return {'FINISHED'}
for obj in context.selected_objects:
if obj.type == "MESH":
# No Shape Keys exist for object
if obj.data.shape_keys == None:
continue;
shape_keys = obj.data.shape_keys.key_blocks;
count = len(shape_keys);
if count == 0:
continue;
bpy.context.view_layer.objects.active = obj;
# Create new Key using existing Keys' values
bpy.ops.object.shape_key_add(from_mix=True);
# Remove all Keys except for the newly created one
for i in range(0, count):
obj.active_shape_key_index = 0;
bpy.ops.object.shape_key_remove(all=False)
# Set new Key's name
if self.keepFinalKey:
shape_keys[0].name = "(Baked)";
# Remove new Key
else:
bpy.ops.object.shape_key_remove(all = True);
hits += 1;
if hits == 0:
self.report({'WARNING'}, "No objects modified");
else:
self.report({'INFO'}, "%d Object(s) Modified" % hits);
bpy.context.view_layer.objects.active = oldactive;
return {'FINISHED'}
classlist.append(DMR_SMASH_BAKESHAPEKEYS);
class DMR_SMASH_RESETSHAPEKEYSVERTEX(bpy.types.Operator):
bl_label = "Reset Vertex Shape Keys"
bl_idname = 'dmr_smash.reset_vertex_shape_keys'
bl_description = 'Sets shape key positions of selected vertices to "Basis" for all keys';
def execute(self, context):
oldactive = context.active_object;
if len(context.selected_objects) == 0:
self.report({'WARNING'}, "No objects selected");
return {'FINISHED'}
for obj in context.selected_objects:
if obj.type == "MESH":
# No Shape Keys exist for object
if obj.data.shape_keys == None: continue;
shape_keys = obj.data.shape_keys.key_blocks;
if len(shape_keys) == 0: continue;
keyindex = {};
basis = shape_keys[0];
bpy.context.view_layer.objects.active = obj;
oldactivekey = obj.active_shape_key_index;
for i in range(0, len(shape_keys)):
keyindex[ shape_keys[i].name ] = i;
# For all keys...
for sk in shape_keys:
obj.active_shape_key_index = keyindex[sk.name];
bpy.ops.mesh.blend_from_shape(shape = basis.name, add = False);
obj.active_shape_key_index = oldactivekey;
bpy.context.view_layer.objects.active = oldactive;
return {'FINISHED'}
classlist.append(DMR_SMASH_RESETSHAPEKEYSVERTEX);
class DMR_SMASH_SHAPEKEYZERO(bpy.types.Operator):
bl_label = "Clear Vertex Shape Key"
bl_idname = 'dmr_smash.zero_shape_key'
bl_description = 'Sets shape key positions of selected vertices to base Shape Key';
def execute(self, context):
oldactive = context.active_object;
objects = [x for x in context.selected_objects];
if oldactive != None and oldactive not in objects:
objects.append(oldactive);
if len(objects) == 0:
self.report({'WARNING'}, "No objects selected");
return {'FINISHED'}
for obj in objects:
if obj.type == "MESH":
# No Shape Keys exist for object
if obj.data.shape_keys == None: continue;
shape_keys = obj.data.shape_keys.key_blocks;
if len(shape_keys) == 0: continue;
bpy.context.view_layer.objects.active = obj;
bpy.ops.mesh.blend_from_shape(shape = shape_keys[0].name, blend = 1, add = False);
bpy.context.view_layer.objects.active = oldactive;
return {'FINISHED'}
classlist.append(DMR_SMASH_SHAPEKEYZERO);
class DMR_SMASH_DELETEINVERTEXGROUP(bpy.types.Operator):
bl_label = "Delete Vertices in Vertex Group"
bl_idname = 'dmr_smash.remove_vertices_in_group'
bl_description = 'Deletes vertices in named vertex group for selected objects';
groupname : bpy.props.StringProperty(name = "Group Name", default = "");
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
layout.prop(self, "groupname");
def execute(self, context):
selectedObject = context.active_object;
mode = SetViewMode('OBJECT'); # Update Selected
hits = 0;
objecthits = 0;
for selectedObject in context.selected_objects:
if selectedObject.type == 'MESH':
SetViewMode('OBJECT');
targetVerts = FindVertexGroupVertices(selectedObject, self.groupname);
if len(targetVerts) == 0:
continue;
objecthits += 1;
# Deselect all vertices
for v in selectedObject.data.vertices:
v.select = False;
# Select all vertices in group
for v in targetVerts:
v.select = True;
hits += 1;
# Delete selected
SetViewMode('EDIT');
bpy.ops.mesh.delete(type='VERT');
SetViewMode('OBJECT');
SetViewMode(mode);
if objecthits == 0:
self.report({"WARNING"}, "No objects with specified group found.");
elif hits == 0:
self.report({"WARNING"}, "No vertices in specified group found.");
else:
self.report({"INFO"}, "%d vertices modified total in %d objects" % (hits, objecthits));
return {'FINISHED'}
classlist.append(DMR_SMASH_DELETEINVERTEXGROUP);
if 1: # Vertex Matching Operators
def dmr_matchDraw(op, context, plural):
layout = op.layout;
section = layout.column();
section.prop(op, "matchInGroup");
if op.matchInGroup:
section = section.box();
section.label(text = "Source Vertex Group");
section.prop(op, "groupname");
section = layout.row();
section.label(text = "Match Distance");
section.prop(op, "testRange");
sourcename = "";
column = layout.column();
# Draw selected object names
if len(context.selected_objects) <= 1:
column.label(text = "<Need at least 2 objects selected>");
else:
objlist = "";
activeObject = context.object;
column.label(text = '"' + activeObject.name + '"' + "'s " + plural + " to...");
for selectedObject in context.selected_objects:
if selectedObject == activeObject:
continue;
if activeObject.type != 'MESH':
continue;
column.label(text = " >" + selectedObject.name);
def dmr_matchPre(self, context):
activeObject = context.object;
# Check if Mesh
if activeObject.type != 'MESH':
self.report({'WARNING'}, 'Active object "%s" is not a mesh' % activeObject.name);
return False;
# Check if there's selected
if len(context.selected_objects) <= 1:
self.report({'WARNING'}, 'Need at least 2 objects selected');
return False;
# Check for vertex group if needed
if self.matchInGroup:
if self.groupname not in activeObject.vertex_groups.keys():
self.report({'WARNING'}, 'Vertex Group "%s" not found in "%s"' % (self.groupname, activeObject.name));
return False;
return True;
def dmr_matchGetVGroups(self, context):
items = [];
for vg in context.object.vertex_groups:
items.append( (vg.name, vg.name, 'Only check proximity to vertices in "%s"' % vg.name) );
return items;
class DMR_SMASH_MATCHVERTEX(bpy.types.Operator):
bl_label = "Match Vertex"
bl_idname = 'dmr_smash.match_vertex'
bl_description = 'Matches vertex positions of selected objects to those of the active object based on closeness';
bl_description += '\nNOTE: Vertex offsets are based off of origin';
bl_description += '\nNOTE: Does NOT work if selected mesh has shape keys. Use Inner Vertex Match in that case';
matchInGroup : bpy.props.BoolProperty(name = "Only Match In Vertex Group", default = False);
groupname : bpy.props.EnumProperty(items = dmr_matchGetVGroups, name = "", default = None);
testRange : bpy.props.FloatProperty(name = "", default = 0.01, precision = 4);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
dmr_matchDraw(self, context, "vertices");
def execute(self, context):
if not dmr_matchPre(self, context):
return {'FINISHED'}
activeObject = context.object;
rangeAmount = self.testRange;
hits = 0;
modifiedhits = 0;
mode = SetViewMode('OBJECT'); # Update Selected
# Find source vertices
sourceVertices = activeObject.data.vertices;
if self.matchInGroup:
sourceVertices = FindVertexGroupVertices(activeObject, self.groupname);
print(len(sourceVertices));
# Find objects
for selectedObject in context.selected_objects:
if (selectedObject == activeObject) or (selectedObject.type) != 'MESH':
continue;
print("\t" + selectedObject.name)
# Match
for v in selectedObject.data.vertices:
sv = FindClosestVertex(v, sourceVertices, rangeAmount);
hits += 1;
if sv != None:
print(sv);
if (v.co[0] != sv.co[0]) or (v.co[1] != sv.co[1]) or (v.co[2] != sv.co[2]):
modifiedhits += 1;
v.co = (sv.co[0], sv.co[1], sv.co[2]);
print(v.co);
self.report({'INFO'}, 'Modified %d out of %d Vertices(s) sourced from "%s"' % (modifiedhits, hits, activeObject.name));
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_MATCHVERTEX);
class DMR_SMASH_MATCHVERTEXINNER(bpy.types.Operator):
bl_label = "Match Vertex Inner"
bl_idname = 'dmr_smash.match_vertex_inner'
        bl_description = 'Matches vertex positions of unselected vertices to those of selected vertices based on closeness';
testRange : bpy.props.FloatProperty(name = "", default = 0.01, precision = 4);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
section = layout.row();
section.label(text = "Match Distance");
section.prop(self, "testRange");
def execute(self, context):
activeObject = context.object;
rangeAmount = self.testRange;
hits = 0;
modifiedhits = 0;
mode = SetViewMode('OBJECT'); # Update Selected
SetViewMode('EDIT');
# Find source and target vertices
sourceVertices = [];
targetVertices = [];
for v in activeObject.data.vertices:
if v.select:
sourceVertices.append(v);
else:
targetVertices.append(v);
v.select = False;
bpy.context.scene.tool_settings.transform_pivot_point = 'CURSOR';
# Match
for v in targetVertices:
sv = FindClosestVertex(v, sourceVertices, rangeAmount);
if sv != None:
hits += 1;
if (v.co[0] != sv.co[0]) or (v.co[1] != sv.co[1]) or (v.co[2] != sv.co[2]):
modifiedhits += 1;
#v.co = (sv.co[0], sv.co[1], sv.co[2]);
#print(v.co);
v.select = True;
context.scene.cursor.location = (sv.co[0], sv.co[1], sv.co[2]);
bpy.ops.view3d.snap_selected_to_cursor(use_offset=False);
v.select = False;
self.report({'INFO'}, 'Modified %d out of %d Vertices(s)' % (modifiedhits, hits));
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_MATCHVERTEXINNER);
class DMR_SMASH_MATCHNORMALS(bpy.types.Operator):
bl_label = "Match Normals"
bl_idname = 'dmr_smash.match_normals'
bl_description = 'Matches normals of selected objects to those of the active object based on closeness of vertices' + \
            '\nUseful for correcting normals on detached face expression meshes.' + \
'\n"Auto Smooth" for selected meshes must be enabled for custom normals.' + \
'\nNOTE: Vertex offsets are based off of origin';
matchInGroup : bpy.props.BoolProperty(name = "Only Match In Vertex Group", default = False);
groupname : bpy.props.EnumProperty(items = dmr_matchGetVGroups, name = "", default = None);
testRange : bpy.props.FloatProperty(name = "", default = 0.01, precision = 4);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
dmr_matchDraw(self, context, "normals");
def execute(self, context):
if not dmr_matchPre(self, context):
return {'FINISHED'}
activeObject = context.object;
rangeAmount = self.testRange;
hits = 0;
mode = SetViewMode('OBJECT'); # Update Selected
# Find source vertices
sourceVertices = activeObject.data.vertices;
if self.matchInGroup:
sourceVertices = FindVertexGroupVertices(activeObject, self.groupname);
# Find objects
for selectedObject in context.selected_objects:
if selectedObject == activeObject or selectedObject.type != 'MESH':
continue;
# Match Normals
normals = [];
for v in selectedObject.data.vertices:
n = v.normal;
v2 = FindClosestVertex(v, sourceVertices, rangeAmount);
if v2 != None:
hits += 1;
n = v2.normal;
normals.append(n);
# Apply
selectedObject.data.normals_split_custom_set_from_vertices(normals);
self.report({'INFO'}, 'Matched Normals for %d Vertices(s) sourced from "%s"' % (hits, activeObject.name));
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_MATCHNORMALS);
class DMR_SMASH_MATCHWEIGHTS(bpy.types.Operator):
bl_label = "Match Weights"
bl_idname = 'dmr_smash.match_weights'
bl_description = 'Matches vertex weights of selected objects to those of the active object based on closeness';
bl_description += '\nNOTE: Vertex offsets are based off of origin';
matchInGroup : bpy.props.BoolProperty(name = "Only Match In Vertex Group", default = False);
groupname : bpy.props.EnumProperty(items = dmr_matchGetVGroups, name = "", default = None);
testRange : bpy.props.FloatProperty(name = "", default = 0.01, precision = 4);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
dmr_matchDraw(self, context, "weights");
def execute(self, context):
if not dmr_matchPre(self, context):
return {'FINISHED'}
activeObject = context.object;
rangeAmount = self.testRange;
hits = 0;
weighthits = 0;
mode = SetViewMode('OBJECT'); # Update Selected
# Find source vertices
sourceVertices = activeObject.data.vertices;
if self.matchInGroup:
sourceVertices = FindVertexGroupVertices(activeObject, self.groupname);
sourceGroups = activeObject.vertex_groups;
sourceGroupNames = [g.name for g in sourceGroups];
# Find objects
for selectedObject in context.selected_objects:
if selectedObject == activeObject or selectedObject.type != 'MESH':
continue;
otherGroups = selectedObject.vertex_groups;
                otherGroupsNames = [g.name for g in otherGroups];
# Match (v = other, sourceVertex = source)
for v in selectedObject.data.vertices:
sourceVertex = FindClosestVertex(v, sourceVertices, rangeAmount);
if sourceVertex != None:
ClearVertexWeights(v, otherGroups);
hits += 1;
for vge2 in sourceVertex.groups:
SetVertexWeight(v, vge2.weight, sourceGroups[vge2.group].name, otherGroups);
weighthits += 1;
self.report({'INFO'}, 'Matched %d Weights(s) for %s Vertice(s) sourced from "%s"' % (weighthits, hits, activeObject.name));
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_MATCHWEIGHTS);
# Weights =============================================================================
class DmrSmashToolsPanel_Weights(bpy.types.Panel):
bl_label = "Weights"
bl_idname = "DMR_SMASH_PT_WEIGHTS"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
bl_parent_id = 'DMR_SMASH_PT_MAINPANEL' # Nested Panels
def draw(self, context):
layout = self.layout
section = layout.column();
sub = section.column();
sub.operator('dmr_smash.correct_weights', icon = 'SOLO_OFF')
if PanelInEditMode():
sub = section.column();
sub.operator('dmr_smash.toggle_editmode_weights')
sub.operator('dmr_smash.mirror_selected_weights')
sub.operator('dmr_smash.weights_to_selected')
sub = section.column();
#sub.operator('dmr_smash.copy_right_groups')
#sub.operator('dmr_smash.clear_right_groups')
sub.operator('dmr_smash.remove_empty_groups')
sub.operator('dmr_smash.clear_weights_from_selected')
sub.operator('dmr_smash.clean_and_limit')
classlist.append(DmrSmashToolsPanel_Weights);
if 1: # Weight Operators
class DMR_SMASH_MIRRORVERTEXGROUP(bpy.types.Operator):
bl_label = "Mirror Selected Weights"
bl_idname = 'dmr_smash.mirror_selected_weights'
bl_description = 'Mirrors weights of selected vertices in group';
def execute(self, context):
bpy.ops.object.vertex_group_mirror(use_topology = False);
return {'FINISHED'}
classlist.append(DMR_SMASH_MIRRORVERTEXGROUP);
class DMR_SMASH_CLEARWEIGHTS(bpy.types.Operator):
bl_label = "Clear Groups From Selected"
bl_idname = 'dmr_smash.clear_weights_from_selected'
bl_description = 'Clears all vertex groups from selected vertices';
def execute(self, context):
selectedObject = context.active_object;
if selectedObject.type == 'MESH':
mode = SetViewMode('OBJECT'); # Update Selected
vertexGroups = selectedObject.vertex_groups;
# Remove Groups
for v in selectedObject.data.vertices:
if v.select:
ClearVertexWeights(v, vertexGroups);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_CLEARWEIGHTS);
class DMR_SMASH_CLEANWEIGHTS(bpy.types.Operator):
bl_label = "Clean Weights from Selected"
bl_idname = 'dmr_smash.clean_weights_from_selected'
bl_description = 'Cleans weights from selected objects';
def execute(self, context):
mode = SetViewMode('OBJECT'); # Update Selected
count = 0;
for obj in context.selected_objects:
if obj.type == 'MESH':
vertexGroups = obj.vertex_groups;
# Remove Groups
                    for v in obj.data.vertices:
                        if v.select:
                            # Collect zero-weight groups first; removing entries while iterating v.groups is unsafe
                            zeroWeightGroups = [g.group for g in v.groups if g.weight == 0];
                            for groupIndex in zeroWeightGroups:
                                vertexGroups[groupIndex].remove([v.index]);
                                count += 1;
self.report({'INFO'}, "Cleaned %s weights" % count);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_CLEANWEIGHTS);
class DMR_SMASH_REMOVEEMPTYGROUPS(bpy.types.Operator):
bl_label = "Remove Empty Groups"
bl_idname = 'dmr_smash.remove_empty_groups'
bl_description = 'Removes Vertex Groups with no weight data';
removeZero : bpy.props.BoolProperty(name = "Ignore Zero Weights", default = True);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
layout.prop(self, "removeZero");
def execute(self, context):
for selectedObject in context.selected_objects:
if selectedObject.type != 'MESH':
continue;
mode = SetViewMode('OBJECT'); # Update Selected
vertexGroups = selectedObject.vertex_groups;
targetGroups = [v for v in vertexGroups];
# Find and pop groups with vertex data
for v in selectedObject.data.vertices:
for g in v.groups:
realGroup = vertexGroups[g.group];
if realGroup in targetGroups:
if g.weight > 0 or not self.removeZero:
targetGroups.remove(realGroup);
if len(targetGroups) == 0:
break;
# Remove Empty Groups
count = len(targetGroups);
if count == 0:
self.report({'INFO'}, "No Empty Groups Found");
else:
for g in targetGroups:
vertexGroups.remove(g);
self.report({'INFO'}, "Found and removed %d empty group(s)" % count);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_REMOVEEMPTYGROUPS);
class DMR_SMASH_REMOVERIGHTSIDEGROUPS(bpy.types.Operator):
bl_label = "Remove Right Bone Groups"
bl_idname = 'dmr_smash.clear_right_groups'
bl_description = 'Removes vertex groups that have a name with "R" as the final letter\nExample: "ShoulderR" will be erased';
def execute(self, context):
selectedObject = context.active_object;
if selectedObject.type == 'MESH':
mode = SetViewMode('OBJECT'); # Update Selected
vertexGroups = selectedObject.vertex_groups;
                # Remove right-side groups (iterate over a snapshot; removing while iterating is unsafe)
                for g in [g for g in vertexGroups if lastLetter(g.name) == "R"]:
                    vertexGroups.remove(g)
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_REMOVERIGHTSIDEGROUPS);
class DMR_SMASH_CORRECTWEIGHTS(bpy.types.Operator):
bl_label = "Correct Right Weights"
bl_idname = 'dmr_smash.correct_weights'
bl_description = "Syncs right side weights to left side's. Works for multiple objects.";
bl_description += "\nCreates new vertex groups for mirrored sides where needed."
bl_description += "\nMagic formula for all your weight mirroring woes (Assuming your mesh is centered about its pivot)."
def execute(self, context):
print("-" * 80)
# BY "RIGHT" I MEAN FROM THE MODEL's POINT OF VIEW!
mode = GetViewMode();
SetViewMode('OBJECT'); # Update Selected
for obj in context.selected_objects:
if obj.type != 'MESH':
continue;
# Init data
sourceVertices = obj.data.vertices;
vertexList = [x for x in obj.data.vertices];
vertexGroups = {}
for vg in obj.vertex_groups:
vertexGroups[vg.name] = vg;
vertexGroups[vg.index] = vg;
# Make swapped groups
oppositeGroups = {};
for vg in obj.vertex_groups:
if lastLetter(vg.name) in ["R", "L"]:
swapname = switchName(vg.name);
print("%s -> %s" % (vg.name, swapname));
# Add vertex group if doesn't exist
if swapname not in vertexGroups.keys():
newgroup = obj.vertex_groups.new(name = swapname);
vertexGroups[newgroup.index] = newgroup;
vertexGroups[newgroup.name] = newgroup;
oppositeGroups[vg.name] = vertexGroups[swapname];
oppositeGroups[vg.index] = vertexGroups[swapname];
else:
oppositeGroups[vg.name] = vg;
oppositeGroups[vg.index] = vg;
print(vg.name);
# Run through vertices
hits = 0;
for v in obj.data.vertices:
# Vertex has been checked
if v not in vertexList:
continue;
# Vertex is centered (No mirror chance)
if v.co[0] == 0.0:
hits += 1;
vertexList.remove(v);
continue;
# Vertex is on right side
if v.co[0] < 0.0:
vertexList.remove(v);
vx = -v.co[0];
# Find opposite vertex
for vSource in vertexList:
if vSource.co[0] == vx:
vertexList.remove(vSource);
hits += 2;
# Clear all weights for right vert
ClearVertexWeights(v, vertexGroups);
# For each group in left (source) vertex....
for vge2 in vSource.groups:
# Update weights for opposite group
oppositeGroups[vge2.group].add([v.index], vge2.weight, 'ADD');
break;
info = "%d / %d Vertex Hit(s) for %s" % (hits, len(obj.data.vertices), obj.name);
self.report({'INFO'}, info);
# Clean Weights
                # Collect zero-weight groups first; removing entries while iterating is unsafe
                for v in obj.data.vertices:
                    zeroWeightGroups = [vge.group for vge in v.groups if vge.weight == 0.0];
                    for groupIndex in zeroWeightGroups:
                        vertexGroups[groupIndex].remove([v.index]);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_CORRECTWEIGHTS);
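    # For reference, the mirroring above relies on the lastLetter/switchName
    # helpers defined earlier in this file. A sketch of the assumed behavior
    # (illustrative only):
    #
    #   def switchName(name):
    #       # "ShoulderR" <-> "ShoulderL"; other names are returned unchanged
    #       if name and name[-1] in ("L", "R"):
    #           return name[:-1] + ("L" if name[-1] == "R" else "R")
    #       return name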
class DMR_SMASH_WEIGHTSTOSELECTED(bpy.types.Operator):
bl_label = "Match Unselected Weights"
bl_idname = 'dmr_smash.weights_to_selected'
bl_description = "Matches unselected vertex weights to selected vertices.";
def execute(self, context):
print("-" * 80)
# BY "RIGHT" I MEAN FROM THE MODEL's POINT OF VIEW!
activeObject = context.active_object;
if activeObject == None:
self.report({'ERROR'}, "No object selected");
return {'FINISHED'}
if activeObject.type != 'MESH':
self.report({'ERROR'}, "Active object is not a mesh");
return {'FINISHED'}
mode = activeObject.mode;
SetViewMode('OBJECT'); # Update Selected
vertexSelected = [];
vertexUnselected = [];
vertexGroups = activeObject.vertex_groups;
groupCount = len(vertexGroups);
# Find selected and unselected
for v in activeObject.data.vertices:
if v.select:
vertexSelected.append(v);
else:
vertexUnselected.append(v);
# Match weights
hits = 0;
for v in vertexSelected:
otherVertices = FindClosestVertices(v, vertexUnselected, 0.01);
for v2 in otherVertices:
vertexUnselected.remove(v2);
# Clear all weights for other vert
for vge2 in v2.groups:
if vge2.group < 0 or vge2.group >= groupCount:
continue;
vertexGroups[vge2.group].remove([v2.index]);
# For each group in selected vertex...
for vge in v.groups:
# Update weights for unselected group
vertexGroups[vge.group].add([v2.index], vge.weight, 'ADD');
hits += 1;
self.report({'INFO'}, "%d Vertex Weights Matched" % hits);
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_WEIGHTSTOSELECTED);
class DMR_SMASH_LEFTGROUPSTORIGHT(bpy.types.Operator): # ------------------------------
bl_label = "Copy Left Weights to Right"
bl_idname = 'dmr_smash.copy_right_groups'
bl_description = 'Takes groups with a final letter of "L", \
\ncreates a new group with a final letter of "R",\
\nand mirrors the weights.';
def execute(self, context):
selectedObject = context.active_object;
if selectedObject.type == 'MESH':
mode = SetViewMode('OBJECT'); # Update Selected
print(selectedObject.name)
vertices = selectedObject.data.vertices;
vertexGroups = selectedObject.vertex_groups;
oppositeGroup = {}
# Free Right Side Vertices
            for g in [g for g in vertexGroups if lastLetters(g.name)[0] == "R"]:
                vertexGroups.remove(g)
# Create Halved Group
for g in vertexGroups:
if lastLetters(g.name)[0] == "L":
oppositeGroup[g.index] = vertexGroups.new(name = switchName(g.name) )
validVertices = [];
oppositeVertex = {}
# Find Opposite Vertices
for v in vertices:
if v in validVertices:
continue;
v.select = False;
for v2 in vertices:
if v.undeformed_co.x == -v2.undeformed_co.x \
and v.undeformed_co.y == v2.undeformed_co.y \
and v.undeformed_co.z == v2.undeformed_co.z:
validVertices.append(v)
validVertices.append(v2)
oppositeVertex[v] = v2;
oppositeVertex[v2] = v;
break;
# Apply Weights
for v in validVertices:
for g in v.groups:
if lastLetters(vertexGroups[g.group].name)[0] == "L":
v.select = True
gIndex = vertexGroups[g.group].index;
oppVertex = oppositeVertex[v]
oppositeGroup[g.group].add([oppVertex.index], g.weight, 'ADD');
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_LEFTGROUPSTORIGHT);
class DMR_SMASH_LIMITANDCLEAN(bpy.types.Operator):
bl_label = "Clean and Limit"
bl_idname = 'dmr_smash.clean_and_limit'
bl_description = 'Removes weights with value of 0 and limits max weights to 4';
def execute(self, context):
        mode = SetViewMode('OBJECT'); # Update Selected
        for selectedObject in context.selected_objects:
            if selectedObject.type == 'MESH':
                SetActiveObject(selectedObject); # Operators below act on the active object
                SetViewMode('WEIGHT_PAINT');
                bpy.ops.paint.vert_select_all(action='SELECT');
                bpy.ops.object.vertex_group_clean(group_select_mode='ALL', limit=0, keep_single=False);
                bpy.ops.object.vertex_group_limit_total(group_select_mode='ALL', limit=4);
                bpy.ops.paint.vert_select_all(action='DESELECT')
                SetViewMode('OBJECT');
SetViewMode(mode);
return {'FINISHED'}
classlist.append(DMR_SMASH_LIMITANDCLEAN);
# Bones =============================================================================
class DmrSmashToolsPanel_Bones(bpy.types.Panel):
bl_label = "Bones"
bl_idname = "DMR_SMASH_PT_BONES"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
bl_parent_id = 'DMR_SMASH_PT_MAINPANEL' # Nested Panels
def draw(self, context):
layout = self.layout
section = layout.column();
section = section.column();
section.operator('dmr_smash.clear_pose_transform')
section.operator('dmr_smash.bone_match_mirror')
classlist.append(DmrSmashToolsPanel_Bones);
if 1: # Operators
class DMR_SMASH_CLEARPOSETRANSFORM(bpy.types.Operator):
bl_label = "Clear Selected Bones' Keyframes"
bl_idname = 'dmr_smash.clear_pose_transform'
bl_description = 'Clears Location/Rotation/Scale keyframes from selected pose bones' + \
"\nNOTE: Has not been tested in a while. May not work";
isSimple : bpy.props.BoolProperty(name = "Simple", default = True);
simpleLoc : bpy.props.BoolProperty(name = "Location", default = False);
simpleRot : bpy.props.BoolProperty(name = "Rotation", default = False);
simpleSca : bpy.props.BoolProperty(name = "Scale", default = False);
locX : bpy.props.BoolProperty(name = "Location X", default = False);
locY : bpy.props.BoolProperty(name = "Location Y", default = False);
locZ : bpy.props.BoolProperty(name = "Location Z", default = False);
rotX : bpy.props.BoolProperty(name = "Rotation X", default = False);
rotY : bpy.props.BoolProperty(name = "Rotation Y", default = False);
rotZ : bpy.props.BoolProperty(name = "Rotation Z", default = False);
rotW : bpy.props.BoolProperty(name = "Rotation W", default = False);
scaX : bpy.props.BoolProperty(name = "Scale X", default = False);
scaY : bpy.props.BoolProperty(name = "Scale Y", default = False);
scaZ : bpy.props.BoolProperty(name = "Scale Z", default = False);
processWhole : bpy.props.BoolProperty(name = "Process Entire Action", default = False);
keyframeRangeMin : bpy.props.IntProperty(name = "", default = 0);
keyframeRangeMax : bpy.props.IntProperty(name = "", default = 60);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
layout.prop(self, "isSimple");
# Transforms
if self.isSimple:
box = layout.box();
box.label(text = "Transform Type");
row = box.row();
row.prop(self, "simpleLoc");
row.prop(self, "simpleRot");
row.prop(self, "simpleSca");
else:
box = layout.box();
box.label(text = "Transform Type & Channel");
row = box.row();
row.prop(self, "locX"); row.prop(self, "rotX"); row.prop(self, "scaX");
row = box.row();
row.prop(self, "locY"); row.prop(self, "rotY"); row.prop(self, "scaY");
row = box.row();
row.prop(self, "locZ"); row.prop(self, "rotZ"); row.prop(self, "scaZ");
row = box.row();
row.prop(self, "rotW");
# Range
layout.prop(self, "processWhole");
if not self.processWhole:
section = layout.box();
row = section.row();
row.label(text = "Keyframe Range");
row.prop(self, "keyframeRangeMin");
row.prop(self, "keyframeRangeMax");
def execute(self, context):
context = bpy.context;
selectedObject = context.active_object;
# Checks
bail = False;
if selectedObject.type != 'ARMATURE':
self.report({'ERROR'}, "Selected Object is not an Armature");
bail = True;
action = selectedObject.animation_data.action;
if action == None:
self.report({'ERROR'}, "Invalid action");
bail = True;
targetTransform = [[], [], []];
if self.isSimple:
if self.simpleLoc:
targetTransform[0].extend([0, 1, 2]);
if self.simpleRot:
targetTransform[1].extend([0, 1, 2, 3]);
if self.simpleSca:
targetTransform[2].extend([0, 1, 2]);
else:
if self.locX:
targetTransform[0].append(0);
if self.locY:
targetTransform[0].append(1);
if self.locZ:
targetTransform[0].append(2);
if self.rotX:
targetTransform[1].append(0);
if self.rotY:
targetTransform[1].append(1);
if self.rotZ:
targetTransform[1].append(2);
if self.rotW:
targetTransform[1].append(3);
if self.scaX:
targetTransform[2].append(0);
if self.scaY:
targetTransform[2].append(1);
if self.scaZ:
targetTransform[2].append(2);
if len(targetTransform[0]) == 0 and len(targetTransform[1]) == 0 and len(targetTransform[2]) == 0:
self.report({'WARNING'}, 'No transforms selected for "' + self.bl_label + '"');
bail = True;
minFrame = self.keyframeRangeMin;
maxFrame = self.keyframeRangeMax;
if minFrame > maxFrame:
self.report({'WARNING'}, 'Range corrected for operation "' + self.bl_label + '"');
                minFrame, maxFrame = maxFrame, minFrame;
if bail:
return {'FINISHED'}
# Determine what to yeet
transformdict = {"location" : 0, "rotation_quaternion" : 1, "scale" : 2}
transformnamelist = ["Loc", "Rot", "Sca"];
print("Action: " + action.name);
# Execution
objectmode = SetViewMode('OBJECT'); # Update Selected
selectedBones = [];
for bone in selectedObject.data.bones:
if bone.select and not bone.hide:
selectedBones.append(bone.name);
for fcu in action.fcurves:
#print(fcu.data_path + " channel " + str(fcu.array_index))
bonename = "";
path = fcu.data_path;
pos = [-1, 0];
# Find string positions
for i in range(0, len(path)):
if path[i] == '"':
# Start of string
if pos[0] == -1:
pos[0] = i + 1;
else:
pos[1] = i;
transformname = path[pos[1] + 3:];
# Skip transforms that aren't in dict
if transformname not in transformdict:
continue;
bonename = path[pos[0]:pos[1]];
                # Skip bones that aren't selected
if bonename not in selectedBones:
continue;
transformtype = transformdict[transformname];
transformchannel = fcu.array_index;
# Skip if transform is not target
if transformchannel not in targetTransform[transformtype]:
continue;
print(bonename + " " + transformnamelist[transformtype] + "[" + str(transformchannel) + "]" + ", Keyframes: " + str(len(fcu.keyframe_points)));
# Delete all keyframes for given transform channel
if self.processWhole:
print("- Clearing all keyframes");
action.fcurves.remove(fcu);
# Delete frames in range
else:
keyframelist = [];
for k in fcu.keyframe_points:
keyframelist.append(k);
for k in keyframelist:
print("- Keyframe %s" % k.co[0]);
if k.co[0] >= minFrame and k.co[0] <= maxFrame:
fcu.keyframe_points.remove(k);
print("=" * 40);
SetViewMode(objectmode);
return {'FINISHED'}
classlist.append(DMR_SMASH_CLEARPOSETRANSFORM);
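    # For reference, pose-bone F-Curves use data paths of the form
    # 'pose.bones["BoneName"].location'; the quote scan above extracts the
    # bone name and the trailing transform name. An equivalent sketch
    # (illustrative only):
    #
    #   def parseBonePath(data_path):
    #       if data_path.startswith('pose.bones["'):
    #           rest = data_path[len('pose.bones["'):]
    #           bone_name, _, tail = rest.partition('"')
    #           return bone_name, tail.lstrip('].')
    #       return None, None
    #
    # parseBonePath('pose.bones["KneeR"].rotation_quaternion') would be
    # expected to yield ("KneeR", "rotation_quaternion").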
class DMR_SMASH_BONE_MATCH_MIRROR(bpy.types.Operator):
bl_label = "Match Bone Mirror"
bl_idname = 'dmr_smash.bone_match_mirror'
bl_description = 'Matches positions of selected bones with their mirror based on the last letter\nEx: "KneeR" will be matched to "KneeL"\nNOTE: Does not calculate roll';
def execute(self, context):
selectedObject = context.active_object;
            if selectedObject.type != 'ARMATURE':
                self.report({'WARNING'}, 'No armature selected');
                return {'FINISHED'}
            objectmode = SetViewMode('OBJECT'); # Update Selected
            SetViewMode(objectmode);
print("> Reading Armature \"" + selectedObject.name + "\"...")
editBones = selectedObject.data.edit_bones;
targetLetter = None;
hits = 0;
print(len(editBones));
# Find selected bones
for bone in editBones:
if bone.select:
if lastLetters(bone.name)[0] not in ["L", "R"]:
continue;
targetName = switchName(bone.name);
if targetName in editBones:
mirrorBone = editBones[targetName];
print("%s -> %s" % (bone.name, mirrorBone.name));
bone.head.x = -mirrorBone.head.x;
bone.head.y = mirrorBone.head.y;
bone.head.z = mirrorBone.head.z;
bone.tail.x = -mirrorBone.tail.x;
bone.tail.y = mirrorBone.tail.y;
bone.tail.z = mirrorBone.tail.z;
hits += 1;
            self.report({'INFO'}, 'Matched %d Bone(s)' % hits);
return {'FINISHED'}
classlist.append(DMR_SMASH_BONE_MATCH_MIRROR);
# Etc. =============================================================================
class DmrSmashToolsPanel_Etc(bpy.types.Panel): # ------------------------------
bl_label = "Etc."
bl_idname = "DMR_SMASH_PT_ETC"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
bl_parent_id = 'DMR_SMASH_PT_MAINPANEL' # Nested Panels
def draw(self, context):
layout = self.layout
if PanelInEditMode():
section = layout.column();
section.operator('dmr_smash.toggle_editmode_weights')
section = layout.column();
section.operator('dmr_smash.reset_3dcursor', icon = 'PIVOT_CURSOR')
section.operator('dmr_smash.image_reload', icon = 'IMAGE_DATA')
section.operator('dmr_smash.play_anim', icon = 'PLAY')
section.operator('dmr_smash.toggle_pose', icon = 'ARMATURE_DATA')
section.operator('dmr_smash.object_remove_unused_materials', icon = 'MATERIAL')
#section.operator('dmr_smash.prime_for_smd', icon = 'FILE_CACHE')
section.operator('dmr_smash.toggle_view_transform', icon = 'RESTRICT_RENDER_OFF')
classlist.append(DmrSmashToolsPanel_Etc);
if 1: # Etc. Operators
class DMR_SMASH_PLAYANIM(bpy.types.Operator):
bl_label = "Play/Pause Animation"
bl_idname = 'dmr_smash.play_anim'
bl_description = 'Toggles animation playback';
def execute(self, context):
bpy.ops.screen.animation_play();
return {'FINISHED'}
classlist.append(DMR_SMASH_PLAYANIM);
class DMR_SMASH_TOGGLEPOSE(bpy.types.Operator):
bl_label = "Toggle Pose Mode"
bl_idname = 'dmr_smash.toggle_pose'
bl_description = 'Toggles Pose Mode for all armatures';
#context.scene.objects["_Armature"].data.pose_position
def execute(self, context):
for o in context.scene.objects:
if o.type == 'ARMATURE':
armature = o.data;
if armature.pose_position == 'REST':
armature.pose_position = 'POSE';
else:
armature.pose_position = 'REST'
return {'FINISHED'}
classlist.append(DMR_SMASH_TOGGLEPOSE);
class DMR_SMASH_IMGRELOAD(bpy.types.Operator):
bl_label = "Reload All Images"
bl_idname = 'dmr_smash.image_reload'
bl_description = 'Reloads all images from files';
def execute(self, context):
for image in bpy.data.images:
image.reload()
return {'FINISHED'}
classlist.append(DMR_SMASH_IMGRELOAD);
class DMR_SMASH_RESET3DCURSOR(bpy.types.Operator): # ------------------------------
bl_label = "Reset 3D Cursor"
bl_idname = 'dmr_smash.reset_3dcursor'
bl_description = 'Resets 3D cursor to (0, 0, 0)';
def execute(self, context):
context.scene.cursor.location = (0.0, 0.0, 0.0)
return {'FINISHED'}
classlist.append(DMR_SMASH_RESET3DCURSOR);
class DMR_SMASH_CLEANMATERIALS(bpy.types.Operator): # ------------------------------
bl_label = "Clean Materials"
bl_idname = 'dmr_smash.clean_materials'
bl_description = 'Removes materials that have no users';
def execute(self, context):
targetMaterials = [m for m in bpy.data.materials];
print("-" * 80)
# Find used materials
for obj in context.scene.objects:
for m in obj.material_slots:
if m.material in targetMaterials:
targetMaterials.remove(m.material);
# Delete unused materials
hits = len(targetMaterials);
if hits == 0:
self.report({'INFO'}, 'No materials removed');
else:
for m in targetMaterials:
print('Removing "%s"' % m.name);
bpy.data.materials.remove(m)
self.report({'INFO'}, 'Removed %s Materials' % hits);
return {'FINISHED'}
classlist.append(DMR_SMASH_CLEANMATERIALS);
class DMR_SMASH_SMDPRIME(bpy.types.Operator): # ------------------------------
bl_label = "Prime Data for SMD"
bl_idname = 'dmr_smash.prime_for_smd'
bl_description = 'Targets objects with given prefix.';
bl_description += '\nRenames meshes to their object name with a lowercase starting letter' + \
'\nRenames object materials to the object name';
targetname : bpy.props.StringProperty(name = "Model Prefix", default = "Wiz");
charname : bpy.props.StringProperty(name = "VIS Name", default = "zelda");
ophelp : bpy.props.BoolProperty(name = "Help", default = False);
def invoke(self, context, event):
wm = context.window_manager;
return wm.invoke_props_dialog(self);
def draw(self, context):
layout = self.layout;
layout.label(text = "Prefix of object names");
layout.prop(self, "targetname");
layout.label(text = "Name to replace prefix with for VIS objects");
layout.prop(self, "charname");
box = layout.box().column();
box.prop(self, "ophelp");
if self.ophelp:
box.label(text = "Material names are created based on");
box.label(text = "the case of the first letter of an object's name.");
box.label(text = "Uppcase -> Mat Name = Object name");
box.label(text = "Lowcase -> Mat Name = Prefix swapped with VIS name");
box = layout.box().column();
box.label(text = 'Ex: with Model Prefix = "Wiz", VIS Name = "zelda"');
box.label(text = '"Wiz_Hair" -> "Wiz_Hair"');
box.label(text = '"wiz_Hot_VIS_O_OBJShape" -> "zelda_Hot_VIS_O_OBJShape"');
def execute(self, context):
namestart = str(self.targetname[0]);
TARGETNAME = namestart.upper() + self.targetname[1:];
TARGETNAME2 = namestart.lower() + self.targetname[1:];
CHARNAME = self.charname;
print("=" * 100)
matDict = {};
matCountDict = {};
targetObjects = [];
            def getNodeCount(nodes):
                # Counts leaf nodes recursively (a childless node counts as 1)
                if len(nodes) == 0:
                    return 1;
                return sum(getNodeCount(n) for n in nodes);
# Find Material Names
for obj in bpy.data.objects:
if obj.type != 'MESH': continue;
if obj.name[:len(TARGETNAME)] != TARGETNAME and \
obj.name[:len(TARGETNAME)] != TARGETNAME2: continue;
targetObjects.append(obj);
mat = obj.active_material;
if mat == None: continue;
if mat.name not in matCountDict:
matCountDict[mat.name] = [obj.name];
matDict[mat.name] = mat;
else:
matCountDict[mat.name].append(obj.name);
obj.data.name = "mesh_" + obj.name;
obj.select_set(False);
#print("%s: %s" % (obj.name, obj.active_material));
# Report Materials
print("%d Materials Found" % len(matCountDict));
problemMat = [];
for name in matCountDict.keys():
if len(matCountDict[name]) > 1:
problemMat.append(matDict[name]);
infostring = "";
infotype = 'INFO';
if len(problemMat) != 0:
print("%d Non-Unique Materials found" % len(problemMat));
print('Click the "New Material" button in the Material tab for the following materials' +
'\nafter making Node Groups for node structures');
problemnames = "";
for mat in problemMat:
print(mat.name);
for objname in matCountDict[mat.name]:
print("\t%s" % objname)
problemnames += objname + ", ";
# Remove problem objects from material check
for obj in targetObjects:
if obj.name == objname:
obj.select_set(True);
targetObjects.remove(obj)
break;
infotype = 'WARNING';
infostring = " | These objects have non-unique materials: " + problemnames;
#return {'FINISHED'}
# Update Material Names & Report Objects
for obj in targetObjects:
obj.select_set(True);
newname = TARGETNAME + obj.name[len(TARGETNAME):];
# Object name has lowercase of target name
if obj.name[:len(TARGETNAME2)] == TARGETNAME2:
newname = CHARNAME + obj.name[len(TARGETNAME2):];
                # Skip objects without an active material, then report if the name changes
                if obj.active_material == None:
                    continue;
                if obj.active_material.name != newname:
                    print('Changing material "%s" of Object "%s" to "%s"' % (obj.active_material.name, obj.name, newname));
                obj.active_material.name = newname;
print(newname)
infostring = ("Modified %d object names" % len(targetObjects)) + infostring;
self.report({infotype}, infostring);
return {'FINISHED'}
classlist.append(DMR_SMASH_SMDPRIME);
class DMR_SMASH_TOGGLEVIEWTRANSFORM(bpy.types.Operator): # ------------------------------
bl_label = "Toggle View Transform"
bl_idname = 'dmr_smash.toggle_view_transform'
bl_description = 'Toggles view transform between "Raw" and "Filmic". \nUse "Raw" when rendering to speed up rendering time';
def execute(self, context):
if bpy.context.scene.view_settings.view_transform == 'Raw':
bpy.context.scene.view_settings.view_transform = 'Filmic'
else:
bpy.context.scene.view_settings.view_transform = 'Raw'
return {'FINISHED'}
classlist.append(DMR_SMASH_TOGGLEVIEWTRANSFORM);
class DMR_SMASH_EDITMODEWEIGHTS(bpy.types.Operator): # ------------------------------
bl_label = "Toggle Edit Mode Weights"
bl_idname = 'dmr_smash.toggle_editmode_weights'
bl_description = 'Toggles Weight Display for Edit Mode';
def execute(self, context):
bpy.context.space_data.overlay.show_weight = not bpy.context.space_data.overlay.show_weight;
return {'FINISHED'}
classlist.append(DMR_SMASH_EDITMODEWEIGHTS);
class DMR_SMASH_REMOVEUNUSEDMATERIALSLOTS(bpy.types.Operator):
bl_label = "Remove Unused Mat Slots"
bl_idname = 'dmr_smash.object_remove_unused_materials'
bl_description = 'Removes unused material slots from selected objects';
def execute(self, context):
for obj in context.selected_objects:
if obj.type == "MESH":
SetActiveObject(obj);
bpy.ops.object.material_slot_remove_unused()
return {'FINISHED'}
classlist.append(DMR_SMASH_REMOVEUNUSEDMATERIALSLOTS);
# UV Panel =============================================================================
class DmrSmashToolsUVPanel(bpy.types.Panel): # ------------------------------
bl_label = "Dmr Smash Tools UV"
bl_idname = "DMR_SMASH_PT_UVPANEL"
bl_space_type = 'IMAGE_EDITOR'
bl_region_type = 'UI'
bl_category = "Dmr" # Name of sidebar
def draw(self, context):
layout = self.layout
section = layout.box().column()
section.label(text = "Etc", icon = 'SOLO_OFF')
section.operator('dmr_smash.image_reload', icon = 'IMAGE_DATA')
section.operator('dmr_smash.toggle_view_transform', icon = 'RESTRICT_RENDER_OFF')
classlist.append(DmrSmashToolsUVPanel);
# Register =====================================================
def register():
for op in classlist:
bpy.utils.register_class(op);
def unregister():
for op in classlist:
bpy.utils.unregister_class(op);
if __name__ == "__main__":
register()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from nose import tools as nose
from tests.integration.resource import ResourceTestCase
class AlbumResourceTestCase(ResourceTestCase):
"""
GET /albums/ [artist=<int>]
200 OK
401 Unauthorized
POST /albums/ name=<str> [year=<int>] [cover_url=<str>]
201 Created
400 Bad Request
401 Unauthorized
GET /albums/<id>/ [fulltree=<bool>]
200 OK
401 Unauthorized
404 Not Found
PUT /albums/<id>/ [name=<str>] [year=<int>] [cover_url=<str>]
204 No Content
400 Bad Request
401 Unauthorized
404 Not Found
DELETE /albums/<id>/
204 No Content
401 Unauthorized
404 Not Found
"""
def get_payload(self):
return {
'name': "Keep rockin'",
}
# Unauthorized
def test_unauthorized_access(self):
        resp = self.get('/albums/', authenticate=False)
        nose.eq_(resp.status_code, 401)
        resp = self.get('/albums/1/', authenticate=False)
        nose.eq_(resp.status_code, 401)
        # POST
        payload = self.get_payload()
        resp = self.post('/albums/', data=payload, authenticate=False)
        nose.eq_(resp.status_code, 401)
        # PUT
        payload = self.get_payload()
        resp = self.put('/albums/1/', data=payload, authenticate=False)
        nose.eq_(resp.status_code, 401)
        # DELETE
        resp = self.delete('/albums/1/', authenticate=False)
        nose.eq_(resp.status_code, 401)
# Authorized
def test_album_base_resource(self):
resp = self.get('/albums/')
nose.eq_(resp.status_code, 200)
        nose.ok_('item_count' in resp.json)
        nose.ok_('items' in resp.json)
        nose.ok_('page' in resp.json)
        nose.ok_('page_size' in resp.json)
        nose.ok_('pages' in resp.json)
def test_nonexistent_album(self):
resp = self.get('/albums/123/')
nose.eq_(resp.status_code, 404)
def test_fulltree(self):
resp = self.get('/albums/%s/?fulltree=1' % self.album_pk)
nose.eq_(resp.status_code, 200)
def test_album_creation(self):
resp = self.post('/albums/', data=self.get_payload())
nose.eq_(resp.status_code, 201)
_resp = self.post('/albums/', data=self.get_payload())
# Albums with the same name for the same artist are allowed.
nose.eq_(_resp.status_code, 201)
_resp = self.post('/albums/', data={'name': ''})
# But albums without name are not allowed.
nose.eq_(_resp.status_code, 400)
def test_album_update(self):
url = '/albums/%s/' % self.album.pk
old_name = self.album.name
resp = self.put(url, data={'name': 'Rock no more'})
        nose.eq_(resp.status_code, 204)
        resp = self.get(url)
        nose.eq_(resp.status_code, 200)
        nose.ok_(resp.json['name'] != old_name)
        resp = self.put(url, data={'name': ''})
        nose.eq_(resp.status_code, 400)
def test_album_delete(self):
resp = self.post('/albums/', data={'name': 'derp'})
nose.eq_(resp.status_code, 201)
album_url = '/albums/%s/' % resp.json['id']
resp = self.delete(album_url)
nose.eq_(resp.status_code, 204)
resp = self.get(album_url)
nose.eq_(resp.status_code, 404)
resp = self.delete(album_url)
nose.eq_(resp.status_code, 404)
|
nilq/baby-python
|
python
|
"""
train.py
Entry point for training Hasse diagrams.
"""
from ehreact.train import calculate_diagram
def train(args):
"""
Computes a Hasse diagram based on the inputted arguments
Parameters
----------
args: Namespace
Namespace of arguments.
"""
if not args.quiet:
print(args)
# Read in positive data:
with open(args.data_path) as f:
smiles = f.read().splitlines()
_ = calculate_diagram(
smiles=smiles,
verbose=args.verbose,
quiet=args.quiet,
compute_aam=args.compute_aam,
save_path=args.save_path,
save_plot=args.save_plot,
train_mode=args.train_mode,
seed=args.seed,
no_props=args.no_props,
plot_only_branches=args.plot_only_branches,
temp_dir_img=args.temp_dir_img,
)
|
nilq/baby-python
|
python
|
# grid relative
from .fl_controller import FLController
processes = FLController()
|
nilq/baby-python
|
python
|
'''
Name: Andre Devay Torres Gomes
NUSP: 10770089
'''
# Main function: runs the interface and calls the other functions
def main():
global solucao
    n = int(input('Enter the number N (between 4 and 26) of queens you want on the NxN board: '))
matriz = []
solucao = []
for i in range(n):
matriz.append([' ']*(n))
resposta(matriz, 0, n)
    print('-------------------------------------------------------------')
    print('Solutions in board coordinates (from 1 to 26 and from A to Z):')
print()
print()
for item in solucao:
print (item)
print()
print("Total de soluções possíveis = {}".format(len(solucao)))
# Finds solutions to the N-Queens problem (using recursion)
def resposta(matriz, col, n):
if col >= n:
return
for w in range(n):
if checagem_espacos(matriz, w, col, n):
matriz[w][col] = 1
if col == n - 1:
salvar(matriz)
matriz[w][col] = 0
return
resposta(matriz, col + 1, n)
matriz[w][col] = 0
# Checks whether a queen can be placed on a given square without breaking game rules (called from resposta())
def checagem_espacos(matriz, lin, col, n):
for colX in range(col):
if matriz[lin][colX] == 1:
return False
linX = lin
colX = col
# Diagonal (1)
while linX >= 0 and colX >= 0:
if matriz[linX][colX] == 1:
return False
linX = linX - 1
colX = colX - 1
linS = lin
colS = col
# Diagonal (2)
while linS < n and colS >= 0:
if matriz[linS][colS] == 1:
return False
linS = linS + 1
colS = colS - 1
return True
# Converts a found solution into board coordinates and appends it to the solution list
def salvar(matriz):
global solucao
conversao = {0: 'A', 1: 'B', 2: 'C', 3: 'D', 4: 'E', 5: 'F', 6: 'G', 7: 'H', 8: 'I', 9: 'J', 10: 'K', 11: 'L', 12: 'M',
13: 'N', 14: 'O', 15: 'P', 16: 'Q', 17: 'R', 18: 'S', 19: 'T', 20: 'U', 21: 'V', 22: 'W', 23: 'X', 24: 'Y', 25: 'Z'}
parcial = []
cont1 = -1
cont2 = -1
for linha in matriz:
cont1 = cont1 + 1
for coluna in linha:
cont2 = cont2 + 1
if matriz[cont1][cont2] == 1:
stringzando = str(cont2 + 1)
ponto = conversao[cont1] + stringzando
parcial.append(ponto)
cont2 = -1
solucao.append(parcial)
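# Worked example of the conversion above (illustrative): a queen stored at
# matrix row 0, column 3 becomes the coordinate "A4" (row index 0 maps to
# letter 'A' via the conversion dict, and column index 3 prints as 4).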
main()
|
nilq/baby-python
|
python
|
# Copyright 2012 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for libvirt inspector.
"""
import fixtures
import mock
from oslo_utils import units
from oslotest import base
from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer.compute.virt.libvirt import inspector as libvirt_inspector
from ceilometer.compute.virt.libvirt import utils
from ceilometer import service
class FakeLibvirtError(Exception):
pass
class VMInstance(object):
id = 'ff58e738-12f4-4c58-acde-77617b68da56'
name = 'instance-00000001'
class TestLibvirtInspection(base.BaseTestCase):
def setUp(self):
super(TestLibvirtInspection, self).setUp()
conf = service.prepare_service([], [])
self.instance = VMInstance()
libvirt_inspector.libvirt = mock.Mock()
libvirt_inspector.libvirt.getVersion.return_value = 5001001
libvirt_inspector.libvirt.VIR_DOMAIN_SHUTOFF = 5
libvirt_inspector.libvirt.libvirtError = FakeLibvirtError
utils.libvirt = libvirt_inspector.libvirt
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=None):
self.inspector = libvirt_inspector.LibvirtInspector(conf)
def test_inspect_instance_stats(self):
domain = mock.Mock()
domain.info.return_value = (0, 0, 0, 2, 999999)
domain.memoryStats.return_value = {'available': 51200,
'unused': 25600,
'rss': 30000,
'swap_in': 5120,
'swap_out': 8192}
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
conn.domainListGetStats.return_value = [({}, {
'cpu.time': 999999,
'vcpu.maximum': 4,
'vcpu.current': 2,
'vcpu.0.time': 10000,
'vcpu.0.wait': 10000,
'vcpu.2.time': 10000,
'vcpu.2.wait': 10000,
'perf.cmt': 90112,
'perf.cpu_cycles': 7259361,
'perf.instructions': 8815623,
'perf.cache_references': 74184,
'perf.cache_misses': 16737,
'perf.mbmt': 1892352,
'perf.mbml': 1802240})]
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
stats = self.inspector.inspect_instance(self.instance, None)
self.assertEqual(2, stats.cpu_number)
self.assertEqual(40000, stats.cpu_time)
self.assertEqual(90112, stats.cpu_l3_cache_usage)
self.assertEqual(25600 / units.Ki, stats.memory_usage)
self.assertEqual(30000 / units.Ki, stats.memory_resident)
self.assertEqual(5120 / units.Ki, stats.memory_swap_in)
self.assertEqual(8192 / units.Ki, stats.memory_swap_out)
self.assertEqual(1892352, stats.memory_bandwidth_total)
self.assertEqual(1802240, stats.memory_bandwidth_local)
self.assertEqual(7259361, stats.cpu_cycles)
self.assertEqual(8815623, stats.instructions)
self.assertEqual(74184, stats.cache_references)
self.assertEqual(16737, stats.cache_misses)
def test_inspect_instance_stats_fallback_cpu_time(self):
domain = mock.Mock()
domain.info.return_value = (0, 0, 0, 2, 20000)
domain.memoryStats.return_value = {'available': 51200,
'unused': 25600,
'rss': 30000}
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
conn.domainListGetStats.return_value = [({}, {
'vcpu.current': 2,
'vcpu.maximum': 4,
'vcpu.0.time': 10000,
'vcpu.1.time': 10000,
'cpu.time': 999999})]
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
            stats = self.inspector.inspect_instance(self.instance, None)
self.assertEqual(2, stats.cpu_number)
self.assertEqual(999999, stats.cpu_time)
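    # Note on the two tests above: when every current vCPU reports both
    # 'vcpu.N.time' and 'vcpu.N.wait', the inspector sums them (4 x 10000 =
    # 40000 in the first test); when the per-vCPU wait counters are missing,
    # as here, it falls back to the aggregate 'cpu.time' stat (999999).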
def test_inspect_cpus_with_domain_shutoff(self):
domain = mock.Mock()
domain.info.return_value = (5, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
self.assertRaises(virt_inspector.InstanceShutOffException,
self.inspector.inspect_instance,
self.instance, None)
def test_inspect_vnics(self):
dom_xml = """
<domain type='kvm'>
<devices>
<!-- NOTE(dprince): interface with no target -->
<interface type='bridge'>
<mac address='fa:16:3e:93:31:5a'/>
<source bridge='br100'/>
<model type='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' \
slot='0x03' function='0x0'/>
</interface>
<!-- NOTE(dprince): interface with no mac -->
<interface type='bridge'>
<source bridge='br100'/>
<target dev='foo'/>
<model type='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' \
slot='0x03' function='0x0'/>
</interface>
<interface type='bridge'>
<mac address='fa:16:3e:71:ec:6d'/>
<source bridge='br100'/>
<target dev='vnet0'/>
<filterref filter=
'nova-instance-00000001-fa163e71ec6d'>
<parameter name='DHCPSERVER' value='10.0.0.1'/>
<parameter name='IP' value='10.0.0.2'/>
<parameter name='PROJMASK' value='255.255.255.0'/>
<parameter name='PROJNET' value='10.0.0.0'/>
</filterref>
<alias name='net0'/>
</interface>
<interface type='bridge'>
<mac address='fa:16:3e:71:ec:6e'/>
<source bridge='br100'/>
<target dev='vnet1'/>
<filterref filter=
'nova-instance-00000001-fa163e71ec6e'>
<parameter name='DHCPSERVER' value='192.168.0.1'/>
<parameter name='IP' value='192.168.0.2'/>
<parameter name='PROJMASK' value='255.255.255.0'/>
<parameter name='PROJNET' value='192.168.0.0'/>
</filterref>
<alias name='net1'/>
</interface>
<interface type='bridge'>
<mac address='fa:16:3e:96:33:f0'/>
<source bridge='qbr420008b3-7c'/>
<target dev='vnet2'/>
<model type='virtio'/>
<address type='pci' domain='0x0000' bus='0x00' \
slot='0x03' function='0x0'/>
</interface>
</devices>
</domain>
"""
interface_stats = {
'vnet0': (1, 2, 21, 22, 3, 4, 23, 24),
'vnet1': (5, 6, 25, 26, 7, 8, 27, 28),
'vnet2': (9, 10, 29, 30, 11, 12, 31, 32),
}
interfaceStats = interface_stats.__getitem__
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.info.return_value = (0, 0, 0, 2, 999999)
domain.interfaceStats.side_effect = interfaceStats
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
interfaces = list(self.inspector.inspect_vnics(
self.instance, None))
self.assertEqual(3, len(interfaces))
vnic0 = interfaces[0]
self.assertEqual('vnet0', vnic0.name)
self.assertEqual('fa:16:3e:71:ec:6d', vnic0.mac)
self.assertEqual('nova-instance-00000001-fa163e71ec6d', vnic0.fref)
self.assertEqual('255.255.255.0', vnic0.parameters.get('projmask'))
self.assertEqual('10.0.0.2', vnic0.parameters.get('ip'))
self.assertEqual('10.0.0.0', vnic0.parameters.get('projnet'))
self.assertEqual('10.0.0.1', vnic0.parameters.get('dhcpserver'))
self.assertEqual(1, vnic0.rx_bytes)
self.assertEqual(2, vnic0.rx_packets)
self.assertEqual(3, vnic0.tx_bytes)
self.assertEqual(4, vnic0.tx_packets)
self.assertEqual(21, vnic0.rx_errors)
self.assertEqual(22, vnic0.rx_drop)
self.assertEqual(23, vnic0.tx_errors)
self.assertEqual(24, vnic0.tx_drop)
vnic1 = interfaces[1]
self.assertEqual('vnet1', vnic1.name)
self.assertEqual('fa:16:3e:71:ec:6e', vnic1.mac)
self.assertEqual('nova-instance-00000001-fa163e71ec6e', vnic1.fref)
self.assertEqual('255.255.255.0', vnic1.parameters.get('projmask'))
self.assertEqual('192.168.0.2', vnic1.parameters.get('ip'))
self.assertEqual('192.168.0.0', vnic1.parameters.get('projnet'))
self.assertEqual('192.168.0.1', vnic1.parameters.get('dhcpserver'))
self.assertEqual(5, vnic1.rx_bytes)
self.assertEqual(6, vnic1.rx_packets)
self.assertEqual(7, vnic1.tx_bytes)
self.assertEqual(8, vnic1.tx_packets)
self.assertEqual(25, vnic1.rx_errors)
self.assertEqual(26, vnic1.rx_drop)
self.assertEqual(27, vnic1.tx_errors)
self.assertEqual(28, vnic1.tx_drop)
vnic2 = interfaces[2]
self.assertEqual('vnet2', vnic2.name)
self.assertEqual('fa:16:3e:96:33:f0', vnic2.mac)
self.assertIsNone(vnic2.fref)
self.assertEqual(
{'interfaceid': None, 'bridge': 'qbr420008b3-7c'},
vnic2.parameters)
self.assertEqual(9, vnic2.rx_bytes)
self.assertEqual(10, vnic2.rx_packets)
self.assertEqual(11, vnic2.tx_bytes)
self.assertEqual(12, vnic2.tx_packets)
self.assertEqual(29, vnic2.rx_errors)
self.assertEqual(30, vnic2.rx_drop)
self.assertEqual(31, vnic2.tx_errors)
self.assertEqual(32, vnic2.tx_drop)
def test_inspect_vnics_with_domain_shutoff(self):
domain = mock.Mock()
domain.info.return_value = (5, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
inspect = self.inspector.inspect_vnics
self.assertRaises(virt_inspector.InstanceShutOffException,
list, inspect(self.instance, None))
def test_inspect_disks(self):
dom_xml = """
<domain type='kvm'>
<devices>
<disk type='file' device='disk'>
<driver name='qemu' type='qcow2' cache='none'/>
<source file='/path/instance-00000001/disk'/>
<target dev='vda' bus='virtio'/>
<alias name='virtio-disk0'/>
<address type='pci' domain='0x0000' bus='0x00'
slot='0x04' function='0x0'/>
</disk>
</devices>
</domain>
"""
blockStatsFlags = {'wr_total_times': 91752302267,
'rd_operations': 6756,
'flush_total_times': 1310427331,
'rd_total_times': 29142253616,
'rd_bytes': 171460096,
'flush_operations': 746,
'wr_operations': 1437,
'wr_bytes': 13574656}
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.info.return_value = (0, 0, 0, 2, 999999)
domain.blockStats.return_value = (1, 2, 3, 4, -1)
domain.blockStatsFlags.return_value = blockStatsFlags
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
disks = list(self.inspector.inspect_disks(self.instance, None))
self.assertEqual(1, len(disks))
self.assertEqual('vda', disks[0].device)
self.assertEqual(1, disks[0].read_requests)
self.assertEqual(2, disks[0].read_bytes)
self.assertEqual(3, disks[0].write_requests)
self.assertEqual(4, disks[0].write_bytes)
self.assertEqual(91752302267, disks[0].wr_total_times)
self.assertEqual(29142253616, disks[0].rd_total_times)
def test_inspect_disks_with_domain_shutoff(self):
domain = mock.Mock()
domain.info.return_value = (5, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
inspect = self.inspector.inspect_disks
self.assertRaises(virt_inspector.InstanceShutOffException,
list, inspect(self.instance, None))
def test_inspect_disk_info(self):
dom_xml = """
<domain type='kvm'>
<devices>
<disk type='file' device='disk'>
<driver name='qemu' type='qcow2' cache='none'/>
<source file='/path/instance-00000001/disk'/>
<target dev='vda' bus='virtio'/>
<alias name='virtio-disk0'/>
<address type='pci' domain='0x0000' bus='0x00'
slot='0x04' function='0x0'/>
</disk>
</devices>
</domain>
"""
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.blockInfo.return_value = (1, 2, 3, -1)
domain.info.return_value = (0, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
disks = list(self.inspector.inspect_disk_info(
self.instance, None))
self.assertEqual(1, len(disks))
self.assertEqual('vda', disks[0].device)
self.assertEqual(1, disks[0].capacity)
self.assertEqual(2, disks[0].allocation)
self.assertEqual(3, disks[0].physical)
def test_inspect_disk_info_network_type(self):
dom_xml = """
<domain type='kvm'>
<devices>
<disk type='network' device='disk'>
<driver name='qemu' type='qcow2' cache='none'/>
<source file='/path/instance-00000001/disk'/>
<target dev='vda' bus='virtio'/>
<alias name='virtio-disk0'/>
<address type='pci' domain='0x0000' bus='0x00'
slot='0x04' function='0x0'/>
</disk>
</devices>
</domain>
"""
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.blockInfo.return_value = (1, 2, 3, -1)
domain.info.return_value = (0, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
disks = list(self.inspector.inspect_disk_info(self.instance, None))
self.assertEqual(1, len(disks))
def test_inspect_disk_info_without_source_element(self):
dom_xml = """
<domain type='kvm'>
<devices>
<disk type='file' device='cdrom'>
<driver name='qemu' type='raw' cache='none'/>
<backingStore/>
<target dev='hdd' bus='ide' tray='open'/>
<readonly/>
<alias name='ide0-1-1'/>
<address type='drive' controller='0' bus='1'
target='0' unit='1'/>
</disk>
</devices>
</domain>
"""
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.blockInfo.return_value = (1, 2, 3, -1)
domain.info.return_value = (0, 0, 0, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
disks = list(self.inspector.inspect_disk_info(self.instance, None))
self.assertEqual(0, len(disks))
def test_inspect_disks_without_source_element(self):
dom_xml = """
<domain type='kvm'>
<devices>
<disk type='file' device='cdrom'>
<driver name='qemu' type='raw' cache='none'/>
<backingStore/>
<target dev='hdd' bus='ide' tray='open'/>
<readonly/>
<alias name='ide0-1-1'/>
<address type='drive' controller='0' bus='1'
target='0' unit='1'/>
</disk>
</devices>
</domain>
"""
blockStatsFlags = {'wr_total_times': 91752302267,
'rd_operations': 6756,
'flush_total_times': 1310427331,
'rd_total_times': 29142253616,
'rd_bytes': 171460096,
'flush_operations': 746,
'wr_operations': 1437,
'wr_bytes': 13574656}
domain = mock.Mock()
domain.XMLDesc.return_value = dom_xml
domain.info.return_value = (0, 0, 0, 2, 999999)
domain.blockStats.return_value = (1, 2, 3, 4, -1)
domain.blockStatsFlags.return_value = blockStatsFlags
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
disks = list(self.inspector.inspect_disks(self.instance, None))
self.assertEqual(0, len(disks))
def test_inspect_memory_usage_with_domain_shutoff(self):
domain = mock.Mock()
domain.info.return_value = (5, 0, 51200, 2, 999999)
conn = mock.Mock()
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
self.assertRaises(virt_inspector.InstanceShutOffException,
self.inspector.inspect_instance,
self.instance, None)
def test_inspect_memory_with_empty_stats(self):
domain = mock.Mock()
domain.info.return_value = (0, 0, 51200, 2, 999999)
domain.memoryStats.return_value = {}
conn = mock.Mock()
conn.domainListGetStats.return_value = [({}, {})]
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
stats = self.inspector.inspect_instance(self.instance, None)
self.assertIsNone(stats.memory_usage)
self.assertIsNone(stats.memory_resident)
self.assertIsNone(stats.memory_swap_in)
self.assertIsNone(stats.memory_swap_out)
def test_inspect_memory_with_usable(self):
domain = mock.Mock()
domain.info.return_value = (0, 0, 0, 2, 999999)
domain.memoryStats.return_value = {'available': 76800,
'rss': 30000,
'swap_in': 5120,
'swap_out': 8192,
'unused': 25600,
'usable': 51200}
conn = mock.Mock()
conn.domainListGetStats.return_value = [({}, {})]
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
stats = self.inspector.inspect_instance(self.instance, None)
self.assertEqual(25600 / units.Ki, stats.memory_usage)
self.assertEqual(30000 / units.Ki, stats.memory_resident)
self.assertEqual(5120 / units.Ki, stats.memory_swap_in)
self.assertEqual(8192 / units.Ki, stats.memory_swap_out)
def test_inspect_perf_events_libvirt_less_than_2_3_0(self):
domain = mock.Mock()
domain.info.return_value = (0, 0, 51200, 2, 999999)
domain.memoryStats.return_value = {'rss': 0,
'available': 51200,
'unused': 25600}
conn = mock.Mock()
conn.domainListGetStats.return_value = [({}, {})]
conn.lookupByUUIDString.return_value = domain
with mock.patch('ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection', return_value=conn):
stats = self.inspector.inspect_instance(self.instance, None)
self.assertIsNone(stats.cpu_l3_cache_usage)
self.assertIsNone(stats.memory_bandwidth_total)
self.assertIsNone(stats.memory_bandwidth_local)
self.assertIsNone(stats.cpu_cycles)
self.assertIsNone(stats.instructions)
self.assertIsNone(stats.cache_references)
self.assertIsNone(stats.cache_misses)
class TestLibvirtInspectionWithError(base.BaseTestCase):
def setUp(self):
super(TestLibvirtInspectionWithError, self).setUp()
conf = service.prepare_service([], [])
self.useFixture(fixtures.MonkeyPatch(
'ceilometer.compute.virt.libvirt.utils.'
'refresh_libvirt_connection',
mock.MagicMock(side_effect=[None, Exception('dummy')])))
libvirt_inspector.libvirt = mock.Mock()
libvirt_inspector.libvirt.libvirtError = FakeLibvirtError
utils.libvirt = libvirt_inspector.libvirt
self.inspector = libvirt_inspector.LibvirtInspector(conf)
def test_inspect_unknown_error(self):
self.assertRaises(virt_inspector.InspectorException,
self.inspector.inspect_instance, 'foo', None)
|
nilq/baby-python
|
python
|