text
stringlengths
29
850k
""" A tree module for pforest. GNU GENERAL PUBLIC LICENSE Version 2 Created on Thu Oct 16 17:33:47 2014 @author: Wasit """ import numpy as np try: __import__('imp').find_module('pforest') print "Found pforest" from pforest.master import mnode except ImportError: print "Not found pforest. Importing local modules" from master import mnode class tree(mnode): """ A tree class that represent the tree of the random forest. """ def settree(self,root=mnode(0,0,0)): """ Initialize the tree with training result. """ self.theta=root.theta #vector array self.tau=root.tau #scalar self.H=root.H #scalar self.P=root.P #vector array self.parent=root.parent #mnode self.depth=root.depth #int self.char=root.char self.Q=root.Q if root.L is not None: self.L=tree() #mnode self.L.settree(root.L) self.R=tree() #mnode self.R.settree(root.R) def classify(self,Ix): """ Classify input Ix by the decision tree. """ if self.tau is None: #reaching terminal node return self.P else: if(Ix[ int(self.theta) ]<self.tau): return self.L.classify(Ix) else: return self.R.classify(Ix) def getP(self,x,dset): """ Return the probability. input: x sample index [int] dset the dataset object output: P [1d ndarray] probability P(L|Ix) """ #print("test>>mnode:{}".format(self)) if self.tau is None:#reaching terminal node return self.P else: #if (self.L is not None and goLeft) : if (dset.getI(self.theta,x)<self.tau) : return self.L.getP(x,dset) else: return self.R.getP(x,dset) def getL(self,x,dset): """ input: x sample index [int] dset the dataset object output: L [integer] label """ return np.argmax(self.getP(x,dset)) def show(self): """Output this tree on standard output.""" print self.table() if __name__ == '__main__': import pickle from matplotlib import pyplot as plt try: __import__('imp').find_module('pforest') print "Found pforest" from pforest.dataset import dataset except ImportError: print "Not found pforest. 
Importing local modules" from dataset import dataset #from scmaster import master # #training # m=master() # m.reset() # m.train() # print m.root.table() # #recording the tree pickle file # pickleFile = open('out/root.pic', 'wb') # pickle.dump(m.root, pickleFile, pickle.HIGHEST_PROTOCOL) # pickleFile.close() #reading the tree pickle file pickleFile = open('out_tree.pic', 'rb') root = pickle.load(pickleFile) pickleFile.close() #init the test tree t=tree() t.settree(root) t.show() #compute recall rate dset=dataset() correct=0; for x in xrange(dset.size): L=t.getL(np.array([x]),dset) if dset.getL(x) == L: correct=correct+1 dset.setL(x,L) print("recall rate: {}%".format(correct/float(dset.size)*100)) #setup the new test-set d=0.01 y, x = np.mgrid[slice(-1, 1+d, d), slice(-1, 1+d, d)] #create dataset dset2=dataset() #start labeling L=np.zeros(x.shape,dtype=int) for r in xrange(x.shape[0]): for c in xrange(x.shape[1]): Prob=t.classify(( x[r,c],y[r,c] )) L[r,c]=np.argmax(Prob) #plot the lalbel out put plt.close('all') plt.axis([-1,1,-1,1]) plt.pcolor(x,y,L) # plt.show() #overlaying new input data plt.hold(True) plt.set_cmap('jet') marker=['bo','co','go','ro','mo','yo','ko', 'bs','cs','gs','rs','ms','ys','ks'] z=np.random.randint(0,dset.size,1000) for i in z: plt.plot(dset2.I[i,0],dset2.I[i,1],marker[dset2.samples[i]]) plt.show()
On November 5, 2018, NASBITE International opened team registration for the 2019 Student Case Competition, happening April 8-9, 2019, two days prior to the 32nd Annual Conference and one day prior to the 3rd Annual Small Business Exporter Summit in Savannah, GA. Up to eighteen teams will be competing. Teams are selected on a first-come, first-served basis. All team registration forms and payments must be received no later than February 1, 2019 in order to receive the written case on February 15, 2019.
import json
from Send_formatter import Send_formatter
from src.networking.IP_Parser import IP_Parser
from Exceptions.Table_lookup_failed_exception import Table_lookup_failed_exception
from src.Encoder import Encoder
from src.Search.Search_results import Search_results
from src.Search.Database import Database
import threading


class Message_handler(object):
    """Dispatches incoming DHT messages and sends outgoing ones.

    Each UDP datagram is a JSON object with a "type" field; `handle`
    routes it to the matching private handler. Outbound messages are
    built by Send_formatter and sent via the injected socket.
    """

    def __init__(self, table, socket):
        # table: routing table (node_id -> ip); socket: UDP socket used
        # for all sends. Both are injected by the caller.
        self.parser = IP_Parser()
        self.table = table
        self.socket = socket
        self.__send_formatter = Send_formatter(self.table)
        self.__encoder = Encoder()
        self.__db = Database()
        # Set once ROUTING_INFO arrives, releasing any thread waiting
        # for network setup to complete.
        self.setup_lock_event = threading.Event()

    def handle(self, data, sender_addr):
        """Parse raw JSON `data` and dispatch on its "type" field.

        sender_addr is the (ip, port) tuple from the socket layer.
        """
        message = json.loads(data)
        message_type = message["type"]
        if not self.__valid_message(message_type):
            print "Warning - Malformed message received"
            return
        print "Received message - " + message_type + " from " + str(sender_addr[0]) + ":" + str(sender_addr[1])
        print message
        # Independent `if`s (not elif): message_type matches at most one.
        if message_type == "JOINING_NETWORK":
            self.__handle_joining_network(message, sender_addr)
        if message_type == "ROUTING_INFO":
            # Routing info received -> initial setup can proceed.
            self.setup_lock_event.set()
            self.__handle_routing_info(message, sender_addr)
        if message_type == "JOINING_NETWORK_RELAY":
            self.__handle_joining_network_relay(message)
        if message_type == "SEARCH_RESPONSE":
            self.__handle_search_response(message)
        if message_type == "SEARCH":
            self.__handle_search(message)
        if message_type == "INDEX":
            self.__handle_index(message)

    def __valid_message(self, message_type):
        # Only checks the type field is present/non-empty (truthy).
        return message_type

    def join_network(self, bootstrap_ip):
        """Send JOINING_NETWORK to the bootstrap node to enter the DHT."""
        to_send = self.__send_formatter.send_joining_network()
        self.send_message(to_send, bootstrap_ip.get_ip_pair())

    def __handle_routing_info(self, message, sender_addr):
        """Merge received routing table; learn the gateway; forward on."""
        self.table.load_routing_info(message["route_table"])
        if self.__sender_is_gateway(message, sender_addr):
            self.table.add_routing_info(message["gateway_id"], message["ip_address"])
        # NOTE(review): source formatting was lost; forwarding is read as
        # unconditional here (it is a no-op when this node is the target) —
        # confirm against the original layout.
        self.__forward_routing_info_if_necessary(message)

    def __sender_is_gateway(self, message, sender_addr):
        # True when the ip embedded in the message matches the actual
        # datagram source address.
        msg_ip = str(message["ip_address"])
        parsed_sender_addr = str(self.parser.parse_from_tuple(sender_addr))
        return msg_ip == parsed_sender_addr

    def __forward_routing_info_if_necessary(self, message):
        """If I am the gateway (and not the target), relay to the target."""
        node_id = message["node_id"]
        gateway_id = message["gateway_id"]
        if not self.__id_is_me(node_id):
            if self.__id_is_me(gateway_id):
                ip = self.__normalise_ip_to_pair(node_id)
                jsoned = json.dumps(message)
                self.send_message(jsoned, ip)
            else:
                print "Error - Expecting to forward routing info but I am not gateway"
                return

    def __id_is_me(self, node_id):
        # Compare as ints: ids may arrive as strings in JSON.
        return int(node_id) == int(self.table.node_id)

    def __handle_joining_network_relay(self, message):
        """Relay a join announcement and report routing info to the gateway."""
        gateway_id = message["gateway_id"]
        node_id = message["node_id"]
        if self.__id_is_me(node_id):
            # A relay about my own join has looped back; ignore it.
            return
        self.__forward_message_to_closest_node(message, node_id)
        if not self.__id_is_me(gateway_id):
            # Learn a route to the joining node via the gateway's ip.
            new_known_ip = self.table.get_ip_of_node(gateway_id)
            self.table.add_routing_info(node_id, new_known_ip)
        # NOTE(review): statement grouping reconstructed — the routing-info
        # reply is read as unconditional; verify against the original file.
        to_send = self.__send_formatter.send_routing_info(node_id, gateway_id)
        self.send_to_node_id(to_send, gateway_id)

    def send_to_node_id(self, message, node_id):
        """Send `message` to the node identified by `node_id`."""
        ip = self.__normalise_ip_to_pair(node_id)
        self.send_message(message, ip)

    def __handle_joining_network(self, message, sender_addr):
        """Act as gateway: relay the join and reply with routing info."""
        node_id = message["node_id"]
        node_ip = message["ip_address"]
        to_send_forward = self.__send_formatter.send_joining_network_relay(node_id)
        self.__forward_message_to_closest_node(to_send_forward, node_id)
        to_send = self.__send_formatter.send_routing_info(node_id, self.table.node_id)
        self.table.add_routing_info(node_id, node_ip)
        self.send_message(to_send, sender_addr)

    def send_message(self, message, sender_addr):
        """Log and transmit a JSON string `message` to (ip, port)."""
        sender_ip = str(sender_addr[0])
        sender_port = str(sender_addr[1])
        # Parsed only to log the message type below.
        loaded = json.loads(message)
        print "Sending " + loaded["type"] + " to " + sender_ip + ":" + sender_port
        print message
        self.socket.sendto(message, sender_addr)

    def __normalise_ip_to_pair(self, node_id):
        """Resolve node_id to an (ip, port) pair via the routing table.

        Raises Table_lookup_failed_exception when the id is unknown.
        """
        try:
            node_ip = self.table.get_ip_of_node(node_id)
        except KeyError:
            print "----------- Error - Could not find ip of node " + str(node_id)
            raise Table_lookup_failed_exception("Could not find ip for id " + str(node_id))
        normalised_ip = self.parser.parse(node_ip).get_ip_pair()
        return normalised_ip

    def search(self, words):
        """Issue a SEARCH for each word toward the node closest to its hash.

        Falls back to the local database (and returns its results) when no
        closer node exists. NOTE(review): the early return means words after
        a locally-answered one are not searched — presumably intentional,
        verify with callers.
        """
        for word in words:
            hash_of_word = self.__encoder.get_hash_of_word(word)
            print "Hash is " + str(hash_of_word)
            closest_node = self.table.get_closest_node_id(hash_of_word)
            if closest_node:
                message = self.__send_formatter.search(word, closest_node)
                ip = self.__normalise_ip_to_pair(closest_node)
                self.send_message(message, ip)
            else:
                return self.__db.get_results(word)
    #TODO handle pings

    def __handle_search(self, message):
        """Answer a SEARCH from the local index and route the response back."""
        word = message["word"]
        target_node_id = message["sender_id"]
        results = self.__db.get_results(word)
        message = self.__send_formatter.search_response(word, target_node_id, results)
        self.__forward_message_to_closest_node(message, target_node_id)

    def __handle_search_response(self, message):
        """Consume a SEARCH_RESPONSE addressed to me, or forward it on."""
        node_id = message["node_id"]
        if self.__id_is_me(node_id):
            word = message["word"]
            responses = message["response"]
            search_result = Search_results(word, responses)
            print "RECEIVED RESPONSE FOR " + search_result.word
            print "Results:"
            for result in search_result.results:
                print "Url:\t\t" + str(result["url"])
                print "Rank:\t\t" + str(result["rank"])
            return search_result
        else:
            self.__forward_message_to_closest_node(message, node_id)

    def __handle_index(self, message):
        """Store an INDEX addressed to me (and ack), or forward it on."""
        target_id = message["target_id"]
        if self.__id_is_me(target_id):
            self.__send_ack(target_id)
            word = message["keyword"]
            urls = message["link"]
            self.__db.index_results(word, urls)
        else:
            self.__forward_message_to_closest_node(message, target_id)

    def __send_ack(self, target_id):
        """Acknowledge receipt of an INDEX to its target node."""
        message = self.__send_formatter.ack(target_id)
        ip = self.__normalise_ip_to_pair(target_id)
        self.send_message(message, ip)

    def index(self, keyword, link):
        """Publish keyword->link into the DHT and mirror it locally."""
        hash_of_word = self.__encoder.get_hash_of_word(keyword)
        message = self.__send_formatter.index(hash_of_word, keyword, link)
        self.__forward_message_to_closest_node(message, hash_of_word)
        # Also index locally so this node can answer searches for it.
        loaded = json.loads(message)
        word = loaded["keyword"]
        urls = loaded["link"]
        self.__db.index_results(word, urls)

    def __forward_message_to_closest_node(self, message, node_id):
        """Serialize if needed and send toward the node closest to node_id.

        Silently drops the message when no closer node is known.
        """
        if type(message) is dict:
            message = json.dumps(message)
        closest_node = self.table.get_closest_node_id(node_id)
        if closest_node:
            self.__send_message(message, closest_node)

    def __send_message(self, message, node_id):
        ip = self.__normalise_ip_to_pair(node_id)
        self.send_message(message, ip)
Our IT Security team is made of senior experienced individuals certified CISSP, CISA and various other industry leading designations. We can help you align your organization with major frameworks such as Isaca COBIT 5, NIST, ISACA IT Risk Framework, Val IT, COSO and others. We understand your organization may have government regulations to follow such as GLBA, HIPAA, PCI-DSS, FISMA and others such as IIROC, the Privacy Act, PHIPA and PIPEDA. We do more than simply run tests on your networks. Anyone can run tests with various programs, deliver a report and look good. We go further. At Marketbridge, we'll review the platforms, the operating systems, the known bugs and defects. We'll see how far we can go. We have. As an example, we once uncovered a serious flaw with a Data Loss Prevention platform (from a well-known vendor) allowing us to directly modify information in its database completely compromising this system's integrity. Given a real world data loss case with this system, it would affect any attempt to seek damages in a court of law. No stone will be left unturned. Our reports will provide you with more than just a list of open ports and vulnerabilities. We will provide you detailed information about your infrastructure and recommendations. © 2016 Marketbridge Tech., Inc.
def messageBox(text, align='center', textWidth=274):
    """Build and display a bevel-framed message box containing `text`.

    Args:
        text: the string to render.
        align: text alignment passed to the TextBlockPass ('center' default).
        textWidth: wrap width in pixels for word-wrapping.

    Returns:
        The Frame shader added to the scene (so the caller can remove it).
    """
    font = parole.resource.getFont("fonts/Arial.ttf", 14)
    # White text on a dark blue field, word-wrapped to textWidth.
    block = parole.shader.TextBlockPass(font, (255, 255, 255),
                                        wrap_width=textWidth,
                                        bg_rgb=(0, 64, 128),
                                        align=align, wrap='word')
    block.text = text
    block.update()
    # Pad the text by 10px on every side inside a solid color field.
    sdr = parole.shader.Shader("FrameContents",
                               (block.width + 20, block.height + 20))
    sdr.addPass(parole.shader.ColorField((0, 64, 128), sdr.size))
    sdr.addPass(block, (10, 10))
    # FIX: the second VerticalBevel previously used (128,129,128) for its
    # mid tone — a typo inconsistent with the (128,128,128) used by all
    # other bevel edges.
    mbox = parole.shader.Frame(
        (parole.shader.VerticalBevel((0, 0, 0), (128, 128, 128),
                                     (255, 255, 255), 1, 2, 1),
         parole.shader.VerticalBevel((0, 0, 0), (128, 128, 128),
                                     (255, 255, 255), 1, 2, 1),
         parole.shader.HorizontalBevel((255, 255, 255), (128, 128, 128),
                                       (0, 0, 0), 1, 2, 1),
         parole.shader.HorizontalBevel((255, 255, 255), (128, 128, 128),
                                       (0, 0, 0), 1, 2, 1),
         None, None, None, None),
        contents=[sdr])
    mbox.update()
    parole.display.scene.add(mbox, pos=mbox.centeredPos())
    return mbox
Based on a comparison of 34 countries in 2016, Italy ranked the highest in Permanent crops for human consumption - Harvested production with 28,013 kt followed by Turkey and Spain. On the other end of the scale was Estonia with 3.64 kt, Malta with 5.72 kt and Finland with 9.50 kt. Buy Permanent crops for human consumption - Harvested production data for all countries.
import os
import time
import json
from slackclient import SlackClient

'''
    Welcome to the code! This is the main class for the hr_bot.
    This code sucks: spaghetti, mixed, hardcoded and etc.
    So if you are a very opinionated person I am accepting refactoring PRs :)
    Go ahead, have fun!
'''

# starterbot's ID as an environment variable
BOT_ID = os.environ["HRBOT_ID"]
BOT_TOKEN = os.environ["HRBOT_TOKEN"]

# constants
AT_BOT = "<@" + BOT_ID + ">"
DICT_USER = {}                     # user id -> running HR score
NUMBER_OF_REACTIONS_INT = 3        # votes needed for an operation to pass
NUMBER_OF_REACTIONS = str(NUMBER_OF_REACTIONS_INT)
INITIAL_SCORE = 0
OPERATION_TIMEOUT = 300  #5 minutes
REBUKE_COMMAND = "boo"
CONGRAT_COMMAND = "kudos"
HELP_COMMAND = "help"
LEADER_BOARD_COMMAND = "leaderboard"
ERROR_SUFFIX = ". Type `@hr help` for instructions"
NOT_FOUND_MSGS = ['Not sure what you meant. I am still being coded! Sorry :pensive:','I am very busy right now! Maybe after a :coffee:', 'Nope']
INSTRUCTIONS_MSG = "Hi there! my name is HR. I can listen to complaints or praise between coworkers. You can raise a complaint by using the *" + REBUKE_COMMAND +"*"\
    " command or praise someone by using the *"+CONGRAT_COMMAND+"* command. Just tell me: `@hr "+CONGRAT_COMMAND+" @aric 200 He helped me with my computer` "\
    " If your message gets 3 OR + votes _@aric_ gets 200 points. On the contrary if you tell me: `@hr "+REBUKE_COMMAND+" @aric 500 he said the b word at lunch `"\
    " If your message gets 3 OR + votes _@aric_ losses 500 points. :warning: if you don't get enough votes you may loose some points!"\
    " Type `@hr "+LEADER_BOARD_COMMAND+"` to get the top 5 worst employees in the HR score."

slack_client = SlackClient(BOT_TOKEN)
list_of_operations = []            # pending HR_Operation objects awaiting votes


class HR_Operation:
    """One kudos/boo request waiting to collect enough reactions."""

    def __init__(self, author, isPositive, target, amount, reason, channel, timestamp):
        self.author = clean_up_user_name(author)
        self.isPositive = isPositive
        self.target = clean_up_user_name(target)
        self.amount = amount
        self.reason = reason
        self.channel = channel
        self.timestamp = timestamp
        self.votes = []
        self.processed = False

    def addVote(self, vote):
        # FIX: original signature was `def addVote(vote)` — missing `self`,
        # which would raise NameError on any call.
        self.votes.append(vote)


class MSG_Votes:
    """A single reaction (vote) attached to an operation message."""

    def __init__(self, reaction, channel, userReacting, msg_ts, msg_author):
        self.reaction = reaction
        self.channel = channel
        self.userReacting = clean_up_user_name(userReacting)
        self.msg_ts = msg_ts
        self.msg_author = clean_up_user_name(msg_author)


def publish(text, channel):
    """Post `text` to `channel` as the bot user."""
    slack_client.api_call("chat.postMessage", channel=channel, text=text, as_user=True)


def handle_command(hr_operation):
    """Validate and queue a new kudos/boo operation.

    Self-votes and boos against the bot are punished immediately instead
    of being queued.
    """
    #We need to filter the rules before actually applying this
    #Cannot allow that you upvote or downvote yourself
    if hr_operation.target == hr_operation.author:
        apply_point(False, 10, hr_operation.target)
        publish("Are you serious? Do you think I don't have an if statement for this? -10pts for you <@"+hr_operation.author+">, cheater", hr_operation.channel)
    elif hr_operation.target == BOT_ID and not hr_operation.isPositive:
        apply_point(False, 150, hr_operation.target)
        publish("hahah you think you are so funny... -150pts for you <@"+hr_operation.author+">. Report me to HR.... try it", hr_operation.channel)
    else:
        list_of_operations.append(hr_operation)
        response = "If you get at least *"+NUMBER_OF_REACTIONS+"* reactions, consider it done!"
        publish(response, hr_operation.channel)


def handle_reaction(vote):
    """Attach a reaction to its pending operation, penalizing cheaters.

    A vote counts only when it matches the operation's message (timestamp,
    author and channel) and the voter is neither the message author, the
    beneficiary of a positive operation, nor a double-voter.
    """
    #Look for the operation and add vote if found
    if len(list_of_operations) > 0:
        for op in list_of_operations:
            # check if the vote is for the operation
            if op.timestamp == vote.msg_ts and op.author == vote.msg_author and vote.channel == op.channel:
                if vote.msg_author == vote.userReacting:
                    apply_point(False, 10, vote.msg_author)
                    publish("You can't vote, you sneaky cheater! -10pts for you <@"+vote.msg_author+">", vote.channel)
                    return
                if op.target == vote.userReacting and op.isPositive:
                    apply_point(False, 10, op.target)
                    publish("Hey, what do you think I am? An empty robot? You cannot vote for yourself, cheater! -10pts for you <@"+vote.userReacting+">", vote.channel)
                    return
                for op_vote in op.votes:
                    if vote.userReacting == op_vote.userReacting:
                        apply_point(False, 10, vote.userReacting)
                        publish("Hey <@"+vote.userReacting+">, you can't vote twice, cheater! -10pts for you ", vote.channel)
                        return
                op.votes.append(vote)
    refresh_leaderboard()


def process_pending_operations():
    """Expire operations older than OPERATION_TIMEOUT, docking the author."""
    for op in list_of_operations:
        if not op.processed and (time.time() - float(op.timestamp)) > OPERATION_TIMEOUT:
            penalty_points = 10
            msg = ""
            if op.isPositive:
                penalty_points = 5
                msg = "Not enough votes, <@"+op.author+">. Next time try to get some traction. I have to take 5 points because of RAM/CPU wasted time. Good luck next time. :smile: "
            else:
                penalty_points = 10
                msg = "You didn't get traction, dude. Are you falsely accusing your coworker -10 pts for you <@"+op.author+">."
            apply_point(False, penalty_points, op.author)
            op.processed = True
            publish(msg, op.channel)


def refresh_leaderboard():
    """Apply any operation that has collected enough votes."""
    for op in list_of_operations:
        # FIX: was `==`; `>=` matches the advertised "3 OR + votes" and is
        # robust if votes ever arrive in a batch.
        if len(op.votes) >= NUMBER_OF_REACTIONS_INT and not op.processed:
            apply_point(op.isPositive, op.amount, op.target)
            op.processed = True
            msg = "The people had spoken. <@"+op.target+"> has *"+op.amount+"* "+(" more " if op.isPositive else " less ")+" points"
            publish(msg, op.channel)


def apply_point(increment, amount, user):
    """Add (or subtract, when increment is False) `amount` points to `user`."""
    if not user in DICT_USER:
        DICT_USER[user] = INITIAL_SCORE
    if increment:
        DICT_USER[user] = DICT_USER[user] + int(amount)
    else:
        DICT_USER[user] = DICT_USER[user] - int(amount)


def clean_up_user_name(username):
    """Strip Slack mention markup (<@U123>) down to the bare user id."""
    if username.find("<@") == -1:
        return username
    username = username.replace("<@", "")
    username = username.replace(">", "")
    return username


def handle_help(channel):
    publish(INSTRUCTIONS_MSG, channel)


def handle_leader_board(channel):
    """Publish the 5 lowest-scoring users, worst first.

    FIX: the original printed every user in arbitrary dict order, despite
    the help text promising the "top 5 worst employees".
    """
    index = 1
    msg = "Ok, sure sweetheart!\n"
    if len(DICT_USER) > 0:
        ranked = sorted(DICT_USER.iteritems(), key=lambda kv: kv[1])[:5]
        for key, value in ranked:
            msg += str(""+str(index)+"- <@"+key+"> ---> "+str(value)+"\n")
            index += 1
    else:
        msg = "I feel so lonely, no one voted yet... :crying_cat_face:"
    publish(msg, channel)


def isUser(subStr):
    """True when the token looks like a Slack user mention."""
    return subStr.startswith("<@U")


def parse_txt(msg_str, channel):
    """Parse a message addressed to the bot.

    Returns (errorMsg, valid, target, isPositive, amount, reason).
    Help and leaderboard commands are handled directly here.

    FIX: the original indexed bySpace[2]/bySpace[3] without length checks,
    so `@hr kudos` or `@hr kudos @user` raised IndexError.
    """
    errorMsg = None
    isPositive = None
    target = None
    amount = None
    reason = None
    valid = False
    bySpace = msg_str.split(" ")
    if len(bySpace) >= 2:
        if bySpace[0] == AT_BOT:
            if bySpace[1] in [CONGRAT_COMMAND, REBUKE_COMMAND]:
                isPositive = bySpace[1] == CONGRAT_COMMAND
                if len(bySpace) > 2 and isUser(bySpace[2]):
                    target = bySpace[2]
                    if len(bySpace) > 3 and bySpace[3].isdigit():
                        amount = bySpace[3]
                        if len(bySpace) > 4:
                            reason = " ".join(bySpace[4:])
                            valid = True
                    else:
                        errorMsg = "Expected the number of points not this *"+(bySpace[3] if len(bySpace) > 3 else "")+"*"
                else:
                    errorMsg = "Need to put a user after the command instead of *"+(bySpace[2] if len(bySpace) > 2 else "")+"*"
            elif bySpace[1] == HELP_COMMAND:
                valid = True
                handle_help(channel)
            elif bySpace[1] == LEADER_BOARD_COMMAND:
                handle_leader_board(channel)
                valid = True
            else:
                errorMsg = "You used the wrong command *"+bySpace[1]+"*"
        else:
            errorMsg = "C'mon! You can do better than that"
    else:
        errorMsg = "At least you mentioned me :smiley:"
    return errorMsg, valid, target, isPositive, amount, reason


def parse_msg(msg_json):
    """Turn an RTM text event into an HR_Operation (or an error reply)."""
    channel = msg_json["channel"]
    if not msg_json["user"] == BOT_ID:
        errorMSG, valid, target, isPositive, amount, reason = parse_txt(msg_json["text"], channel)
        if errorMSG:
            msgResponse = errorMSG + ERROR_SUFFIX
            publish(msgResponse, channel)
        elif not (isPositive == None):
            author = msg_json["user"]
            timestamp = msg_json["ts"]
            op = HR_Operation(author, isPositive, target, amount, reason, channel, timestamp)
            handle_command(op)
        elif not valid:
            # FIX: errorMSG is always None on this branch; concatenating it
            # raised TypeError. Fall back to a canned not-found message.
            msgResponse = NOT_FOUND_MSGS[0] + ERROR_SUFFIX
            publish(msgResponse, channel)


def parse_reaction(reaction_json):
    """Turn a reaction_added event into an MSG_Votes and process it."""
    if reaction_json["item"]:
        if reaction_json["type"] == 'reaction_added':
            # Defensive checks: RTM events may omit any of these fields.
            if 'channel' not in reaction_json["item"]:
                return
            if 'user' not in reaction_json:
                return
            if 'ts' not in reaction_json["item"]:
                return
            if 'item_user' not in reaction_json:
                return
            vote = MSG_Votes(reaction_json["reaction"], reaction_json["item"]["channel"], reaction_json["user"], reaction_json["item"]["ts"], reaction_json["item_user"])
            handle_reaction(vote)


def parse_slack_output(slack_rtm_output):
    """Route each RTM event to the text or reaction parser."""
    output_list = slack_rtm_output
    if output_list and len(output_list) > 0:
        for output in output_list:
            if output and 'text' in output and AT_BOT in output['text']:
                op = parse_msg(output)
                return op
            if output and 'reaction' in output:
                parse_reaction(output)
    return None


if __name__ == "__main__":
    READ_WEBSOCKET_DELAY = 1
    if slack_client.rtm_connect():
        print("Connection succesful")
        while True:
            operation = parse_slack_output(slack_client.rtm_read())
            process_pending_operations()
            time.sleep(READ_WEBSOCKET_DELAY)
    else:
        print("Connection failed. Invalid Slack token or bot ID?")
When you’re getting ready to apply for a new healthcare job, updating your resume is probably the first step. For many people, the next step is writing and refining the perfect cover letter. However, according to a recent study from Jobvite, only about 26% of recruiters actually look at a cover letter. With new developments like the use of AI for job searching and with online application processes, the necessity of a cover letter is murky at best. So what’s the truth? Are cover letters unnecessary relics of the past, destined to go the way of paper applications? Or is there still some value in presenting a well-crafted cover letter to potential employers? Today, there are still quite a few reasons to consider including a cover letter with your application, even when applying online. Here are the top reasons to include a cover letter. Customized cover letters are a way not just to show that you’re interested in a job, but also that you’re interested in this specific job. Cover letters are also a way to expand on the points made in your resume. If you possess a particular skill or hold a license that’s relevant to the job you’re applying for, a cover letter is the perfect place to explain how you’ve used a skill in the past, and how it will apply to a new position. Additionally, your cover letter provides the opportunity to provide clarity on any unusual aspects of your resume. For example, if you’ve worked at the same hospital for several years and are looking to transition to another field or facility, a cover letter is the place to explain that. Another example might be if you have gaps or a lot of short-term positions on your resume. You can use a cover letter to explain those gaps and set the employer’s mind at rest. In a lot of fields, especially those that require a lot of writing, cover letters are usually required, as they not only explain why you’re qualified for a job, they also help show off your writing skills to an employer. 
Similarly, if you’re working in a healthcare field that requires writing (like clinical education or similar), chances are that you’ll need to create a cover letter in order to explain why you’d be a good fit for a position. Keep in mind, a cover letter isn't just for you to show off your skills, it's also your opportunity to show how those skills can help improve the employer's business, and what you can do for them. Hospitals, clinics, and other healthcare facilities want to know they are hiring someone who can make a difference to their patients in that specific care setting. Writing a cover letter that focuses on how you can make a difference for them is an excellent reason to include it with your application. Of course, the most obvious reason to include a cover letter is because the position asks for one. A study on CareerBuilder found that 49% of hiring managers consider a cover letter the second best (after customizing your resume) skill to boost your chances of getting hired. According to that same study, 70% of employers spend less than 5 minutes on a resume, and 48% spend less than two. However, many employers will actually spend a little more time reading through a cover letter, so it can help to emphasize any points on your resume they may have glossed over on the first read. If you don’t consider yourself a strong writer, however, you may be in luck. There are many circumstances where a cover letter may not be necessary for you to land the job you want. Here are a few situations where a cover letter won’t be necessary. In a study conducted by the Society for Human Resource Management (SHRM), 85% of employers are actively recruiting candidates on social media. In this case, the candidates are mainly passive (meaning they aren’t actively looking to leave their jobs), so these openings typically won’t require a cover letter. When you work with a company like Relode, you have someone advocating on your behalf to the client. 
This means you don’t need a cover letter to “sell” your strengths to a future employer. Instead, our Relode talent advisors and account executives act as the middleman to explain your qualifications to hiring managers and executives, ensuring that your next position is a fit for you and for the employer. See how Relode can help with your job search. Browse open positions here.
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python2, python3 """DeepQNetwork models for molecule generation.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import numpy as np from rdkit import Chem from rdkit import DataStructs from rdkit.Chem import AllChem from six.moves import range from six.moves import zip import tensorflow.compat.v1 as tf from tensorflow.contrib import layers as contrib_layers from tensorflow.contrib import training as contrib_training class DeepQNetwork(object): """Deep Q Network. This class implements the network as used in the Nature (2015) paper. Human-level control through deep reinforcement learning https://www.nature.com/articles/nature14236 https://storage.googleapis.com/deepmind-data/assets/papers/DeepMindNature14236Paper.pdf """ def __init__(self, input_shape, q_fn, learning_rate=0.001, learning_rate_decay_steps=10000, learning_rate_decay_rate=0.8, optimizer='Adam', grad_clipping=None, gamma=1.0, epsilon=0.2, double_q=True, num_bootstrap_heads=10, scope='dqn', reuse=None): """Creates the model function. Args: input_shape: Tuple. The shape of input. q_fn: A function, whose input is the observation features, and the output is the Q value of the observation. learning_rate: Float. The learning rate of the optimizer. learning_rate_decay_steps: Integer. 
The number of steps between each learning rate decay. learning_rate_decay_rate: Float. The rate of learning rate decay. optimizer: String. Which optimizer to use. grad_clipping: Boolean. Whether to clip gradient. gamma: Float. Discount factor. epsilon: Float. The probability of choosing a random action. double_q: Boolean. Whether to use double q learning. num_bootstrap_heads: Integer. The number of bootstrap heads to use. scope: String or VariableScope. Variable Scope. reuse: Boolean or None. Whether or not the variable should be reused. """ self.input_shape = input_shape self.q_fn = q_fn self.learning_rate = learning_rate self.learning_rate_decay_steps = learning_rate_decay_steps self.learning_rate_decay_rate = learning_rate_decay_rate self.optimizer = optimizer self.grad_clipping = grad_clipping self.gamma = gamma self.num_bootstrap_heads = num_bootstrap_heads self.double_q = double_q self.scope = scope self.reuse = reuse self.epsilon = epsilon def build(self): """Builds the computational graph and training operations.""" self._build_graph() self._build_training_ops() self._build_summary_ops() def _build_single_q_network(self, observations, head, state_t, state_tp1, done_mask, reward_t, error_weight): """Builds the computational graph for a single Q network. Briefly, this part is calculating the following two quantities: 1. q_value = q_fn(observations) 2. td_error = q_fn(state_t) - reward_t - gamma * q_fn(state_tp1) The optimization target is to minimize the td_error. Args: observations: shape = [batch_size, hparams.fingerprint_length]. The input of the Q function. head: shape = [1]. The index of the head chosen for decision in bootstrap DQN. state_t: shape = [batch_size, hparams.fingerprint_length]. The state at time step t. state_tp1: a list of tensors, with total number of batch_size, each has shape = [num_actions, hparams.fingerprint_length]. Note that the num_actions can be different for each tensor. The state at time step t+1, tp1 is short for t plus 1. 
done_mask: shape = [batch_size, 1] Whether state_tp1 is the terminal state. reward_t: shape = [batch_size, 1] the reward at time step t. error_weight: shape = [batch_size, 1] weight for the loss. Returns: q_values: Tensor of [batch_size, 1]. The q values for the observations. td_error: Tensor of [batch_size, 1]. The TD error. weighted_error: Tensor of [batch_size, 1]. The TD error weighted by error_weight. q_fn_vars: List of tf.Variables. The variables of q_fn when computing the q_values of state_t q_fn_vars: List of tf.Variables. The variables of q_fn when computing the q_values of state_tp1 """ with tf.variable_scope('q_fn'): # q_value have shape [batch_size, 1]. q_values = tf.gather(self.q_fn(observations), head, axis=-1) # calculating q_fn(state_t) # The Q network shares parameters with the action graph. with tf.variable_scope('q_fn', reuse=True): q_t = self.q_fn(state_t, reuse=True) q_fn_vars = tf.trainable_variables(scope=tf.get_variable_scope().name + '/q_fn') # calculating q_fn(state_tp1) with tf.variable_scope('q_tp1', reuse=tf.AUTO_REUSE): q_tp1 = [self.q_fn(s_tp1, reuse=tf.AUTO_REUSE) for s_tp1 in state_tp1] q_tp1_vars = tf.trainable_variables(scope=tf.get_variable_scope().name + '/q_tp1') if self.double_q: with tf.variable_scope('q_fn', reuse=True): q_tp1_online = [self.q_fn(s_tp1, reuse=True) for s_tp1 in state_tp1] if self.num_bootstrap_heads: num_heads = self.num_bootstrap_heads else: num_heads = 1 # determine the action to choose based on online Q estimator. q_tp1_online_idx = [ tf.stack( [tf.argmax(q, axis=0), tf.range(num_heads, dtype=tf.int64)], axis=1) for q in q_tp1_online ] # use the index from max online q_values to compute the value # function v_tp1 = tf.stack( [tf.gather_nd(q, idx) for q, idx in zip(q_tp1, q_tp1_online_idx)], axis=0) else: v_tp1 = tf.stack([tf.reduce_max(q) for q in q_tp1], axis=0) # if s_{t+1} is the terminal state, we do not evaluate the Q value of # the state. 
q_tp1_masked = (1.0 - done_mask) * v_tp1 q_t_target = reward_t + self.gamma * q_tp1_masked # stop gradient from flowing to the computating graph which computes # the Q value of s_{t+1}. # td_error has shape [batch_size, 1] td_error = q_t - tf.stop_gradient(q_t_target) # If use bootstrap, each head is trained with a different subset of the # training sample. Like the idea of dropout. if self.num_bootstrap_heads: head_mask = tf.keras.backend.random_binomial( shape=(1, self.num_bootstrap_heads), p=0.6) td_error = tf.reduce_mean(td_error * head_mask, axis=1) # The loss comes from a traditional trick in convex optimization: # http://web.stanford.edu/~boyd/cvxbook/. # See Chapter 6 pp. 298 # It will makes the optimization robust. # Specifically, the loss will use l1 instead of l2 loss when the td error # gets larger than 1.0. The l2 loss has the disadvantage that it has # the tendency to be dominated by outliers. In terms of estimation theory, # the asymptotic relative efficiency of the l1 loss estimator is better # for heavy-tailed distributions. errors = tf.where( tf.abs(td_error) < 1.0, tf.square(td_error) * 0.5, 1.0 * (tf.abs(td_error) - 0.5)) weighted_error = tf.reduce_mean(error_weight * errors) return q_values, td_error, weighted_error, q_fn_vars, q_tp1_vars def _build_input_placeholder(self): """Creates the input placeholders. Input placeholders created: observations: shape = [batch_size, hparams.fingerprint_length]. The input of the Q function. head: shape = [1]. The index of the head chosen for decision. state_t: shape = [batch_size, hparams.fingerprint_length]. The state at time step t. state_tp1: a list of tensors, each has shape = [num_actions, hparams.fingerprint_length]. Note that the num_actions can be different for each tensor. The state at time step t+1. done_mask: shape = [batch_size, 1] Whether state_tp1 is the terminal state. error_weight: shape = [batch_size, 1] weight for the loss. 
""" batch_size, fingerprint_length = self.input_shape with tf.variable_scope(self.scope, reuse=self.reuse): # Build the action graph to choose an action. # The observations, which are the inputs of the Q function. self.observations = tf.placeholder( tf.float32, [None, fingerprint_length], name='observations') # head is the index of the head we want to choose for decison. # See https://arxiv.org/abs/1703.07608 self.head = tf.placeholder(tf.int32, [], name='head') # When sample from memory, the batch_size can be fixed, as it is # possible to sample any number of samples from memory. # state_t is the state at time step t self.state_t = tf.placeholder( tf.float32, self.input_shape, name='state_t') # state_tp1 is the state at time step t + 1, tp1 is short for t plus 1. self.state_tp1 = [ tf.placeholder( tf.float32, [None, fingerprint_length], name='state_tp1_%i' % i) for i in range(batch_size) ] # done_mask is a {0, 1} tensor indicating whether state_tp1 is the # terminal state. self.done_mask = tf.placeholder( tf.float32, (batch_size, 1), name='done_mask') self.error_weight = tf.placeholder( tf.float32, (batch_size, 1), name='error_weight') def _build_graph(self): """Builds the computational graph. Input placeholders created: reward_t: shape = [batch_size, 1] the reward at time step t. Instance attributes created: q_values: the q values of the observations. q_fn_vars: the variables in q function. q_tp1_vars: the variables in q_tp1 function. td_error: the td_error. weighted_error: the weighted td error. action: the action to choose next step. """ batch_size, _ = self.input_shape with tf.variable_scope(self.scope, reuse=self.reuse): self._build_input_placeholder() self.reward_t = tf.placeholder( tf.float32, (batch_size, 1), name='reward_t') # The Q network shares parameters with the action graph. # tenors start with q or v have shape [batch_size, 1] when not using # bootstrap. 
When using bootstrap, the shapes are # [batch_size, num_bootstrap_heads] (self.q_values, self.td_error, self.weighted_error, self.q_fn_vars, self.q_tp1_vars) = self._build_single_q_network( self.observations, self.head, self.state_t, self.state_tp1, self.done_mask, self.reward_t, self.error_weight) self.action = tf.argmax(self.q_values) def _build_training_ops(self): """Creates the training operations. Instance attributes created: optimization_op: the operation of optimize the loss. update_op: the operation to update the q network. """ with tf.variable_scope(self.scope, reuse=self.reuse): self.optimization_op = contrib_layers.optimize_loss( loss=self.weighted_error, global_step=tf.train.get_or_create_global_step(), learning_rate=self.learning_rate, optimizer=self.optimizer, clip_gradients=self.grad_clipping, learning_rate_decay_fn=functools.partial( tf.train.exponential_decay, decay_steps=self.learning_rate_decay_steps, decay_rate=self.learning_rate_decay_rate), variables=self.q_fn_vars) self.update_op = [] for var, target in zip( sorted(self.q_fn_vars, key=lambda v: v.name), sorted(self.q_tp1_vars, key=lambda v: v.name)): self.update_op.append(target.assign(var)) self.update_op = tf.group(*self.update_op) def _build_summary_ops(self): """Creates the summary operations. Input placeholders created: smiles: the smiles string. reward: the reward. Instance attributes created: error_summary: the operation to log the summary of error. episode_summary: the operation to log the smiles string and reward. """ with tf.variable_scope(self.scope, reuse=self.reuse): with tf.name_scope('summaries'): # The td_error here is the difference between q_t and q_t_target. # Without abs(), the summary of td_error is actually underestimated. 
self.error_summary = tf.summary.scalar( 'td_error', tf.reduce_mean(tf.abs(self.td_error))) self.smiles = tf.placeholder(tf.string, [], 'summary_smiles') self.reward = tf.placeholder(tf.float32, [], 'summary_reward') smiles_summary = tf.summary.text('SMILES', self.smiles) reward_summary = tf.summary.scalar('reward', self.reward) self.episode_summary = tf.summary.merge( [smiles_summary, reward_summary]) def log_result(self, smiles, reward): """Summarizes the SMILES string and reward at the end of an episode. Args: smiles: String. The SMILES string. reward: Float. The reward. Returns: the summary protobuf """ return tf.get_default_session().run( self.episode_summary, feed_dict={ self.smiles: smiles, self.reward: reward }) def _run_action_op(self, observations, head): """Function that runs the op calculating an action given the observations. Args: observations: np.array. shape = [num_actions, fingerprint_length]. Observations that can be feed into the Q network. head: Integer. The output index to use. Returns: Integer. which action to be performed. """ return np.asscalar(tf.get_default_session().run( self.action, feed_dict={ self.observations: observations, self.head: head })) def get_action(self, observations, stochastic=True, head=0, update_epsilon=None): """Function that chooses an action given the observations. Args: observations: np.array. shape = [num_actions, fingerprint_length]. Observations that can be feed into the Q network. stochastic: Boolean. If set to False all the actions are always deterministic (default True). head: Integer. The output index to use. update_epsilon: Float or None. update epsilon a new value, if None no update happens (default: no update). Returns: Integer. which action to be performed. 
""" if update_epsilon is not None: self.epsilon = update_epsilon if stochastic and np.random.uniform() < self.epsilon: return np.random.randint(0, observations.shape[0]) else: return self._run_action_op(observations, head) def train(self, states, rewards, next_states, done, weight, summary=True): """Function that takes a transition (s,a,r,s') and optimizes Bellman error. Args: states: object, a batch of observations. rewards: np.array, immediate reward attained after executing those actions dtype must be float32 and shape must be (batch_size,). next_states: object, observations that followed states. done: np.array, 1 if obs_t was the last observation in the episode and 0 otherwise obs_tp1 gets ignored, but must be of the valid shape. dtype must be float32 and shape must be (batch_size,). weight: np.array, importance sampling weights for every element of the batch. dtype must be float32 and shape must be (batch_size,). summary: Boolean, whether to get summary. Returns: td_error: np.array. a list of differences between Q(s,a) and the target in Bellman's equation. dtype is float32 and shape is (batch_size,). """ if summary: ops = [self.td_error, self.error_summary, self.optimization_op] else: ops = [self.td_error, self.optimization_op] feed_dict = { self.state_t: states, self.reward_t: rewards, self.done_mask: done, self.error_weight: weight } for i, next_state in enumerate(next_states): feed_dict[self.state_tp1[i]] = next_state return tf.get_default_session().run(ops, feed_dict=feed_dict) class MultiObjectiveDeepQNetwork(DeepQNetwork): """Multi Objective Deep Q Network. The idea is described in Multiobjective Reinforcement Learning: A Comprehensive Overview https://ieeexplore.ieee.org/document/6918520/ Briefly, the difference between this Multi Objective Deep Q Network and a naive Deep Q Network is that this one uses one Q network for approximating each of the objectives. And a weighted sum of those Q values are used for decision making. 
The loss is the summation of the losses of each Q network. """ def __init__(self, objective_weight, **kwargs): """Creates the model function. Args: objective_weight: np.array with shape [num_objectives, 1]. The weight vector for the objectives. **kwargs: arguments for the DeepQNetworks class. """ # Normalize the sum to 1. self.objective_weight = objective_weight / np.sum(objective_weight) self.num_objectives = objective_weight.shape[0] super(MultiObjectiveDeepQNetwork, self).__init__(**kwargs) def _build_graph(self): """Builds the computational graph. Input placeholders created: observations: shape = [batch_size, hparams.fingerprint_length]. The input of the Q function. head: shape = [1]. The index of the head chosen for decision. objective_weight: shape = [num_objectives, 1]. objective_weight is the weight to scalarize the objective vector: reward = sum (objective_weight_i * objective_i) state_t: shape = [batch_size, hparams.fingerprint_length]. The state at time step t. state_tp1: a list of tensors, each has shape = [num_actions, hparams.fingerprint_length]. Note that the num_actions can be different for each tensor. The state at time step t+1. done_mask: shape = [batch_size, 1] Whether state_tp1 is the terminal state. reward_t: shape = [batch_size, num_objectives] the reward at time step t. error weight: shape = [batch_size, 1] weight for the loss. Instance attributes created: q_values: List of Tensors of [batch_size, 1]. The q values for the observations. td_error: List of Tensor of [batch_size, 1]. The TD error. weighted_error: List of Tensor of [batch_size, 1]. The TD error weighted by importance sampling weight. q_fn_vars: List of tf.Variables. The variables of q_fn when computing the q_values of state_t q_fn_vars: List of tf.Variables. 
The variables of q_fn when computing the q_values of state_tp1 """ batch_size, _ = self.input_shape with tf.variable_scope(self.scope, reuse=self.reuse): self._build_input_placeholder() self.reward_t = tf.placeholder( tf.float32, (batch_size, self.num_objectives), name='reward_t') # objective_weight is the weight to scalarize the objective vector: # reward = sum (objective_weight_i * objective_i) self.objective_weight_input = tf.placeholder( tf.float32, [self.num_objectives, 1], name='objective_weight') # split reward for each q network rewards_list = tf.split(self.reward_t, self.num_objectives, axis=1) q_values_list = [] self.td_error = [] self.weighted_error = 0 self.q_fn_vars = [] self.q_tp1_vars = [] # build a Q network for each objective for obj_idx in range(self.num_objectives): with tf.variable_scope('objective_%i' % obj_idx): (q_values, td_error, weighted_error, q_fn_vars, q_tp1_vars) = self._build_single_q_network( self.observations, self.head, self.state_t, self.state_tp1, self.done_mask, rewards_list[obj_idx], self.error_weight) q_values_list.append(tf.expand_dims(q_values, 1)) # td error is for summary only. # weighted error is the optimization goal. self.td_error.append(td_error) self.weighted_error += weighted_error / self.num_objectives self.q_fn_vars += q_fn_vars self.q_tp1_vars += q_tp1_vars q_values = tf.concat(q_values_list, axis=1) # action is the one that leads to the maximum weighted reward. self.action = tf.argmax( tf.matmul(q_values, self.objective_weight_input), axis=0) def _build_summary_ops(self): """Creates the summary operations. Input placeholders created: smiles: the smiles string. rewards: the rewards. weighted_reward: the weighted sum of the rewards. Instance attributes created: error_summary: the operation to log the summary of error. episode_summary: the operation to log the smiles string and reward. 
""" with tf.variable_scope(self.scope, reuse=self.reuse): with tf.name_scope('summaries'): # The td_error here is the difference between q_t and q_t_target. # Without abs(), the summary of td_error is actually underestimated. error_summaries = [ tf.summary.scalar('td_error_%i' % i, tf.reduce_mean(tf.abs(self.td_error[i]))) for i in range(self.num_objectives) ] self.error_summary = tf.summary.merge(error_summaries) self.smiles = tf.placeholder(tf.string, [], 'summary_smiles') self.rewards = [ tf.placeholder(tf.float32, [], 'summary_reward_obj_%i' % i) for i in range(self.num_objectives) ] # Weighted sum of the rewards. self.weighted_reward = tf.placeholder(tf.float32, [], 'summary_reward_sum') smiles_summary = tf.summary.text('SMILES', self.smiles) reward_summaries = [ tf.summary.scalar('reward_obj_%i' % i, self.rewards[i]) for i in range(self.num_objectives) ] reward_summaries.append( tf.summary.scalar('sum_reward', self.rewards[-1])) self.episode_summary = tf.summary.merge([smiles_summary] + reward_summaries) def log_result(self, smiles, reward): """Summarizes the SMILES string and reward at the end of an episode. Args: smiles: String. The SMILES string. reward: List of Float. The rewards for each objective. Returns: the summary protobuf. """ feed_dict = { self.smiles: smiles, } for i, reward_value in enumerate(reward): feed_dict[self.rewards[i]] = reward_value # calculated the weighted sum of the rewards. feed_dict[self.weighted_reward] = np.asscalar( np.array([reward]).dot(self.objective_weight)) return tf.get_default_session().run( self.episode_summary, feed_dict=feed_dict) def _run_action_op(self, observations, head): """Function that runs the op calculating an action given the observations. Args: observations: np.array. shape = [num_actions, fingerprint_length]. Observations that can be feed into the Q network. head: Integer. The output index to use. Returns: Integer. which action to be performed. 
""" return np.asscalar(tf.get_default_session().run( self.action, feed_dict={ self.observations: observations, self.objective_weight_input: self.objective_weight, self.head: head })) def multi_layer_model(inputs, hparams, reuse=None): """Multi-layer model for q learning. Args: inputs: Tensor. The input. hparams: tf.HParameters. The hyper-parameters. reuse: Boolean. Whether the parameters should be reused. Returns: Tensor. shape = [batch_size, hparams.num_bootstrap_heads]. The output. """ output = inputs for i, units in enumerate(hparams.dense_layers): output = tf.layers.dense(output, units, name='dense_%i' % i, reuse=reuse) output = getattr(tf.nn, hparams.activation)(output) if hparams.batch_norm: output = tf.layers.batch_normalization( output, fused=True, name='bn_%i' % i, reuse=reuse) if hparams.num_bootstrap_heads: output_dim = hparams.num_bootstrap_heads else: output_dim = 1 output = tf.layers.dense(output, output_dim, name='final', reuse=reuse) return output def get_hparams(**kwargs): """Get the hyperparameters for the model from a json object. Args: **kwargs: Dict of parameter overrides. Possible keyword arguments: atom_types: Dict. The possible atom types in the molecule. max_steps_per_episode: Integer. The maximum number of steps for one episode. allow_removal: Boolean. Whether to allow removal of a bond. allow_no_modification: Boolean. If true, the valid action set will include doing nothing to the current molecule, i.e., the current molecule itself will be added to the action set. replay_buffer_size: Integer. The size of the replay buffer. learning_rate: Float. Learning rate. learning_rate_decay_steps: Integer. The number of steps between each learning rate decay. learning_rate_decay_rate: Float. The rate of learning rate decay. num_episodes: Integer. Number of episodes to run. batch_size: Integer. The batch size. learning_frequency: Integer. The number of steps between each training operation. update_frequency: Integer. 
The number of steps between each update of the target Q network grad_clipping: Integer. maximum value of the gradient norm. gamma: Float. The discount factor for the reward. double_q: Boolean. Whether to used double Q learning. See https://arxiv.org/abs/1509.06461 for detail. bootstrap: Integer. The number of bootstrap heads. See https://arxiv.org/abs/1703.07608 for detail. prioritized: Boolean. Whether to use prioritized replay. See https://arxiv.org/abs/1511.05952 for detail. prioritized_alpha: Float. The parameter alpha in the prioritized replay. prioritized_beta: Float. The parameter beta in the prioritized replay. prioritized_epsilon: Float. The parameter epsilon in the prioritized replay. fingerprint_radius: Integer. The radius of the Morgan fingerprint. fingerprint_length: Integer. The length of the Morgan fingerprint. dense_layers: List of integers. The hidden units in the dense layers. activation: String. The activation function to use. optimizer: String. The optimizer to use. batch_norm: Boolean. Whether to use batch normalization. save_frequency: Integer. The number of episodes between each saving. Returns: A HParams object containing all the hyperparameters. 
""" hparams = contrib_training.HParams( atom_types=['C', 'O', 'N'], max_steps_per_episode=40, allow_removal=True, allow_no_modification=True, allow_bonds_between_rings=False, allowed_ring_sizes=[3, 4, 5, 6], replay_buffer_size=1000000, learning_rate=1e-4, learning_rate_decay_steps=10000, learning_rate_decay_rate=0.8, num_episodes=5000, batch_size=64, learning_frequency=4, update_frequency=20, grad_clipping=10.0, gamma=0.9, double_q=True, num_bootstrap_heads=12, prioritized=False, prioritized_alpha=0.6, prioritized_beta=0.4, prioritized_epsilon=1e-6, fingerprint_radius=3, fingerprint_length=2048, dense_layers=[1024, 512, 128, 32], activation='relu', optimizer='Adam', batch_norm=False, save_frequency=1000, max_num_checkpoints=100, discount_factor=0.7) return hparams.override_from_dict(kwargs) def get_fingerprint(smiles, hparams): """Get Morgan Fingerprint of a specific SMILES string. Args: smiles: String. The SMILES string of the molecule. hparams: tf.contrib.training.HParams. Hyper parameters. Returns: np.array. shape = [hparams.fingerprint_length]. The Morgan fingerprint. """ if smiles is None: return np.zeros((hparams.fingerprint_length,)) molecule = Chem.MolFromSmiles(smiles) if molecule is None: return np.zeros((hparams.fingerprint_length,)) fingerprint = AllChem.GetMorganFingerprintAsBitVect( molecule, hparams.fingerprint_radius, hparams.fingerprint_length) arr = np.zeros((1,)) # ConvertToNumpyArray takes ~ 0.19 ms, while # np.asarray takes ~ 4.69 ms DataStructs.ConvertToNumpyArray(fingerprint, arr) return arr def get_fingerprint_with_steps_left(smiles, steps_left, hparams): """Get Morgan Fingerprint of a SMILES string with number of steps left. If fixing the max num of steps can be taken in a MDP, the MDP is then a time-heterogeneous one. Therefore a time dependent policy is needed for optimal performance. Args: smiles: String. The SMILES string of the molecule. steps_left: Integer. The number of steps left in the environment. 
hparams: tf.contrib.training.HParams. Hyper parameters. Returns: np.array. shape = [hparams.fingerprint_length + 1]. The fingerprint. """ fingerprint = get_fingerprint(smiles, hparams) return np.append(fingerprint, steps_left)
Your company will need a PSV operator’s licence for your PSV driver, also called PCV driver since a regulatory change in 1992 (!), to be able to drive a vehicle that carries 9 or more passengers, or if you’re planning to charge separate fares for the journey. You’ll need to ensure that the PSV driver you’re hiring has a Category D licence. Depending on the size and type of vehicle you’ll want them to drive, you may need them to have a Category D1 licence (if you need them to drive a minibus with seats for 9 to 16 passengers with a trailer of up to 750kg), D1 + E (if you need them to drive a minibus with seats for up to 16 passengers and a trailer weighing more than 750kg), D (if they’ll drive a bus with more than 8 seats and a trailer of up to 750 kg), or D + E (if they’ll drive any coach or bus and tow a trailer weighing more than 750 kg). If your future driver is responsible for driving vulnerable people (e.g., children to and from their schools, passengers with disabilities), you’ll need to carry out enhanced background checks, now called DBS checks. A good PSV driver is not just good at driving, but should provide great customer service too - helping older passengers with their luggage for example, with patience. Finally, a great knowledge of the local area is key, so if you’re using a recruitment agency to find your PSV driver, don’t forget to ask that your future PSV driver has lived in the area for a while. Typically, recruitment agencies will charge between £15.81 and £18.30 per hour for PSV drivers. This includes what the candidate is paid, their National Insurance and holiday payments, as well as the agency margin. Your budget should take into account the experience of the driver and their working location. For example, expect to pay more in London, where the Living Wage is £10.55 per hour.
###############################################################################
#   lazyflow: data flow based lazy parallel computation framework
#
#       Copyright (C) 2011-2014, the ilastik developers
#                                <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the Lesser GNU General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# See the files LICENSE.lgpl2 and LICENSE.lgpl3 for full text of the
# GNU Lesser General Public License version 2.1 and 3 respectively.
# This information is also available on the ilastik web site at:
# http://ilastik.org/license/
###############################################################################
from abc import abstractmethod, ABCMeta

#lazyflow
from lazyflow.operators.cacheMemoryManager import CacheMemoryManager


class Cache(object):
    """
    Interface for objects that act as caches.

    This is a mixin, use as

    >>> class MyCachingOperator(Cache, Operator):
    ...     pass

    This interface is designed for operators that hold values but can
    neither be queried for their memory usage nor be cleaned up. All
    operators that have non-negligible amounts of memory allocated
    internally *must* implement this interface. However, most operators
    that need to implement this interface *should* probably implement an
    extended interface (see below).

    This interface can still be useful for several purposes:
      * tell the user about memory consuming objects in general
        (e.g. in an environment like ilastik)
      * automated statistics and tests

    Almost all caches will want to call self.registerWithMemoryManager()
    to be handled by the cache memory manager thread.

    WARNING: If you plan to do time consuming operations in your
    __init__, be sure to make all cache API methods threadsafe. A cache
    cleanup could occur while the cache is still under construction!
    """
    __metaclass__ = ABCMeta

    def registerWithMemoryManager(self):
        """Register this cache with the global cache memory manager.

        Caches whose parent is not itself a Cache are registered as
        first-class caches; nested caches are registered as children so
        the manager can attribute their memory to the enclosing cache.
        """
        manager = CacheMemoryManager()
        # self.parent comes from the Operator side of the mixin.
        if self.parent is None or not isinstance(self.parent, Cache):
            manager.addFirstClassCache(self)
        else:
            manager.addCache(self)

    def generateReport(self, memInfoNode):
        """Fill *memInfoNode* with a status report for this cache.

        Recursively collects reports from all children that are caches
        themselves (``self.children`` comes from the Operator mixin).
        """
        rs = []
        for child in self.children:
            if not isinstance(child, Cache):
                continue
            r = MemInfoNode()
            child.generateReport(r)
            rs.append(r)
        memInfoNode.children = rs
        memInfoNode.type = type(self)
        memInfoNode.id = id(self)
        memInfoNode.name = self.name


class ObservableCache(Cache):
    """
    Interface for caches that can report their usage

    This interface is intended for caches that can be measured, but for
    which no (easy) cleanup method is known, or which do not want to be
    cleaned up by the cache memory manager.
    """

    @abstractmethod
    def usedMemory(self):
        """
        get used memory in bytes of this cache and all observable children
        """
        total = 0
        for child in self.children:
            if isinstance(child, ObservableCache):
                total += child.usedMemory()
        # BUG FIX: this default implementation previously returned 0
        # unconditionally, discarding the total it had just accumulated
        # over the observable children.
        return total

    @abstractmethod
    def fractionOfUsedMemoryDirty(self):
        """
        get fraction of used memory that is in a dirty state

        Dirty memory is memory that has been allocated, but cannot be
        used anymore. It is ok to always return 0 if there is no
        dirtiness management inside the cache. The returned value must
        lie in the range [0, 1].
        """
        return 0.0

    def generateReport(self, memInfoNode):
        """Extend the base report with memory usage figures."""
        super(ObservableCache, self).generateReport(memInfoNode)
        memInfoNode.usedMemory = self.usedMemory()
        memInfoNode.fractionOfUsedMemoryDirty =\
            self.fractionOfUsedMemoryDirty()


class ManagedCache(ObservableCache):
    """
    Interface for caches that can report their usage and can be cleaned up
    """

    # default storage for lastAccessTime() implementations
    _last_access_time = 0.0

    @abstractmethod
    def lastAccessTime(self):
        """
        get the timestamp of the last access (python timestamp)

        In general, time.time() should be used here. Don't be afraid to
        use the default implementation, i.e. fill the attribute
        _last_access_time.
        """
        return self._last_access_time

    @abstractmethod
    def freeMemory(self):
        """
        free all memory cached by this operator and its children

        The result of `freeMemory()` should be compatible with
        `usedMemory()`, i.e.

        >>> a = cache.usedMemory()
        >>> d = cache.freeMemory()
        >>> a - d == cache.usedMemory()
        True

        @return amount of bytes freed (if applicable)
        """
        raise NotImplementedError(
            "No default implementation for freeMemory()")

    @abstractmethod
    def freeDirtyMemory(self):
        """
        free all memory cached by this operator and its children that
        is marked as dirty

        This should not delete any non-dirty memory

        @return amount of bytes freed (if applicable)
        """
        raise NotImplementedError(
            "No default implementation for freeDirtyMemory()")

    def generateReport(self, memInfoNode):
        """Extend the observable report with the last access time."""
        super(ManagedCache, self).generateReport(memInfoNode)
        memInfoNode.lastAccessTime = self.lastAccessTime()


class ManagedBlockedCache(ManagedCache):
    """
    Interface for caches that can be managed in more detail
    """

    def lastAccessTime(self):
        """
        get the timestamp of the last access (python timestamp)

        The default method is to use the maximum of the block timestamps.
        """
        # A list comprehension (rather than map()) keeps this correct on
        # Python 3 as well, where map() returns a lazy iterator whose
        # truthiness test would always be True.
        timestamps = [t for _, t in self.getBlockAccessTimes()]
        if not timestamps:
            return 0.0
        return max(timestamps)

    @abstractmethod
    def getBlockAccessTimes(self):
        """
        get a list of block ids and their time stamps
        """
        raise NotImplementedError(
            "No default implementation for getBlockAccessTimes()")

    @abstractmethod
    def freeBlock(self, block_id):
        """
        free memory in a specific block

        The block_id argument must have been in the result of a call to
        getBlockAccessTimes. When all blocks returned by
        getBlockAccessTimes() are freed, the cache should be empty.

        @return amount of bytes freed (if applicable)
        """
        raise NotImplementedError(
            "No default implementation for freeBlock()")


class MemInfoNode:
    """
    aggregation of cache status indicators
    """

    # type
    type = None

    # object id
    id = None

    # used memory in bytes
    usedMemory = None

    # data type of single cache elements (if applicable)
    dtype = None

    # a region of interest this cache is assigned to
    # (mostly useful for wrapped caches as in OpBlockedArrayCache)
    roi = None

    # fraction of used memory that is dirty
    fractionOfUsedMemoryDirty = None

    # python timestamp of last access
    lastAccessTime = None

    # operator name
    name = None

    # additional info set by cache implementation
    info = None

    # reports for all of this operators children that are of type
    # OpObservableCache
    children = None

    def __init__(self):
        self.children = list()
The Mythos XTR series models are ultra-slim on-wall/on-shelf high performance loudspeakers designed to match today's super-shallow TVs. The award-winning Mythos XTR-50 was the first ultra-shallow (only 1.5" deep) high performance speaker to enter the market. Its patent-pending technologies and premium materials yielded sound performance that equaled or exceeded full-depth on-wall speakers. Now there are four XTR series models to answer any application need, including a dedicated surround channel speaker.
import urllib2 import urllib import urlparse import xml.etree.ElementTree import re from htmlentitydefs import name2codepoint import hashlib import sys import logging import time import os import json class Tvst(object): CLIENT_ID = 'va0D2CEfSPNNlLoYMYYT' CLIENT_SECRET = 'RF51gSEZBJAbLXmEUCZ8thJAwJPAyQSafCQCyqOt' USER_AGENT = 'plex-tvst-scrobbler' def __init__(self, cfg): self.logger = logging.getLogger(__name__) self.cfg = cfg def get_session(self): if os.path.exists(self.cfg.get('plex-tvst-scrobbler', 'session')): sessfp = open(self.cfg.get('plex-tvst-scrobbler', 'session'), 'r') session = sessfp.read().strip() sessfp.close() return session def _do_tvst_post(self, url, data): f = urllib2.Request(url) f.add_header('User-Agent', self.USER_AGENT) try: res = urllib2.urlopen(f, data) return json.load(res) except urllib2.URLError, e: self.logger.error('Unable to submit post data {url} - {error}'.format( url=url, error=e)) raise def _get_auth_infos(self): args = { 'client_id': self.CLIENT_ID } url = urlparse.urlunparse(('https', 'api.tvshowtime.com', '/v1/oauth/device/code', '', '', '')) res = self._do_tvst_post(url, urllib.urlencode(args)) return res def _get_access_token(self, code): args = { 'client_id': self.CLIENT_ID, 'client_secret': self.CLIENT_SECRET, 'code': code, } url = urlparse.urlunparse(('https', 'api.tvshowtime.com', '/v1/oauth/access_token', '', '', '')) res = self._do_tvst_post(url, urllib.urlencode(args)) return res def scrobble(self, show_id, season_number, number): session = self.get_session() self.logger.info(u'submitting {show_id} - S{season_number}E{number} to tvshowtime.com.'.format( show_id=show_id, season_number=season_number.zfill(2), number=number.zfill(2))) args = { 'access_token': session, 'show_id': show_id, 'season_number': season_number.zfill(2), 'number': number.zfill(2) } url = urlparse.urlunparse(('https', 'api.tvshowtime.com', '/v1/checkin', '', '', '')) try: res = self._do_tvst_post(url, urllib.urlencode(args)) except: return False 
return True def tvst_auth(self): print '== Requesting tvshowtime.com auth ==' auth_infos = self._get_auth_infos() accepted = 'n' print '\nPlease do the following to authorize the scrobbler:\n\n1/ Connect on {auth_url}\n2/ Enter the code: {code}'.format( auth_url=auth_infos['verification_url'], code=auth_infos['user_code']) while accepted.lower() == 'n': print accepted = raw_input('Have you authorized me [y/N] :') try: access_token_infos = self._get_access_token(auth_infos['device_code']) except urllib2.HTTPError, e: self.logger.error('Unable to send authorization request {error}'.format(error=e)) return False if access_token_infos['result'] != 'OK': print access_token_infos['message'] return token = access_token_infos['access_token'] fp = open(self.cfg.get('plex-tvst-scrobbler', 'session'), 'w') fp.write(token) fp.close() self.logger.info('TVShow Time authorization successful.')
The Test Valley School Computing Department have been very active within the local community this term. Test Valley School have been collaborating with local primary schools to develop innovative Computer Science sessions, designed to increase pupil engagement and interest in the cutting-edge field of computer programming and game design. This term, Test Valley School visited Lockerley Primary School and Stockbridge Primary School to run the sessions, with many more schools to benefit from the programme from September. The sessions, run by the new Head of Department Mr Willcox, allow Year 5 and 6 pupils to explore computational concepts using game design software created by Microsoft called ‘Kodu Game Labs’. The two-hour sessions engage pupils by introducing them to Computer Science concepts such as writing algorithms, exploring functions and understanding variables, by producing a computer game from the beginning. Pupils are required to build a virtual world, insert characters and objects, and then use ‘code’ to create algorithms and control all aspects of the virtual game. “When pupils join Test Valley School they are now learning how to write algorithms, implement variables and comment on the flow of computational information from Year 7; it is such an exciting time for pupils to become part of the new Computer Science curriculum” – Head of Department Mr Willcox. Test Valley School will continue to run sessions for primary schools across the local area into the autumn term, engaging pupils in Computer Science.
# Copyright (C) 2020 Christopher Gearhart
# chris@bblanimation.com
# http://bblanimation.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# System imports
# NONE!

# Blender imports
import bpy

# Module imports
# NOTE: wildcard imports supply several names used below (e.g. np,
# get_keys_in_brick, most_common) — they are project helpers, not stdlib.
from ..common import *
from ..general import *


def get_material_name(bricksdict, key, size, zstep, material_type, mat_shell_depth=1, custom_mat=None, random_mat_seed=1000, brick_mats=None):
    """Resolve the Blender material name for the brick at *key*.

    The material is chosen according to *material_type*:
      - per-brick custom material (takes precedence when the brick is on
        the material shell),
      - "CUSTOM": the supplied *custom_mat*,
      - "SOURCE": the most frequent source material within the brick,
      - "RANDOM": a deterministic pseudo-random pick from *brick_mats*
        seeded from *random_mat_seed* and the brick key.

    Returns the material's name, or "" when no material was resolved.
    """
    mat = None
    # NOTE(review): this branch truth-tests "custom_mat_name" but looks up
    # "mat_name" — presumably custom_mat_name is a flag and mat_name holds
    # the actual name; confirm against the bricksdict schema.
    if bricksdict[key]["custom_mat_name"] and is_mat_shell_val(bricksdict[key]["val"], mat_shell_depth):
        mat = bpy.data.materials.get(bricksdict[key]["mat_name"])
    elif material_type == "CUSTOM":
        mat = custom_mat
    elif material_type == "SOURCE":
        mat_name = get_most_frequent_mat_name(bricksdict, key, size, zstep, mat_shell_depth)
        # get the material for that mat_name
        mat = bpy.data.materials.get(mat_name)
    elif material_type == "RANDOM" and brick_mats is not None and len(brick_mats) > 0:
        if len(brick_mats) > 1:
            # Per-brick deterministic choice: the RandomState is re-seeded
            # from the global seed plus a digest of the brick key, so the
            # same brick always gets the same material.
            rand_state = np.random.RandomState(0)
            rand_state.seed(random_mat_seed + int(str(hash(key))[-9:]))
            rand_idx = rand_state.randint(0, len(brick_mats))
        else:
            rand_idx = 0
        mat_name = brick_mats[rand_idx]
        mat = bpy.data.materials.get(mat_name)
    # Fall back to the empty string when no material could be resolved
    # (e.g. unknown material_type, or the name was not found in bpy.data).
    mat_name = "" if mat is None else mat.name
    return mat_name


def get_most_frequent_mat_name(bricksdict, key, size, zstep, mat_shell_depth):
    """Return the dominant source material name inside the brick at *key*.

    Scans every bricksdict entry covered by the brick; tracks the material
    of the highest "val" seen, collecting shell materials along the way.
    When several shell materials were found, the most common one wins.
    """
    # initialize vars
    highest_val = 0
    mats_L = []
    mat_name = ""
    # get most frequent material in brick size
    keys_in_brick = get_keys_in_brick(bricksdict, size, zstep, key=key)
    for key0 in keys_in_brick:
        cur_brick_d = bricksdict[key0]
        if cur_brick_d["val"] >= highest_val:
            highest_val = cur_brick_d["val"]
            mat_name = cur_brick_d["mat_name"]
            # only materials on the shell (and non-empty) are candidates
            if is_mat_shell_val(cur_brick_d["val"], mat_shell_depth) and mat_name:
                mats_L.append(mat_name)
    # if multiple shell materials, use the most frequent one
    if len(mats_L) > 1:
        mat_name = most_common(mats_L)
    return mat_name


def is_mat_shell_val(val, mat_shell_depth=1):
    """True when *val* lies within the material shell.

    NOTE(review): assumes "val" is 1.0 on the surface and decreases with
    depth, so (1 - val) * 100 measures depth into the model — confirm
    against the bricksdict "val" convention.
    """
    return (1 - val) * 100 < mat_shell_depth
Cruise Planners, an American Express Travel Representative, is the nation’s largest home-based travel agent franchise network. As a Cruise Planners travel franchise owner, you have the flexibility to run your own travel agency business from anywhere. Cruise Planners provides you with access to award-winning, innovative marketing; cutting-edge mobile technology; dynamic, hands-on training; lead-generating tools, as well as professional coaching and development. You do not need travel agent experience to be a successful Cruise Planners travel agent. Cruise Planners has cutting-edge proprietary travel reservation and booking software. Our Training and Business Development teams will guide you through everything. Our award-winning marketing programs and tools keep you top of mind.
import pyopencl as cl
import pyopencl.array as clarray
from time import time
import numpy as np
import os

from median_of_medians import base_path
from common import *
from numpy import uint32, int32

# path to the shared OpenCL sources used by kernel builds elsewhere
common_lib_path = base_path

# --- OpenCL platform/device discovery -------------------------------------
# Select every GPU device on the first platform; a single shared context is
# created over all of them, with one profiling-enabled queue per device.
#ctx = cl.create_some_context()
platform = cl.get_platforms()[0]
devices = [device for device in platform.get_devices() if device.type == cl.device_type.GPU]
device = [devices[0]]
queue_properties = cl.command_queue_properties.PROFILING_ENABLE | cl.command_queue_properties.OUT_OF_ORDER_EXEC_MODE_ENABLE
ctx = cl.Context(devices)
queues = [cl.CommandQueue(ctx, device, properties=queue_properties) for device in devices]

#multicontext
#ctxs = [cl.Context(device) for device in devices]
#queues = [cl.CommandQueue(ctx, device, properties=queue_properties) for ctx, device in zip(ctxs, devices)]

# default queue: the first device's queue
queue = queues[0]

# NOTE(review): this relies on the Python 2 behavior that the list
# comprehension above leaks its loop variable, so `device` is here the LAST
# device object, not the `[devices[0]]` list assigned earlier. Under Python 3
# `device` would still be that list and this line would raise AttributeError.
# TODO confirm intent and bind an explicit device instead.
computeUnits = device.max_compute_units
# smallest wavefront/warp-aligned workgroup size across all selected devices
device_wg_size = min([wavefront_wg_size(device) for device in devices])
default_wg_size = device_wg_size
# vendor detection drives which -D flags cl_opt_decorate injects
is_amd_platform = all([is_device_amd(device) for device in devices])
is_nvidia_platform = all([is_device_nvidia(device) for device in devices])

def cl_opt_decorate(kop, CL_FLAGS, max_wg_size_used = None):
    """Prepend vendor- and debug-specific OpenCL build flags to CL_FLAGS.

    kop              kernel-operation object; only its `debug` attribute is read
                     (2 => full debug build, truthy => DEBUG define only)
    CL_FLAGS         (str) existing compiler flags to extend
    max_wg_size_used largest workgroup size the kernel will be launched with;
                     when it fits in one wavefront, PROMISE_WG_IS_WAVEFRONT is
                     defined so kernels may skip intra-wavefront barriers
                     (AMD only -- see note below for NVIDIA)
    Returns the decorated flag string.
    """
    if is_amd_platform:
        CL_FLAGS2 = '-D AMD_ARCH -D DEVICE_WAVEFRONT_SIZE={wavefront_size} '.format(wavefront_size=device_wg_size)
        if max_wg_size_used is not None and np.prod(max_wg_size_used, dtype=np.uint32) <= device_wg_size:
            CL_FLAGS2 = CL_FLAGS2 + '-D PROMISE_WG_IS_WAVEFRONT '
        CL_FLAGS = CL_FLAGS2 + CL_FLAGS
    elif is_nvidia_platform:
        CL_FLAGS2 = '-D NVIDIA_ARCH -D DEVICE_WAVEFRONT_SIZE={wavefront_size} '.format(wavefront_size=device_wg_size)
        # PROMISE_WG_IS_WAVEFRONT intentionally NOT set on NVIDIA:
        #if max_wg_size_used is not None and np.prod(max_wg_size_used, dtype=np.uint32) <= device_wg_size:
        #    CL_FLAGS2 = CL_FLAGS2 + '-D PROMISE_WG_IS_WAVEFRONT '
        #causes segfault in NvCliCompileBitcode - seems like internal compiler error
        CL_FLAGS = CL_FLAGS2 + CL_FLAGS
    if kop.debug == 2:
        # full debug: symbols and no optimization
        CL_FLAGS = '-D DEBUG -g -cl-opt-disable '+CL_FLAGS
    elif kop.debug:
        CL_FLAGS = '-D DEBUG '+CL_FLAGS
    return CL_FLAGS

def green(image):
    """Return a copy of the green channel of an (H, W, C) image array."""
    return image[:, :, 1].copy()
Past research I have been involved in includes the management of traditional woody features such as hedges for bioenergy (TWECOM), conservation tillage (TILMAN-ORG), and the use of cover crops in organic rotations (OSCAR). I am interested in whole-system approaches which link landscape ecology with sustainable food production systems and specifically in practical applied research which both connects and engages the farming community. Chambers, E.M., Crossland, E.M., Westaway, S. and Smith, J. (2015) Hedgerow harvesting machinery trials report for TWECOM. The Organic Research Centre. Crossland, E. M., Westaway, S., Gerrard, C. and Smith, J. (2015) Hedgerow Biodiversity Protocol 2015. User Guide. A protocol for monitoring the impacts of harvesting hedges for woodfuel. The Organic Research Centre.
# Copyright (c) 2007, 2008 Red Hat, Inc. # # This file is part of the Qpid async store library msgstore.so. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # USA # # The GNU Lesser General Public License is available in the file COPYING. import sys, re, traceback, socket from getopt import getopt, GetoptError from qpid.connection import Connection from qpid.util import connect from qpid.datatypes import Message, RangedSet from qpid.queue import Empty from qpid.session import SessionException from qpid.testlib import TestBase010 from time import sleep class PersistenceTest(TestBase010): XA_RBROLLBACK = 1 XA_RBTIMEOUT = 2 XA_OK = 0 def createMessage(self, **kwargs): session = self.session dp = {} dp['delivery_mode'] = 2 mp = {} for k, v in kwargs.iteritems(): if k in ['routing_key', 'delivery_mode']: dp[k] = v if k in ['message_id', 'correlation_id', 'application_headers']: mp[k] = v args = [] args.append(session.delivery_properties(**dp)) if len(mp): args.append(session.message_properties(**mp)) if kwargs.has_key('body'): args.append(kwargs['body']) return Message(*args) def phase1(self): session = self.session session.queue_declare(queue="queue-a", durable=True) session.queue_declare(queue="queue-b", durable=True) session.exchange_bind(queue="queue-a", exchange="amq.direct", binding_key="a") 
session.exchange_bind(queue="queue-b", exchange="amq.direct", binding_key="b") session.message_transfer(destination="amq.direct", message=self.createMessage(routing_key="a", correlation_id="Msg0001", body="A_Message1")) session.message_transfer(destination="amq.direct", message=self.createMessage(routing_key="b", correlation_id="Msg0002", body="B_Message1")) # session.queue_declare(queue="lvq-test", durable=True, arguments={"qpid.last_value_queue":True}) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"B"}, body="B1")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"A"}, body="A1")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"A"}, body="A2")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"B"}, body="B2")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"B"}, body="B3")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"C"}, body="C1")) def phase2(self): session = self.session #check queues exists session.queue_declare(queue="queue-a", durable=True, passive=True) session.queue_declare(queue="queue-b", durable=True, passive=True) #check they are still bound to amq.direct correctly responses = [] responses.append(session.exchange_bound(queue="queue-a", exchange="amq.direct", binding_key="a")) responses.append(session.exchange_bound(queue="queue-b", exchange="amq.direct", binding_key="b")) for r in responses: self.assert_(not r.exchange_not_found) self.assert_(not r.queue_not_found) self.assert_(not r.key_not_matched) #check expected messages are there self.assertMessageOnQueue("queue-a", "Msg0001", "A_Message1") self.assertMessageOnQueue("queue-b", "Msg0002", "B_Message1") 
self.assertEmptyQueue("queue-a") self.assertEmptyQueue("queue-b") session.queue_declare(queue="queue-c", durable=True) #send a message to a topic such that it reaches all queues session.exchange_bind(queue="queue-a", exchange="amq.topic", binding_key="abc") session.exchange_bind(queue="queue-b", exchange="amq.topic", binding_key="abc") session.exchange_bind(queue="queue-c", exchange="amq.topic", binding_key="abc") session.message_transfer(destination="amq.topic", message=self.createMessage(routing_key="abc", correlation_id="Msg0003", body="AB_Message2")) # #check LVQ exists and has exepected messages: # session.queue_declare(queue="lvq-test", durable=True, passive=True) # session.message_subscribe(destination="lvq", queue="lvq-test") # lvq = session.incoming("lvq") # lvq.start() # accepted = RangedSet() # for m in ["A2", "B3", "C1"]: # msg = lvq.get(timeout=1) # self.assertEquals(m, msg.body) # accepted.add(msg.id) # try: # extra = lvq.get(timeout=1) # self.fail("lvq-test not empty, contains: " + extra.body) # except Empty: None # #publish some more messages while subscriber is active (no replacement): # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"C"}, body="C2")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"C"}, body="C3")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"A"}, body="A3")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"A"}, body="A4")) # session.message_transfer(message=self.createMessage(routing_key="lvq-test", application_headers={"qpid.LVQ_key":"C"}, body="C4")) # #check that accepting replaced messages is safe # session.message_accept(accepted) def phase3(self): session = self.session # #lvq recovery validation # session.queue_declare(queue="lvq-test", durable=True, 
passive=True) # session.message_subscribe(destination="lvq", queue="lvq-test") # lvq = session.incoming("lvq") # lvq.start() # accepted = RangedSet() # lvq.start() # for m in ["C4", "A4"]: # msg = lvq.get(timeout=1) # self.assertEquals(m, msg.body) # accepted.add(msg.id) # session.message_accept(accepted) # try: # extra = lvq.get(timeout=1) # self.fail("lvq-test not empty, contains: " + extra.body) # except Empty: None # session.message_cancel(destination="lvq") # session.queue_delete(queue="lvq-test") #check queues exists session.queue_declare(queue="queue-a", durable=True, passive=True) session.queue_declare(queue="queue-b", durable=True, passive=True) session.queue_declare(queue="queue-c", durable=True, passive=True) session.tx_select() #check expected messages are there self.assertMessageOnQueue("queue-a", "Msg0003", "AB_Message2") self.assertMessageOnQueue("queue-b", "Msg0003", "AB_Message2") self.assertMessageOnQueue("queue-c", "Msg0003", "AB_Message2") self.assertEmptyQueue("queue-a") self.assertEmptyQueue("queue-b") self.assertEmptyQueue("queue-c") #note: default bindings must be restored for this to work session.message_transfer(message=self.createMessage( routing_key="queue-a", correlation_id="Msg0004", body="A_Message3")) session.message_transfer(message=self.createMessage( routing_key="queue-a", correlation_id="Msg0005", body="A_Message4")) session.message_transfer(message=self.createMessage( routing_key="queue-a", correlation_id="Msg0006", body="A_Message5")) session.tx_commit() #delete a queue session.queue_delete(queue="queue-c") session.message_subscribe(destination="ctag", queue="queue-a", accept_mode=0) session.message_flow(destination="ctag", unit=0, value=0xFFFFFFFF) session.message_flow(destination="ctag", unit=1, value=0xFFFFFFFF) included = session.incoming("ctag") msg1 = included.get(timeout=1) self.assertExpectedContent(msg1, "Msg0004", "A_Message3") msg2 = included.get(timeout=1) self.assertExpectedContent(msg2, "Msg0005", "A_Message4") 
msg3 = included.get(timeout=1) self.assertExpectedContent(msg3, "Msg0006", "A_Message5") self.ack(msg1, msg2, msg3) session.message_transfer(destination="amq.direct", message=self.createMessage( routing_key="queue-b", correlation_id="Msg0007", body="B_Message3")) session.tx_rollback() def phase4(self): session = self.session #check queues exists session.queue_declare(queue="queue-a", durable=True, passive=True) session.queue_declare(queue="queue-b", durable=True, passive=True) self.assertMessageOnQueue("queue-a", "Msg0004", "A_Message3") self.assertMessageOnQueue("queue-a", "Msg0005", "A_Message4") self.assertMessageOnQueue("queue-a", "Msg0006", "A_Message5") self.assertEmptyQueue("queue-a") self.assertEmptyQueue("queue-b") #check this queue doesn't exist try: session.queue_declare(queue="queue-c", durable=True, passive=True) raise Exception("Expected queue-c to have been deleted") except SessionException, e: self.assertEquals(404, e.args[0].error_code) def phase5(self): session = self.session queues = ["queue-a1", "queue-a2", "queue-b1", "queue-b2", "queue-c1", "queue-c2", "queue-d1", "queue-d2"] for q in queues: session.queue_declare(queue=q, durable=True) session.queue_purge(queue=q) session.message_transfer(message=self.createMessage( routing_key="queue-a1", correlation_id="MsgA", body="MessageA")) session.message_transfer(message=self.createMessage( routing_key="queue-b1", correlation_id="MsgB", body="MessageB")) session.message_transfer(message=self.createMessage( routing_key="queue-c1", correlation_id="MsgC", body="MessageC")) session.message_transfer(message=self.createMessage( routing_key="queue-d1", correlation_id="MsgD", body="MessageD")) session.dtx_select() txa = self.xid('a') txb = self.xid('b') txc = self.xid('c') txd = self.xid('d') self.txswap("queue-a1", "queue-a2", txa) self.txswap("queue-b1", "queue-b2", txb) self.txswap("queue-c1", "queue-c2", txc) self.txswap("queue-d1", "queue-d2", txd) #no queue should have any messages accessible for q in 
queues: self.assertEqual(0, session.queue_query(queue=q).message_count, "Bad count for %s" % (q)) self.assertEqual(self.XA_OK, session.dtx_commit(xid=txa, one_phase=True).status) self.assertEqual(self.XA_OK, session.dtx_rollback(xid=txb).status) self.assertEqual(self.XA_OK, session.dtx_prepare(xid=txc).status) self.assertEqual(self.XA_OK, session.dtx_prepare(xid=txd).status) #further checks not_empty = ["queue-a2", "queue-b1"] for q in queues: if q in not_empty: self.assertEqual(1, session.queue_query(queue=q).message_count, "Bad count for %s" % (q)) else: self.assertEqual(0, session.queue_query(queue=q).message_count, "Bad count for %s" % (q)) def phase6(self): session = self.session #check prepared transaction are reported correctly by recover txc = self.xid('c') txd = self.xid('d') xids = session.dtx_recover().in_doubt ids = [x.global_id for x in xids] #TODO: come up with nicer way to test these if txc.global_id not in ids: self.fail("Recovered xids not as expected. missing: %s" % (txc)) if txd.global_id not in ids: self.fail("Recovered xids not as expected. 
missing: %s" % (txd)) self.assertEqual(2, len(xids)) queues = ["queue-a1", "queue-a2", "queue-b1", "queue-b2", "queue-c1", "queue-c2", "queue-d1", "queue-d2"] not_empty = ["queue-a2", "queue-b1"] #re-check not_empty = ["queue-a2", "queue-b1"] for q in queues: if q in not_empty: self.assertEqual(1, session.queue_query(queue=q).message_count, "Bad count for %s" % (q)) else: self.assertEqual(0, session.queue_query(queue=q).message_count, "Bad count for %s" % (q)) #complete the prepared transactions self.assertEqual(self.XA_OK, session.dtx_commit(xid=txc).status) self.assertEqual(self.XA_OK, session.dtx_rollback(xid=txd).status) not_empty.append("queue-c2") not_empty.append("queue-d1") for q in queues: if q in not_empty: self.assertEqual(1, session.queue_query(queue=q).message_count) else: self.assertEqual(0, session.queue_query(queue=q).message_count) def phase7(self): session = self.session session.synchronous = False # check xids from phase 6 are gone txc = self.xid('c') txd = self.xid('d') xids = session.dtx_recover().in_doubt ids = [x.global_id for x in xids] #TODO: come up with nicer way to test these if txc.global_id in ids: self.fail("Xid still present : %s" % (txc)) if txd.global_id in ids: self.fail("Xid still present : %s" % (txc)) self.assertEqual(0, len(xids)) #test deletion of queue after publish #create queue session.queue_declare(queue = "q", auto_delete=True, durable=True) #send message for i in range(1, 10): session.message_transfer(message=self.createMessage(routing_key = "q", body = "my-message")) session.synchronous = True #explicitly delete queue session.queue_delete(queue = "q") #test acking of message from auto-deleted queue #create queue session.queue_declare(queue = "q", auto_delete=True, durable=True) #send message session.message_transfer(message=self.createMessage(routing_key = "q", body = "my-message")) #create consumer session.message_subscribe(queue = "q", destination = "a", accept_mode=0, acquire_mode=0) session.message_flow(unit = 1, 
value = 0xFFFFFFFF, destination = "a") session.message_flow(unit = 0, value = 10, destination = "a") queue = session.incoming("a") #consume the message, cancel subscription (triggering auto-delete), then ack it msg = queue.get(timeout = 5) session.message_cancel(destination = "a") self.ack(msg) #test implicit deletion of bindings when queue is deleted session.queue_declare(queue = "durable-subscriber-queue", exclusive=True, durable=True) session.exchange_bind(exchange="amq.topic", queue="durable-subscriber-queue", binding_key="xyz") session.message_transfer(destination= "amq.topic", message=self.createMessage(routing_key = "xyz", body = "my-message")) session.queue_delete(queue = "durable-subscriber-queue") #test unbind: #create a series of bindings to a queue session.queue_declare(queue = "binding-test-queue", durable=True) session.exchange_bind(exchange="amq.direct", queue="binding-test-queue", binding_key="abc") session.exchange_bind(exchange="amq.direct", queue="binding-test-queue", binding_key="pqr") session.exchange_bind(exchange="amq.direct", queue="binding-test-queue", binding_key="xyz") session.exchange_bind(exchange="amq.match", queue="binding-test-queue", binding_key="a", arguments={"x-match":"all", "p":"a"}) session.exchange_bind(exchange="amq.match", queue="binding-test-queue", binding_key="b", arguments={"x-match":"all", "p":"b"}) session.exchange_bind(exchange="amq.match", queue="binding-test-queue", binding_key="c", arguments={"x-match":"all", "p":"c"}) #then restart broker... 
def phase8(self): session = self.session #continue testing unbind: #send messages to the queue via each of the bindings for k in ["abc", "pqr", "xyz"]: data = "first %s" % (k) session.message_transfer(destination= "amq.direct", message=self.createMessage(routing_key=k, body=data)) for a in [{"p":"a"}, {"p":"b"}, {"p":"c"}]: data = "first %s" % (a["p"]) session.message_transfer(destination="amq.match", message=self.createMessage(application_headers=a, body=data)) #unbind some bindings (using final 0-10 semantics) session.exchange_unbind(exchange="amq.direct", queue="binding-test-queue", binding_key="pqr") session.exchange_unbind(exchange="amq.match", queue="binding-test-queue", binding_key="b") #send messages again for k in ["abc", "pqr", "xyz"]: data = "second %s" % (k) session.message_transfer(destination= "amq.direct", message=self.createMessage(routing_key=k, body=data)) for a in [{"p":"a"}, {"p":"b"}, {"p":"c"}]: data = "second %s" % (a["p"]) session.message_transfer(destination="amq.match", message=self.createMessage(application_headers=a, body=data)) #check that only the correct messages are received expected = [] for k in ["abc", "pqr", "xyz"]: expected.append("first %s" % (k)) for a in [{"p":"a"}, {"p":"b"}, {"p":"c"}]: expected.append("first %s" % (a["p"])) for k in ["abc", "xyz"]: expected.append("second %s" % (k)) for a in [{"p":"a"}, {"p":"c"}]: expected.append("second %s" % (a["p"])) session.message_subscribe(queue = "binding-test-queue", destination = "binding-test") session.message_flow(unit = 1, value = 0xFFFFFFFF, destination = "binding-test") session.message_flow(unit = 0, value = 10, destination = "binding-test") queue = session.incoming("binding-test") while len(expected): msg = queue.get(timeout=1) if msg.body not in expected: self.fail("Missing message: %s" % msg.body) expected.remove(msg.body) try: msg = queue.get(timeout=1) self.fail("Got extra message: %s" % msg.body) except Empty: pass session.queue_declare(queue = 
"durable-subscriber-queue", exclusive=True, durable=True) session.exchange_bind(exchange="amq.topic", queue="durable-subscriber-queue", binding_key="xyz") session.message_transfer(destination= "amq.topic", message=self.createMessage(routing_key = "xyz", body = "my-message")) session.queue_delete(queue = "durable-subscriber-queue") def xid(self, txid, branchqual = ''): return self.session.xid(format=0, global_id=txid, branch_id=branchqual) def txswap(self, src, dest, tx): self.assertEqual(self.XA_OK, self.session.dtx_start(xid=tx).status) self.session.message_subscribe(destination="temp-swap", queue=src, accept_mode=0) self.session.message_flow(destination="temp-swap", unit=0, value=1) self.session.message_flow(destination="temp-swap", unit=1, value=0xFFFFFFFF) msg = self.session.incoming("temp-swap").get(timeout=1) self.session.message_cancel(destination="temp-swap") self.session.message_transfer(message=self.createMessage(routing_key=dest, correlation_id=self.getProperty(msg, 'correlation_id'), body=msg.body)) self.ack(msg) self.assertEqual(self.XA_OK, self.session.dtx_end(xid=tx).status) def assertEmptyQueue(self, name): self.assertEqual(0, self.session.queue_query(queue=name).message_count) def assertConnectionException(self, expectedCode, message): self.assertEqual("connection", message.method.klass.name) self.assertEqual("close", message.method.name) self.assertEqual(expectedCode, message.reply_code) def assertExpectedMethod(self, reply, klass, method): self.assertEqual(klass, reply.method.klass.name) self.assertEqual(method, reply.method.name) def assertExpectedContent(self, msg, id, body): self.assertEqual(id, self.getProperty(msg, 'correlation_id')) self.assertEqual(body, msg.body) return msg def getProperty(self, msg, name): for h in msg.headers: if hasattr(h, name): return getattr(h, name) return None def ack(self, *msgs): session = self.session set = RangedSet() for m in msgs: set.add(m.id) #TODO: tidy up completion session.receiver._completed.add(m.id) 
session.message_accept(set) session.channel.session_completed(session.receiver._completed) def assertExpectedGetResult(self, id, body): return self.assertExpectedContent(session.incoming("incoming-gets").get(timeout=1), id, body) def assertEqual(self, expected, actual, msg=''): if expected != actual: raise Exception("%s expected: %s actual: %s" % (msg, expected, actual)) def assertMessageOnQueue(self, queue, id, body): self.session.message_subscribe(destination="incoming-gets", queue=queue, accept_mode=0) self.session.message_flow(destination="incoming-gets", unit=0, value=1) self.session.message_flow(destination="incoming-gets", unit=1, value=0xFFFFFFFF) msg = self.session.incoming("incoming-gets").get(timeout=1) self.assertExpectedContent(msg, id, body) self.ack(msg) self.session.message_cancel(destination="incoming-gets") def __init__(self): TestBase010.__init__(self, "run") self.setBroker("localhost") self.errata = [] def connect(self): """ Connects to the broker """ self.conn = Connection(connect(self.host, self.port)) self.conn.start(timeout=10) self.session = self.conn.session("test-session", timeout=10) def run(self, args=sys.argv[1:]): try: opts, extra = getopt(args, "r:s:e:b:p:h", ["retry=", "spec=", "errata=", "broker=", "phase=", "help"]) except GetoptError, e: self._die(str(e)) phase = 0 retry = 0; for opt, value in opts: if opt in ("-h", "--help"): self._die() if opt in ("-s", "--spec"): self.spec = value if opt in ("-e", "--errata"): self.errata.append(value) if opt in ("-b", "--broker"): self.setBroker(value) if opt in ("-p", "--phase"): phase = int(value) if opt in ("-r", "--retry"): retry = int(value) if not phase: self._die("please specify the phase to run") phase = "phase%d" % phase self.connect() try: getattr(self, phase)() print phase, "succeeded" res = True; except Exception, e: print phase, "failed: ", e traceback.print_exc() res = False if not self.session.error(): self.session.close(timeout=10) self.conn.close(timeout=10) # Crude fix to 
wait for thread in client to exit after return from session_close() # Reduces occurrences of "Unhandled exception in thread" messages after each test import time time.sleep(1) return res def setBroker(self, broker): rex = re.compile(r""" # [ <user> [ / <password> ] @] <host> [ :<port> ] ^ (?: ([^/]*) (?: / ([^@]*) )? @)? ([^:]+) (?: :([0-9]+))?$""", re.X) match = rex.match(broker) if not match: self._die("'%s' is not a valid broker" % (broker)) self.user, self.password, self.host, self.port = match.groups() self.port = int(default(self.port, 5672)) self.user = default(self.user, "guest") self.password = default(self.password, "guest") def _die(self, message = None): if message: print message print """ Options: -h/--help : this message -s/--spec <spec.xml> : file containing amqp XML spec -p/--phase : test phase to run -b/--broker [<user>[/<password>]@]<host>[:<port>] : broker to connect to """ sys.exit(1) def default(value, default): if (value == None): return default else: return value if __name__ == "__main__": test = PersistenceTest() if not test.run(): sys.exit(1)
Built from impact-resistant plastic (IK08), the RS PRO 5800 LED is dust-tight and protected against jets of water (IP66). I was very pleased to find this internet site. I wished to thank you for your time for this wonderful read!! I am definitely having fun with every little bit of it and I have bookmarked you to take a look at new stuff you blog post. Find a great selection of Longchamp shoulder bags at UK. Shop the latest collection. Totally free shipping and returns. Christ is born is a type of wood which is good for the furniture as well as for the following wood. It also involves the purification of the following era statement. The wood protector is the best thing for the material. The fastco first aid message monday more safety tips should be timed and chest compressions, keep it up. Thanks for updating this fastco blog on the 30 to 2 breaths — really very interesting first aid message topics always.
#!/usr/bin/env python2
# -*- coding: utf-8 -*-

# Copyright (C) 2013, Cameron White

"""Qt wizard pages for adding a GitHub account via username/password or token."""

from .tools import waiting_effects

from github import Github
from github.GithubException import BadCredentialsException, \
    TwoFactorException, GithubException
from github.Authorization import Authorization

from PyQt4.QtCore import QRegExp
from PyQt4.QtGui import QWizardPage, QWizard, QRadioButton, QLineEdit, \
    QRegExpValidator, QVBoxLayout, QLabel, QFormLayout, QValidator


class GithubCredentialsWizardPage(QWizardPage):
    """Wizard page that collects either username/password or a personal token.

    On validation it exchanges username/password for an OAuth token (possibly
    detouring through the 2FA page) so that later pages only deal with tokens.
    """

    def __init__(self, parent=None):
        super(GithubCredentialsWizardPage, self).__init__(
            parent,
            title="Credentials",
            subTitle="Enter your username/password or token")

        # Radio Buttons -- selecting one mode disables the other mode's fields
        self.userPassRadioButton = QRadioButton()
        self.userPassRadioButton.toggled.connect(self.changeMode)
        self.userPassRadioButton.toggled.connect(self.completeChanged.emit)

        self.tokenRadioButton = QRadioButton()
        self.tokenRadioButton.toggled.connect(self.changeMode)
        self.tokenRadioButton.toggled.connect(self.completeChanged.emit)

        # LineEdits

        # usernameEdit
        self.usernameEdit = QLineEdit(
            textChanged=self.completeChanged.emit)
        # Username may only contain alphanumeric characters or dash
        # and cannot begin with a dash
        self.usernameEdit.setValidator(
            QRegExpValidator(QRegExp('[A-Za-z\d]+[A-Za-z\d-]+')))

        # passwordEdit -- any non-empty string, masked on screen
        self.passwordEdit = QLineEdit(
            textChanged=self.completeChanged.emit)
        self.passwordEdit.setValidator(
            QRegExpValidator(QRegExp('.+')))
        self.passwordEdit.setEchoMode(QLineEdit.Password)

        # tokenEdit
        self.tokenEdit = QLineEdit(
            textChanged=self.completeChanged.emit)
        # token may only contain alphanumeric characters
        self.tokenEdit.setValidator(
            QRegExpValidator(QRegExp('[A-Za-z\d]+')))
        self.tokenEdit.setEchoMode(QLineEdit.Password)

        # Form
        form = QFormLayout()
        form.addRow("<b>username/password</b>", self.userPassRadioButton)
        form.addRow("username: ", self.usernameEdit)
        form.addRow("password: ", self.passwordEdit)
        form.addRow("<b>token</b>", self.tokenRadioButton)
        form.addRow("token: ", self.tokenEdit)

        # Layout
        self.mainLayout = QVBoxLayout()
        self.mainLayout.addLayout(form)
        self.setLayout(self.mainLayout)

        # Fields -- registered so later pages can read them via field()
        self.registerField("username", self.usernameEdit)
        self.registerField("password", self.passwordEdit)
        self.registerField("token", self.tokenEdit)

        # default to username/password mode
        self.userPassRadioButton.toggle()

        # set to True by validatePage when GitHub demands a one-time code
        self.require_2fa = False

    def changeMode(self):
        """Enable only the line edits relevant to the selected auth mode."""
        if self.userPassRadioButton.isChecked():
            self.usernameEdit.setEnabled(True)
            self.passwordEdit.setEnabled(True)
            self.tokenEdit.setEnabled(False)
        elif self.tokenRadioButton.isChecked():
            self.usernameEdit.setEnabled(False)
            self.passwordEdit.setEnabled(False)
            self.tokenEdit.setEnabled(True)

    def nextId(self):
        # route to the 2FA page only when GitHub requested a one-time code
        if self.require_2fa:
            return 2  # TODO remove magic number
        else:
            return 3  # TODO remove magic number

    def isComplete(self):
        """Return True when the active mode's fields pass their validators."""
        if self.userPassRadioButton.isChecked():
            usernameValidator = self.usernameEdit.validator()
            usernameText = self.usernameEdit.text()
            usernameState = usernameValidator.validate(usernameText, 0)[0]
            passwordValidator = self.passwordEdit.validator()
            passwordText = self.passwordEdit.text()
            passwordState = passwordValidator.validate(passwordText, 0)[0]
            if usernameState == QValidator.Acceptable and \
                    passwordState == QValidator.Acceptable:
                return True
        elif self.tokenRadioButton.isChecked():
            tokenValidator = self.tokenEdit.validator()
            tokenText = self.tokenEdit.text()
            tokenState = tokenValidator.validate(tokenText, 0)[0]
            if tokenState == QValidator.Acceptable:
                return True
        return False

    @waiting_effects
    def validatePage(self):
        """Authenticate against GitHub; store a token on success.

        Returns True to advance (also when 2FA is required, in which case
        nextId routes to the 2FA page), False to stay on this page.
        NOTE(review): performs a network call on the GUI thread -- presumably
        acceptable because @waiting_effects shows a busy cursor; confirm.
        """
        # TODO - clean this up
        if self.userPassRadioButton.isChecked():
            username = str(self.field('username').toString())
            password = str(self.field('password').toString())
            try:
                g = Github(username, password)
                user = g.get_user()
                authentication = user.create_authorization(scopes=['repo'], note='test')
            except TwoFactorException:
                # credentials were accepted but a one-time code is required
                self.require_2fa = True
                return True
            except GithubException:
                self.require_2fa = False
                return False
            self.setField('token', str(authentication.token))
            self.require_2fa = False
            return True
        elif self.tokenRadioButton.isChecked():
            token = str(self.field('token').toString())
            try:
                # derive the username from the token so the summary page can show it
                self.setField('username', Github(token).get_user().login)
            except BadCredentialsException:
                return False
            else:
                self.require_2fa = False
                return True
        else:
            self.require_2fa = False
            return False


class AccountTypeWizardPage(QWizardPage):
    """First page: choose which kind of account to create (GitHub only)."""

    def __init__(self, parent=None):
        super(AccountTypeWizardPage, self).__init__(
            parent,
            title="Select Account Type",
            subTitle="Select the type of account to create")

        # Radio Buttons
        self.githubRadioButton = QRadioButton("Github account")
        self.githubRadioButton.toggle()

        # Layout
        self.mainLayout = QVBoxLayout()
        self.mainLayout.addWidget(self.githubRadioButton)
        self.setLayout(self.mainLayout)

    def nextId(self):
        if self.githubRadioButton.isChecked():
            return 1  # TODO remove magic number


class Github2FAWizardPage(QWizardPage):
    """Page shown when GitHub requires a two-factor one-time code."""

    def __init__(self, parent=None):
        super(Github2FAWizardPage, self).__init__(
            parent,
            title="Two-Factor Authentication",
            subTitle="Enter required authentication code")

        # LineEdits
        self.codeEdit = QLineEdit()
        # codeEdit may only contain 1 or more digits
        self.codeEdit.setValidator(QRegExpValidator(QRegExp(r'[\d]+')))

        # Form
        self.form = QFormLayout()
        self.form.addRow("Code: ", self.codeEdit)

        # Layout
        self.setLayout(self.form)

        # Fields -- '*' makes the code mandatory before Next is enabled
        self.registerField('2fa_code*', self.codeEdit)

    def nextId(self):
        return 3  # TODO remove magic number

    @waiting_effects
    def validatePage(self):
        """Retry authorization creation with the one-time 2FA code."""
        username = str(self.field('username').toString())
        password = str(self.field('password').toString())
        code = str(self.field('2fa_code').toString())

        try:
            # to use 2fa code
            g = Github(username, password)
            user = g.get_user()
            authentication = user.create_authorization(
                scopes=['repo'], note='test', onetime_password=code)
        except GithubException:
            self.wizard().back()  # start over TODO make sure this works
            return False

        self.setField('token', str(authentication.token))
        return True


class UserSummaryWizardPage(QWizardPage):
    """Final page: read-only summary of the username and token fields."""

    def __init__(self, parent=None):
        super(UserSummaryWizardPage, self).__init__(
            parent,
            title="Summary",
            subTitle="Summary of new user account")

        # labels
        self.usernameLabel = QLabel()
        self.tokenLabel = QLabel()

        # form
        self.form = QFormLayout()
        self.form.addRow("username: ", self.usernameLabel)
        self.form.addRow("token: ", self.tokenLabel)

        # layout
        self.setLayout(self.form)

    def initializePage(self):
        # populate labels from wizard fields each time the page is shown
        self.usernameLabel.setText(self.field('username').toString())
        self.tokenLabel.setText(self.field('token').toString())


class AddAccountWizard(QWizard):
    """Wizard assembling the account-type, credentials, 2FA and summary pages."""

    def __init__(self, parent=None):
        super(AddAccountWizard, self).__init__(
            parent,
            windowTitle="Sign In")

        # TODO - remove magic numbers (page ids are referenced in nextId methods)
        self.setPage(0, AccountTypeWizardPage())
        self.setPage(1, GithubCredentialsWizardPage())
        self.setPage(2, Github2FAWizardPage())
        self.setPage(3, UserSummaryWizardPage())
16 November 2018. Prospa is ranked at #5 in the Leadership category and #20 overall. The Leadership category recognises those technology companies who have won a place in the Technology Fast 50 Australia list and whose strong track record of growth has generated cumulative operating revenues greater than $50 million. Joshua Tanchel, Deloitte Technology Fast 50 lead partner, said companies like Prospa “are genuine success stories for Australia and reminders of how fresh thinking fintechs are fundamentally changing the ways we pay for goods and manage our finances.” Full list. 31 October 2018. Prospa is ranked at #9 with three year average YOY growth of 116.1%. According to the AFR “…a cashflow-challenged, credit-constrained future is hinted at by list-topper Next Business Energy, which doesn’t wait for customers to call before applying its best discount to them, by sixth-placed Cashrewards, a coupon and cashback site, and by ninth-placed Prospa, which lends to small businesses where banks no longer will.” Full list. September 2018. The LinkedIn Top Startups list honours the 25 most sought-after startups to work for in Australia. Prospa is also the highest placing Fintech. The ranking is derived from a blended score looking at factors including interest in the company, engagement with employees, job interest and retention, and is informed by the billions of actions taken by more than 575 million professionals on LinkedIn. Full list. August 2018. Prospa awarded Best Employer for the second year running in the annual AON Best Employers Program. Beau Bertoli said, “Being recognised as an employer of choice two years in a row is simply phenomenal. And very humbling. Our team take a lot of pride in their achievements but this is top of the list: being amongst the best places to work in Australia/New Zealand.” July 2018. Prospa achieved a clean sweep of the MFAA Excellence Awards for Fintech Lender of the Year, winning the award in all five states in Australia.
The MFAA Excellence Awards are presented to members of the MFAA who have demonstrated quality customer service, professionalism, innovation, ethics and excellence in their field. They are judged by an independent panel of business professionals and experts. February 2018. Prospa topped the Financial Times’ inaugural list of 1,000 high-growth companies, in revenue terms, from the Asia-Pacific region. The expansion of digital services combined with a desire for flexible financing fuelled Prospa’s growth. 8 November 2017. Prospa is ranked #2 with three year average YOY growth of 239.1%. As reported, “Co-chief executive Beau Bertoli said the real reason Prospa had been able make more than $500 million of loans to 12,000 customers since its 2011 birth was that banks had never developed a comprehensive lending product for small businesses. Banks do very well from consumer lending and big business lending, and SMEs have been left in the too-hard basket,” Mr Bertoli said. Full list. October 2017. Greg Moshal & Beau Bertoli were jointly awarded a Special Recognition Award for 2017 and inducted into the Hall of Fame. These Awards recognise individuals who have made an outstanding contribution to the Australian tech industry. They are unique in their design, requiring nomination for entry and then a vote by previous winners of the award to determine recipients. October 2017. Prospa was named #24 globally and the best Australian company in KPMG’s FOURTH annual global Fintech100 list. Joint CEO Beau Bertoli said, “We’re the highest ranking Australian innovator and for us as a company, it’s really satisfying to be leading the charge for Australian fintechs.” Check out the full list. August 2017. Prospa awarded Best Employer in 2017 in the annual AON Best Employers Program which measures employee engagement, leadership effectiveness, employer brand and culture that drives high performance. 18 July 2017. 
Prospa has been awarded the 2017 Telstra New South Wales Medium Business Award in the 2017 Telstra Business Awards. Read full article. 24 May 2017. Prospa proved its influence and leadership by winning the headline Fintech Leader of the Year award, with Greg Moshal and Beau Bertoli crowned as joint winners in a tightly contested field. We also scored the excellence award for best Fintech Place to Work, against a strong field of finalists. Read the full article. 24 February 2017. This award recognises the top performing company in its field, assisting retail consumers or businesses to access capital, mortgage or personal loan products through innovative financial technology. A full list of winners can be found here. 3 November 2016. 2016 was the 10th year of the Smart50 Awards, which recognise Australia’s top 50 fastest growing SMEs. Read more. October 2016. Prospa was named #31 globally and the best Australian company in KPMG’s third annual global Fintech100 list. Joint CEO Beau Bertoli said, “We’re the highest ranking Australian innovator and for us as a company, it’s really satisfying to be leading the charge for Australian fintechs.” Check out the full list. 28 July 2016. Prospa is Australia’s fastest growing technology business, the leading online lender to small business and now formally transitioning from start-up to ‘grown-up’. Last year we were a finalist in the start-up category for businesses three years old and under, this year we are a finalist in the medium business category for businesses with more than 50 employees and less than 400. And we are only four years in operation. This recognition demonstrates it’s possible to experience exponential growth of 6791% and simultaneously build a high quality organisation. Prospa was named Australia’s fastest growing technology company at the annual Australian Deloitte Technology Fast 50 awards held at the ASX on Thursday 19 November 2015. 
Prospa led the pack with a phenomenal 6,971 percent revenue growth from 2013 to 2015. This prestigious award is a true recognition of Prospa’s bold innovation, dedicated leadership and strong growth. To qualify for a place in the top 50 Deloitte Australian TechFast 50, each technology company has to accumulate more than $8 million in revenue over the three year period from 2013 to 2015. Read the full list of winners of the Technology Fast 500 Asia Pacific program for 2015 and details about the nomination and eligibility criteria of the program. Prospa was selected as a finalist in the 2015 Telstra Business Awards for NSW. The Start Up Business category applies to businesses that are between one and three years old with up to 200 employees. This award follows some significant milestones for Prospa: over 50 employees, over 1,000 customers, ASIC licence and first securitisation. In December 2015, Prospa was named #33 in the top 50 fintech innovators in the world in a survey by KPMG-backed Fintech Innovators, a joint venture between H2 Ventures and KPMG. H2 Ventures founding partner Toby Heap said Australia was “punching above its weight” in having nine companies in the global list.
# -*- coding: utf-8 -*-
# Flask-Diamond (c) Ian Dennis Miller

"""Packaging script for Flask-Diamond.

Reads the version and author metadata out of flask_diamond/__meta__.py and
the dependency pins out of requirements.txt, then invokes setuptools.
"""

import re
import os
import codecs
from setuptools import setup, find_packages


def read(*rnames):
    """Return the UTF-8 text of a file located relative to this script."""
    return codecs.open(os.path.join(os.path.dirname(__file__), *rnames), 'r', 'utf-8').read()


def grep(attrname):
    """Extract a single-quoted attribute value (e.g. __version__) from __meta__.py.

    Raises ValueError (via tuple unpacking) if the attribute is absent or
    appears more than once, which keeps metadata drift from going unnoticed.
    """
    pattern = r"{0}\W*=\W*'([^']+)'".format(attrname)
    strval, = re.findall(pattern, read('flask_diamond/__meta__.py'))
    return strval


def requirements():
    """Parse requirements.txt into a list of requirement strings.

    Bug fix: install_requires must be a list of requirement strings;
    passing the raw file contents as one string is unreliable.  Blank
    lines and comment lines are skipped.
    """
    return [
        line.strip()
        for line in read('requirements.txt').splitlines()
        if line.strip() and not line.strip().startswith('#')
    ]


setup(
    version=grep('__version__'),
    name='Flask-Diamond',
    description=(
        "Flask-Diamond is a batteries-included Flask framework, "
        "sortof like Django but radically decomposable. "
        "Flask-Diamond offers some opinions about "
        "data-centric Internet applications and systems."
    ),
    packages=find_packages(),
    scripts=[
        "bin/flask-diamond",
    ],
    long_description=read('Readme.rst'),
    classifiers=[
        "Development Status :: 4 - Beta",
        "Framework :: Flask",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: POSIX",
        "Operating System :: MacOS :: MacOS X",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Topic :: Internet :: WWW/HTTP",
    ],
    include_package_data=True,
    keywords='',
    author=grep('__author__'),
    author_email=grep('__email__'),
    url=grep('__url__'),
    install_requires=requirements(),
    license='MIT',
    zip_safe=False,
)
Intelligence has proved to be one of the most desirable qualities in an individual — second only, perhaps, to beauty. At the very least, intelligence earns respect, at the highest level it earns awe and fame — and money. And yet, intelligence has been pigeonholed into the narrowest of definitions, limited to a pre-determined set of attitudes and capabilities. In the sphere of learning, a child’s progress is greatly hindered by this pigeonholing; not recognising the potential in a multitude of other fields. In 1983, American developmental psychologist Howard Gardner defined no less than nine types of intelligences that can be possessed by an individual. The intelligences defined by him are as follows: naturalist intelligence, which designates the human ability to discriminate among living things (plants, animals) as well as sensitivity to other features of the natural world (clouds, rock configurations). This type of intelligence most obviously is of core value to scientists and environmentalists, but also plays a very significant role in technology and design — because it indicates an individual that pays attention to detail and to the universe around him. The second category defined is musical intelligence, which is the capacity to discern pitch, rhythm, timbre, and tone, enabling a person to recognise, create, reproduce, and reflect on music. The applications of that one, of course, are self-explanatory. The third kind of intelligence is logical-mathematical intelligence which indicates an inclination towards systematic problem solving, logic, reasoning, inductive and deductive thinking patterns. The fourth is one of the less obvious ones — termed Existential intelligence. This is the kind that’s possessed by philosophers and great writers — a capacity to contemplate deep existential questions of who we are, where we originated, the meaning and purpose of life, and so forth. 
And then comes interpersonal intelligence, which denotes the ability to understand and interact effectively with others through verbal and nonverbal communication, sensitivity to the temperaments of others, and the ability to entertain multiple perspectives. Interestingly, these skills also mirror the qualities of a good leader. The sixth kind is bodily-kinesthetic intelligence which involves the ability to co-ordinate the mind and body, to use the body’s skills to perfection — as displayed by athletes, dancers, surgeons and the like. The seventh category belongs to linguistic intelligence, which indicates expertise in expressing and articulating, and working with language in myriad ways. The eighth one is again intriguing in its description, for it is intra-personal intelligence — again a trait exhibited by philosophers and spiritualists, even writers and artists — which denotes an ability to deeply understand the self: one’s thoughts, emotions and ideas, which in a larger perspective denotes an ability to understand deeply the human condition. The ninth and final kind of intelligence is again fascinating: Spatial intelligence — the ability to think in three dimensions. This denotes a capacity for mental imagery, spatial reasoning, image manipulation, graphic and artistic skills, and an active imagination. Instances of this kind of intelligence may be found in sailors, pilots, sculptors, painters, architects, graphic designers and the like. At the time when Gardner proposed these categories, intelligence was considered only to be mathematical or linguistic. But the division of intelligence into such varying streams opens up a limitless universe of endless possibilities before the individual, every single one of them crucial to pushing the pinnacle of human achievement and endeavour. It brings colour and diversity to our universe which is but obviously dynamic in nature.
To reduce human capacity and intelligence to just two categories is to paint the world in black and white. And that would be a sad world indeed without all its brilliant colours.
from cornice import Service
from colander import MappingSchema, SchemaNode, String
import facebook

from semantica_rt_py.persistence_config import mongodb

# Facebook application id whose tab is installed on registered pages.
FB_APP_ID = '156235004569304'


class AddPageSchema(MappingSchema):
    """Body schema for registering a page: a user access token and a page id."""
    user_token = SchemaNode(String(), location='body', type='str')
    page_id = SchemaNode(String(), location='body', type='str')


pages = Service(name='pages', path='/pages', description="Pages service",
                renderer='bson')


@pages.post(schema=AddPageSchema)
def post_pages(request):
    """Install the app tab on a Facebook page and upsert the page record.

    Returns the stored page document, or an empty dict (with a 'facebook'
    error added to the request) when the Graph API call fails.
    """
    page_id = request.validated['page_id']
    token = request.validated['user_token']
    graph = facebook.GraphAPI(token)
    try:
        graph.put_object(page_id, 'tabs', app_id=FB_APP_ID)
    except Exception as e:
        # str(e) instead of the deprecated e.message (removed in Python 3).
        request.errors.add(None, 'facebook', str(e))
        return {}
    page_ = mongodb.pages.find_and_modify(
        query={'page_id': page_id},
        update={'$set': {'page_id': page_id, 'user_token': token}},
        upsert=True)
    return page_


class GetUpdatesSchema(MappingSchema):
    """Querystring schema: optionally restrict updates to one page."""
    page_id = SchemaNode(String(), location="querystring", type='str',
                         required=False)


updates = Service(name='updates', path='/updates',
                  description='Updates service', renderer='bson')


@updates.get(schema=GetUpdatesSchema)
def get_updates(request):
    """Return the pending updates and mark them as sent.

    Bug fix: the original flagged documents already_sent=1 *before* running
    find({'already_sent': 0}), so the query always came back empty, and it
    then indexed the cursor itself as if it were a single document.  We now
    fetch the pending documents first, then flag them, then stringify each
    document's ObjectId for serialisation.
    """
    q = {'already_sent': 0}
    if request.validated['page_id']:
        q.update({'page_id': request.validated['page_id']})
    pending = list(mongodb.updates.find(q))
    mongodb.updates.update(q, {'$set': {'already_sent': 1}}, multi=True)
    for doc in pending:
        doc['_id'] = str(doc['_id'])
    return pending
According to preliminary estimates, over 10,000 farmers suffered losses, with officials estimating the damage to be Rs 1,647 lakh. Over 10.3 lakh hectares of agricultural crop and more than 10,000 hectares of horticulture crops suffered damage in Andhra Pradesh due to Cyclone Phethai, as per the preliminary estimates made by Real Time Governance Society (RTGS). RTGS is the command centre in Amaravati which monitored the situation and sent quick updates to the respective authorities and departments. Authorities estimate that 140 mandals in the state suffered agricultural damage. East and West Godavari were the worst-hit with the cyclone wreaking havoc in 30 mandals each in these districts. In Krishna district, 24 mandals suffered heavy damage while in Guntur 23 mandals were affected. According to authorities, the damage estimates are as follows: 11,711 hectares of rice, 7,025 hectares of maize, 2,296 hectares of dal, 1,221 hectares of tobacco, 566 hectares of sunflower and 394.8 hectares of sugarcane. RGTS made a preliminary assessment that over 10,000 farmers suffered losses in horticulture and also estimated that the loss would be Rs 1,647 lakh. Meanwhile, authorities also said that they had set up 878 health centres across Srikakulam, East Godavari, West Godavari, Krishna, Visakhapatnam and Vizianagaram. They also recorded 26 cases of diarrhea in Visakhapatnam. RTGS also said that they have set up 533 relief centres across East Godavari, Krishna, Visakhapatnam, Vijayanagaram, West Godavari and Guntur districts, and served food for over 55,000 people. As per its estimates, a total of 12,094 mobile towers were damaged in the state on Monday. Cyclone Phethai made landfall on Monday near Katrenikona in East Godavari, affecting the coastal districts with heavy rainfall, following which train services and flights were cancelled, and power supply was also disrupted. 
However, the authorities swiftly restored the power and cleared the roads, as relief teams were put on standby equipped with earthmovers.
from django.contrib import admin
from django.contrib.contenttypes import generic
from models import Attribute, BaseModel
from django.utils.translation import ugettext_lazy as _


class MetaInline(generic.GenericTabularInline):
    """Inline editor for generic Attribute rows attached to any model."""
    model = Attribute
    extra = 0


class BaseAdmin(admin.ModelAdmin):
    """ModelAdmin base class that stamps audit fields on save.

    created_by is set once, on creation; last_updated_by is refreshed on
    every save, for both the main object and any BaseModel inlines.
    """
    """
    def get_readonly_fields(self, request, obj=None):
        fs = super(BaseAdmin, self).get_readonly_fields(request, obj)
        fs += ('created_by', 'last_updated_by',)
        return fs

    def get_fieldsets(self, request, obj=None):
        fs = super(BaseAdmin, self).get_fieldsets(request, obj)
        fs[0][1]['fields'].remove('created_by')
        fs[0][1]['fields'].remove('last_updated_by')
        fs.extend([(_('Other informations'), {'fields':['created_by','last_updated_by'], 'classes':['collapse']})])
        return fs

    def changelist_view(self, request, extra_context=None):
        if request.user.has_perm('%s.can_view_deleted' % self.model._meta.app_label):
            if not "deleted_flag" in self.list_filter:
                self.list_filter += ("deleted_flag",)
        return super(BaseAdmin, self).changelist_view(request, extra_context)

    def queryset(self, request):
        return super(BaseAdmin, self).queryset(request).exclude(deleted_flag=True)
    """

    def save_model(self, request, obj, form, change):
        """Stamp the creator on first save and the updater on every save."""
        if not change:
            obj.created_by = request.user
        # Bug fix: last_updated_by must be refreshed on edits too, not only
        # when the object is first created.
        obj.last_updated_by = request.user
        obj.save()

    def save_formset(self, request, form, formset, change):
        """Stamp audit fields on inline BaseModel instances before saving."""
        instances = formset.save(commit=False)
        for instance in instances:
            if isinstance(instance, BaseModel):  # Check if it is the correct type of inline
                if not instance.created_by_id:
                    instance.created_by = request.user
                instance.last_updated_by = request.user
                instance.save()
        # Bug fix: commit=False defers many-to-many data; it must be saved
        # explicitly or it is silently dropped.
        formset.save_m2m()
Customer’s plant was located in the South and had several roof top ventilation fans powered by buckets in MCC’s throughout the plant. Personnel had access to the controls of the buckets and were able to disrupt the balance of air flow in the plant creating hot spots and negative air pressure areas. Logicon was asked to integrate new MCC’s with DeviceNet and a central control screen in the maintenance shop to manage the control of the air flow in the plant. Ventilation fans were over packaging areas of the plant where no air conditioning was present. Each area also included some process or packaging ovens increasing the heat rise in the areas. The customer ordered new Allen-Bradley MCC units that included DeviceNet starters and E1 Plus modules. A single DNET card was selected for each of the MCCs to be installed isolating the DNET networks to each room. The maintenance shop was selected as the location for the small panel to house the PLC and Panelview given its central location in the plant. This minimized the trunk length of the DNET cables. The HMI was designed with snapshots of the composite floor plan of the plant to give a reference for location in the facility. An animated fan graphic was placed on top of the floor plan with the supply or exhaust designation and number of the unit. Each fan had a popup screen for status and control so parameter files were used in the FactoryTalk View ME development to minimize screens. Since there were nearly 50 fans in the plant to be added to the system over time, a User Defined Data Type (UDT) was built in the RSLogix5000 software that included all control, status, features and settings the plant needed for a typical fan. This made the development of the code or the addition of future fans quick and easy. The temperature probes were a good distance away from the main panel, so to minimize installation and wiring costs, a wireless unit was selected from Banner. 
The receiver or gateway was installed at the main panel to communicate to the sensors. The temperatures were used in the code to automatically activate certain fan pairings and the status was displayed on the HMI in the maintenance shop. Two new MCC’s were installed with buckets that only included indicator lights for on/off status eliminating personnel interaction. Wireless temperature probes were installed in specific areas to control on/off timing of sections of fans and a special pairing list was developed by a 3rd party to properly group the appropriate exhaust and supply fans together to manage air pressures in the plant. The plant now has control over the air flow in the plant and has a visual aid on the PanelView indicating what the zone temperatures are and which fans are running.
#*************************************************************** # This file is part of Paintomics v3 # # Paintomics is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # Paintomics is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Paintomics. If not, see <http://www.gnu.org/licenses/>. # # More info http://bioinfo.cipf.es/paintomics # Technical contact paintomics@cipf.es #************************************************************** import os, shutil import logging import logging.config from flask import send_from_directory from src.conf.serverconf import CLIENT_TMP_DIR from src.classes.File import File from src.common.UserSessionManager import UserSessionManager from src.common.DAO.FileDAO import FileDAO from src.common.DAO.JobDAO import JobDAO from src.common.ServerErrorManager import handleException def dataManagementUploadFile(request, response, DESTINATION_DIR, isReference=False): #VARIABLE DECLARATION fileInstance = None daoInstance = None try: #**************************************************************** # Step 0.CHECK IF VALID USER SESSION #**************************************************************** logging.info("STEP0 - CHECK IF VALID USER....") userID = request.cookies.get('userID') userName = request.cookies.get('userName') sessionToken = request.cookies.get('sessionToken') UserSessionManager().isValidUser(userID, sessionToken) #ONLY ADMIN USER (id=0) CAN UPLOAD NEW INBUILT GTF FILES if(isReference and UserSessionManager().isValidAdminUser(userID, userName, sessionToken)): userID="-1" 
#**************************************************************** #1. SAVE THE UPLOADED FILE TO THE USER DIRECTORY AND TO THE DATABASE #**************************************************************** logging.info("STEP1 - FILE UPLOADING REQUEST RECEIVED") formFields = request.form uploadedFiles = request.files if not isReference: DESTINATION_DIR = DESTINATION_DIR + userID + "/inputData/" else: userID="-1" DESTINATION_DIR = DESTINATION_DIR + "GTF/" logging.info("STEP1 - READING FILES....") fields = {} for field in formFields.keys(): if formFields[field] == "undefined": continue fields[field] = formFields[field] if isReference and formFields.get("fileName", None) != None: registerFile(userID, formFields.get("fileName"), fields, DESTINATION_DIR) else: for uploadedFileName in uploadedFiles.keys(): if (uploadedFileName is not None): #GET THE FILE OBJECT uploadedFile = request.files.get(uploadedFileName) uploadedFileName = uploadedFile.filename saveFile(userID, uploadedFileName, fields, uploadedFile, DESTINATION_DIR) response.setContent({"success": True}) except Exception as ex: handleException(response, ex, __file__ , "dataManagementUploadFile") finally: if(daoInstance != None): daoInstance.closeConnection() return response def dataManagementGetMyFiles(request, response, DESTINATION_DIR, MAX_CLIENT_SPACE, isReference=False): #VARIABLE DECLARATION fileInstance = None fileInstances = [] daoInstance = None try: #**************************************************************** # Step 0.CHECK IF VALID USER SESSION #**************************************************************** logging.info("STEP0 - CHECK IF VALID USER....") userID = request.cookies.get('userID') sessionToken = request.cookies.get('sessionToken') UserSessionManager().isValidUser(userID, sessionToken) if not isReference: DESTINATION_DIR += userID else: userID="-1" DESTINATION_DIR += "GTF/" #**************************************************************** # Step 1.GET THE LIST OF FILES 
#**************************************************************** logging.info("STEP1 - GET MY FILE LIST REQUEST RECEIVED") daoInstance = FileDAO() matchedFiles = daoInstance.findAll(otherParams={"userID":userID}) logging.info("STEP1 - GET MY FILE LIST REQUEST RECEIVED...DONE") #**************************************************************** # Step 2.CALCULATE USED SPACE #**************************************************************** logging.info("STEP2 - GET THE CURRENT USED SPACE...") dataSummary = {"usedSpace" : dir_total_size(DESTINATION_DIR), "availableSpace": MAX_CLIENT_SPACE} logging.info("STEP2 - GET THE CURRENT USED SPACE...DONE") response.setContent({"success": True, "fileList" : matchedFiles, "dataSummary" : dataSummary }) except Exception as ex: handleException(response, ex, __file__ , "dataManagementGetMyFiles") finally: if(daoInstance != None): daoInstance.closeConnection() return response def dataManagementDeleteFile(request, response, DESTINATION_DIR, MAX_CLIENT_SPACE, isReference=False, fileName=None): #VARIABLE DECLARATION daoInstance = None try: #**************************************************************** # Step 0.CHECK IF VALID USER SESSION #**************************************************************** logging.info("STEP0 - CHECK IF VALID USER....") userID = request.cookies.get('userID') userName = request.cookies.get('userName') sessionToken = request.cookies.get('sessionToken') if (userID is None): response.setContent({"success": False, "errorMessage": "Log in required</br>Sorry but the feature you are requesting is only available to registered accounts."}) else: UserSessionManager().isValidUser(userID, sessionToken) #ONLY ADMIN USER (id=0) CAN UPLOAD NEW INBUILT GTF FILES if(isReference and UserSessionManager().isValidAdminUser(userID, userName, sessionToken)): userID="-1" if not isReference: DESTINATION_DIR += userID + "/inputData/" else: userID="-1" DESTINATION_DIR += "GTF/" 
#**************************************************************** # Step 1. GET THE LIST OF JOB IDs #**************************************************************** if fileName == None: fileName = request.form.get("fileName") files = fileName.split(",") #**************************************************************** # Step 2. DELETE EACH FILE #**************************************************************** daoInstance = FileDAO() for fileName in files: #**************************************************************** # Step 2.1.DELETE THE GIVEN FILE FROM DATABASE #**************************************************************** logging.info("STEP1 - REMOVING " + fileName + " FROM DATABASE...") daoInstance.remove(fileName, otherParams={"userID":userID}) logging.info("STEP1 - REMOVING " + fileName + " FROM DATABASE...DONE") #**************************************************************** # Step 2.2.DELETE THE GIVEN FILE FROM DIRECTORY #**************************************************************** logging.info("STEP2 - REMOVING " + fileName + " FROM USER DIRECTORY...") if os.path.isfile(DESTINATION_DIR + fileName): os.remove(DESTINATION_DIR + fileName) logging.info("STEP2 - REMOVING " + fileName + " FROM USER DIRECTORY...DONE") else: logging.info("STEP2 - REMOVING " + fileName + " FROM USER DIRECTORY...FILE NOT FOUND") response.setContent({"success": True }) except Exception as ex: handleException(response, ex, __file__ , "dataManagementDeleteFile") finally: if(daoInstance != None): daoInstance.closeConnection() return response def dataManagementGetMyJobs(request, response): #VARIABLE DECLARATION jobInstance = None jobInstances = [] daoInstance = None try: #**************************************************************** # Step 0.CHECK IF VALID USER SESSION #**************************************************************** logging.info("STEP0 - CHECK IF VALID USER....") userID = request.cookies.get('userID') sessionToken = request.cookies.get('sessionToken') 
        UserSessionManager().isValidUser(userID, sessionToken)
        # NOTE(review): the anonymous-user check happens *after* session
        # validation here, unlike dataManagementDeleteFile — confirm order.
        if (userID is None):
            response.setContent({"success": False, "errorMessage": "Log in required</br>Sorry but the feature you are requesting is only available to registered accounts."})
        else:
            #****************************************************************
            # Step 2.GET THE LIST OF JOBS FOR GIVEN USER
            #****************************************************************
            logging.info("STEP1 - GET MY JOB LIST REQUEST RECEIVED")
            daoInstance = JobDAO()
            matchedFiles = daoInstance.findAll(otherParams={"userID":userID})
            logging.info("STEP1 - GET MY JOB LIST REQUEST RECEIVED...DONE")

            response.setContent({"success": True, "jobList" : matchedFiles})
    except Exception as ex:
        handleException(response, ex, __file__ , "dataManagementGetMyJobs")
    finally:
        if(daoInstance != None):
            daoInstance.closeConnection()
    return response


def dataManagementDeleteJob(request, response):
    """Delete one or more jobs (comma-separated ids in the 'jobID' form
    field) from the database and remove their job and temp directories.
    """
    #VARIABLE DECLARATION
    daoInstance = None
    try:
        #****************************************************************
        # Step 0.CHECK IF VALID USER SESSION
        #****************************************************************
        logging.info("STEP0 - CHECK IF VALID USER....")
        userID = request.cookies.get('userID')
        sessionToken = request.cookies.get('sessionToken')
        UserSessionManager().isValidUser(userID, sessionToken)

        #****************************************************************
        # Step 1. GET THE LIST OF JOB IDs
        #****************************************************************.
        jobID = request.form.get("jobID")
        jobs = jobID.split(",")

        #****************************************************************
        # Step 2. DELETE EACH JOB
        #****************************************************************.
        daoInstance = JobDAO()
        # Anonymous sessions keep their data under the "nologin" directory.
        userDirID = userID if userID is not None else "nologin"
        userDir = CLIENT_TMP_DIR + userDirID + "/jobsData/"
        tmpDir = CLIENT_TMP_DIR + userDirID + "/tmp/"
        for jobID in jobs:
            #****************************************************************
            # Step 2a. DELETE GIVEN JOB FROM DATABASE
            #****************************************************************
            logging.info("STEP1 - REMOVING " + jobID + " FROM DATABASE...")
            daoInstance.remove(jobID, otherParams={"userID":userID})
            logging.info("STEP1 - REMOVING " + jobID + " FROM DATABASE...DONE")

            #****************************************************************
            # Step 2b. DELETE GIVEN JOB FROM USER DIRECTORY
            #****************************************************************
            logging.info("STEP2 - REMOVING " + userDir + jobID + " FROM USER DIRECTORY...")
            if os.path.isdir(userDir + jobID):
                shutil.rmtree(userDir + jobID)
                logging.info("STEP2 - REMOVING " + userDir + jobID + " FROM USER DIRECTORY...DONE")
            else:
                logging.info("STEP2 - REMOVING " + userDir + jobID + " FROM USER DIRECTORY...FILE NOT FOUND")

            logging.info("STEP2 - REMOVING TEMPORAL DIR " + tmpDir + jobID + " FROM USER DIRECTORY...")
            if os.path.isdir(tmpDir + jobID):
                shutil.rmtree(tmpDir + jobID)
                # NOTE(review): this success log lacks a "...DONE" suffix,
                # unlike its siblings — presumably an oversight.
                logging.info("STEP2 - REMOVING TEMPORAL DIR " + tmpDir + jobID + " FROM USER DIRECTORY...")
            else:
                logging.info("STEP2 - REMOVING TEMPORAL DIR " + tmpDir + jobID + " FROM USER DIRECTORY...FILE NOT FOUND")

        response.setContent({"success": True })
    except Exception as ex:
        handleException(response, ex,__file__ , "dataManagementDeleteJob")
    finally:
        if(daoInstance != None):
            daoInstance.closeConnection()
    return response


def dataManagementDownloadFile(request, response):
    """Serve a stored file, either as an attachment or (serve=true) as a
    plain-text stream of up to 51 lines starting at the given offset.

    fileType selects the source directory: "job_result" -> the job's output
    dir, "input" -> the user's input dir, anything else -> the job tmp dir.
    """
    try:
        #****************************************************************
        # Step 0.CHECK IF VALID USER SESSION
        #****************************************************************
        logging.info("STEP0 - CHECK IF VALID USER....")
        userID = request.cookies.get('userID')
        sessionToken = request.cookies.get('sessionToken')
        UserSessionManager().isValidUser(userID, sessionToken)

        #****************************************************************
        # Step 1.READ PARAMS
        #****************************************************************
        fileName = request.args.get("fileName", "")
        fileType =request.args.get("fileType", "")
        jobID =request.args.get("jobID", "")
        serve =(request.args.get("serve", "").lower() == "true")
        offset =int(request.args.get("offset", 0))

        #send_from_directory(self.FILES_SETTINGS.ROOT_DIRECTORY + 'public_html', filename)
        #****************************************************************
        # Step 2.GENERATE THE PATH TO FILE
        #****************************************************************
        logging.info("STEP1 - GET FILE REQUEST RECEIVED")
        userDirID = userID if userID is not None else "nologin"
        if fileType=="job_result":
            userDir = "/jobsData/" + jobID + "/output/"
        elif fileType=="input":
            userDir = "/inputData/"
        else:
            userDir = "/tmp/" + jobID
        userDir = CLIENT_TMP_DIR + userDirID + userDir

        file_path = "{path}/{file}".format(path=userDir, file=fileName)
        if os.path.isfile(file_path):
            #IF THE REQUEST WANTS THE FILE IN A STREAM
            if serve == True:
                #TODO: HACER ESTO<- http://flask.pocoo.org/docs/0.10/patterns/streaming/
                def generate():
                    # Yield at most 51 lines starting at 'offset', with
                    # normalised line endings.
                    with open(file_path) as f:
                        lines = f.readlines()
                        first = min(len(lines), offset)
                        last = min(len(lines), offset + 51)
                        lines = lines[first:last]
                        for row in lines:
                            yield row.rstrip() + "\n"
                        f.close()
                from flask import Response
                return Response(generate(), mimetype='text/plain')
                #response.imetype='text/plain')
            else:
                return send_from_directory(userDir, fileName, as_attachment=True, attachment_filename=fileName)
        else:
            response.setContent({"success": False, "errorMessage": "File not found.</br>Sorry but it looks like the requested file was removed from system."})
            return response
    except Exception as ex:
        handleException(response, ex, __file__ , "dataManagementDownloadFile")
    return response

#****************************************************************
# FILES MANIPULATION
#****************************************************************
def saveFile(userID, uploadedFileName, options, uploadedFile, DESTINATION_DIR):
    """Persist an uploaded file into DESTINATION_DIR and register it in the
    database; returns the (possibly renamed) file name actually used.

    Invalid characters are replaced with '_', and a numeric suffix is added
    when the name already exists on disk.
    """
    #1. CREATE THE USER DATA DIRECTORY IF NOT EXISTS
    if(not os.path.isdir(DESTINATION_DIR)):
        os.makedirs(DESTINATION_DIR)

    # Make sure to replace invalid chars to avoid problems
    uploadedFileName = ''.join('_' if ch in [':', '!', '[', ']', ';'] else ch for ch in uploadedFileName)

    #TODO: CHECK IF ENOUGH SPACE
    #SAVE THE FILE TO USER's DIRECTORY
    file_path = "{path}/{file}".format(path=DESTINATION_DIR, file=uploadedFileName)

    #CHECK IF FILENAME ALREADY EXISTS -> IF SO, ADD SUBFIX
    # NOTE(review): rsplit(".") without maxsplit=1 splits on *every* dot, so
    # "a.b.c" keeps extension ".b" and drops ".c" — confirm intended.
    fileExtension=uploadedFileName.rsplit(".")
    originalName = fileExtension[0]
    if(len(fileExtension)>1):
        fileExtension= "." + fileExtension[1]
    else:
        fileExtension= ""
    iteration = 1
    while(os.path.isfile(file_path)):
        uploadedFileName = originalName + str(iteration) + fileExtension
        file_path = "{path}/{file}".format(path=DESTINATION_DIR, file=uploadedFileName)
        iteration=iteration+1

    logging.info("\tSAVING " + uploadedFile.filename + " AS " + uploadedFileName + "...")
    uploadedFile.save(file_path)
    logging.info("\tSAVING " + uploadedFile.filename + " AS " + uploadedFileName + "...DONE")

    #REGISTER FILE IN DATABASE
    registerFile(userID, uploadedFileName, options, DESTINATION_DIR)

    return uploadedFileName


def copyFile(userID, fileName, options, origin, destination):
    """Copy a registered file to another directory (definition continues
    beyond this excerpt)."""
    file_path = "{path}/{file}".format(path=destination, file=fileName)

    #CHECK IF FILENAME ALREADY EXISTS -> IF SO, ADD SUBFIX
    fileExtension=fileName.rsplit(".")
    originalName = fileExtension[0]
    if(len(fileExtension)>1):
        fileExtension= "."
+ fileExtension[1] else: fileExtension="" iteration = 1 while(os.path.isfile(file_path)): fileName = originalName + str(iteration) + fileExtension file_path = "{path}/{file}".format(path=destination, file=fileName) iteration=iteration+1 logging.info("\tCOPYING " + originalName + fileExtension + " AS " + fileName + "...") shutil.copy(origin + originalName + fileExtension, destination + fileName) logging.info("\tCOPYING " + originalName + fileExtension + " AS " + fileName + "...DONE") #REGISTER FILE IN DATABASE registerFile(userID, fileName, options, destination) return fileName def registerFile(userID, fileName, options, location): # Do not register the file in the database if (str(userID) == 'None'): return None logging.info("\tREGISTERING " + fileName + " INTO DATABASE...") fileInstance = File("") fileInstance.setFileName(fileName) fileInstance.setDataType(options.get("dataType")) fileInstance.setOmicType(options.get("omicType")) fileInstance.setDescription(options.get("description", "")) options.pop("dataType", None) options.pop("omicType", None) options.pop("description", None) if bool(options): #NOT EMPTY fileInstance.otherFields = options file_path = "{path}/{file}".format(path=location, file=fileName) fileInstance.setSize(os.stat(file_path).st_size) import time fileInstance.setSubmissionDate(time.strftime("%d/%m/%Y %H:%M")) daoInstance = FileDAO() daoInstance.remove(fileName, otherParams={"userID": userID}) daoInstance.insert(fileInstance, otherParams={"userID":userID}) logging.info("\tREGISTERING " + fileName + " INTO DATABASE...DONE") if(daoInstance != None): daoInstance.closeConnection() return fileName def dir_total_size(source): total_size = 0 for item in os.listdir(source): itempath = os.path.join(source, item) if os.path.isfile(itempath): total_size += os.path.getsize(itempath) elif os.path.isdir(itempath): #TODO:ignore tmp dir total_size += dir_total_size(itempath) return total_size
Phew, what a week it’s been — not only have we seen the release of The MagPi issue 61, but today we also discovered issue 3 of Hello World on our doorstep. Both are amazing publications for our community of Raspberry Pi makers and educators. We’re always keen to improve our accessibility and increase our reach, and if you want to help us, why not contribute by translating our YouTube video content? That’s it from us for the week! Have a great weekend, and we’ll see you next Friday. (thepihut.com) We know it's only September...but these are gorgeous! (raspberrypi.org) -. . - ..-. .-.. .. -..- / .- -. -.. / -.-. .... .. .-.. .-..? (raspberrypi.org) Who has time to solve these? (circuitbeard.co.uk) We'd completely forgotten these existed! Who's ready for the 2nd grade? (twitter.com) Too cool for school! Code Club and the Raspberry Pi Foundation are ready — are you?
#!/usr/bin/env python
"""
environment variable inventory source

If the environment variable is undefined, the variable is also undefined.
The group level environment variables are only supported,
and the host level environment variables are not supported.
The environment variable name must be uppercase.

## Examples

The environment variable "FOO" is assigned to the variable "foo".

```yaml
env_vars:
  - foo
```

The environment variable "FOO" is assigned to the variable "bar".

```yaml
env_vars:
  bar: foo
```
"""

import argparse
import json
import os
import sys

import yaml


def main():
    """Entry point implementing Ansible's dynamic inventory protocol.

    Requires the ENV environment variable to select the inventory file
    ``inventories/<ENV>.yml``; exits with status 1 when it is missing.
    """
    parser = get_parser()
    args = parser.parse_args()
    if "ENV" not in os.environ:
        sys.stdout.write("[ERROR] The environment variable 'ENV' is required.\n")
        sys.exit(1)
    env = os.environ["ENV"]
    if args.list:
        do_list(env)
    if args.host:
        do_host(env, args.host)


def do_host(env, hostname):
    """Print host variables for *hostname* as JSON.

    Host-level variables are unsupported (see module docstring), so this
    always emits an empty object regardless of the arguments.
    """
    ret = {}
    json.dump(ret, sys.stdout)


def do_list(env):
    """Print the whole inventory for *env* as JSON on stdout.

    Walks the group tree breadth-unordered from the "all" root, resolving
    each group via parse_group().
    """
    ret = {}
    with open("inventories/{}.yml".format(env)) as r:
        # safe_load: plain yaml.load without a Loader is deprecated and can
        # construct arbitrary Python objects from the file.
        groups = [("all", yaml.safe_load(r)["all"])]
        while groups:
            group_name, group = groups.pop()
            node, children = parse_group(group)
            ret[group_name] = node
            for name, child in children.items():
                groups.append((name, child))
    json.dump(ret, sys.stdout)


def get_parser():
    """Build the --list / --host argument parser."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--list", action="store_true")
    parser.add_argument("--host")
    return parser


def parse_group(group):
    """Resolve one inventory group definition.

    Parameters:
        group -- the raw group mapping from the YAML inventory; may contain
                 "env_vars" (list or dict), "hosts" and "children".

    Returns:
        (node, children) where *node* is the Ansible group dict
        (hosts / vars / children) and *children* maps child group names to
        their raw definitions for further traversal.
    """
    env_vars = group.get("env_vars", {})
    ev = {}
    if env_vars is None:
        pass
    elif isinstance(env_vars, list):
        # List form: entry "foo" reads environment variable "FOO".
        for e in env_vars:
            k = e.upper()
            if k in os.environ:
                ev[e] = os.environ[k]
    elif isinstance(env_vars, dict):
        # Dict form: {var_name: env_name} reads ENV_NAME into var_name.
        for k, v in env_vars.items():
            env_name = v.upper()
            if env_name in os.environ:
                ev[k] = os.environ[env_name]
    children = group.get("children", {})
    hostvars = group.get("hosts", {})
    if hostvars is None:
        hostvars = {}
    ret = {
        # list(...) so the structure is JSON-serializable on Python 3,
        # where dict.keys() returns a view that json.dump rejects.
        "hosts": list(hostvars.keys()) if isinstance(hostvars, dict) else hostvars,
        "vars": ev,
        "children": list(children.keys()),
    }
    return ret, children


if __name__ == "__main__":
    main()
We’ve recently been trying out the Vauxhall Crossland X. The Crossland X is a crossover SUV and part of the X range alongside the Mokka X and the Grandland X. Everyone seems to be buying an SUV these days, particularly families like ours, and so the Crossland X needs to offer a USP if it is to be chosen over the many other SUV options on the market. The Crossland X is really versatile when it comes to creating space. It has a 60/40 split folding rear seat which gave us greater flexibility when trying to fit everything in! Our stroller is pretty bulky as far as strollers go, but I could fit it in the boot along with several bags of shopping once I removed the flex floor which gave me the extra space I needed. The Crossland X has style in abundance. With a contrast roof colour and co-ordinating mirrors, our Crossland X in Amber Orange with Mineral Black roof really turned heads as we drove around town in it. We took the Vauxhall Crossland X out to the Ribble Valley and onto the Yorkshire Moors to really stretch its legs, so to speak. The Crossland X is great on the winding roads and the boys loved exploring the area. Our Crossland X had some serious tech on board. I loved the easy to connect Android Auto (the car also has Apple CarPlay) – hands free is an absolute must for me when I’m in the car. There is also an SOS feature on board. Don’t press that unless you are in trouble – or you will end up in trouble as my husband discovered! The Crossland X is a nice, smooth drive in and around town. Country roads are no issue either and the 1.6l engine copes perfectly in either setting. Cruise control with speed limiter comes as standard, as do alloy wheels, auto lighting and windscreen wipers, dark tinted rear windows and a lot more besides. For the on the road price of £21,380.00 the Crossland X has all the gadgets, safety features and comfort quality you would ask for in a family car.
With adjustable seats in the front including lumbar support, there is no excuse for not being able to drive comfortably in the Vauxhall Crossland X. Just take the time to find the right settings for you and you’ll be driving in comfort every time. Three children fit in the back at a bit of a squeeze (we have one baby seat and one booster, with the biggest child in the middle). For short journeys that would suit us fine but I’m not sure a road trip would be comfortable. I liked the Vauxhall Crossland X. I think it might be a little bit on the small side for a family of five, and though we did fit all three children in the back they felt a bit hemmed in. My husband commented on the safety features of the car, which was something he really liked about it. Side protection mouldings, six airbags and a tyre pressure monitoring system are all welcome additions to a vehicle when you’re travelling with your most precious people!
# Selenium-based end-to-end tests for the froide request-making flow.
import re

from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
from django.contrib.auth import get_user_model
from django.core import mail

from selenium.webdriver.support.wait import WebDriverWait

from froide.foirequest.tests import factories
from froide.foirequest.models import FoiRequest
from froide.publicbody.models import PublicBody

User = get_user_model()


def get_selenium():
    """Instantiate the WebDriver selected by settings.TEST_SELENIUM_DRIVER.

    Supported values: 'chrome', 'phantomjs'; anything else falls back to
    Firefox. Drivers are imported lazily so only the chosen one is required.
    """
    driver = getattr(settings, 'TEST_SELENIUM_DRIVER', 'firefox')
    if driver == 'chrome':
        from selenium.webdriver.chrome.webdriver import WebDriver as ChromeDriver
        return ChromeDriver()
    elif driver == 'phantomjs':
        from selenium.webdriver import PhantomJS
        return PhantomJS()
    else:
        from selenium.webdriver.firefox.webdriver import WebDriver as FirefoxDriver
        return FirefoxDriver()


class JavaScriptException(Exception):
    # Raised when a browser-side JS error was captured during a test.
    pass


class CheckJSErrors(object):
    """Context manager that fails the test if any JS error fires in the page.

    On enter it installs a window.onerror hook that records the message on
    the <body> element; on exit it raises JavaScriptException if a message
    was recorded.
    """
    def __init__(self, driver):
        self.driver = driver

    def __enter__(self):
        self.driver.execute_script('''
            window.onerror=function(msg){
                $('body').attr('jserror', msg);
            };
        ''')

    def __exit__(self, exc_type, exc_value, traceback):
        body = self.driver.find_elements_by_xpath('//body[@jserror]')
        if body:
            msg = body[0].get_attribute('jserror')
            raise JavaScriptException(msg)


class TestMakingRequest(LiveServerTestCase):
    """Drive the make-request wizard through the browser for the main
    logged-in / logged-out variants and verify the resulting FoiRequest
    and User state in the database.
    """

    @classmethod
    def setUpClass(cls):
        # One shared browser session for the whole class (expensive to start).
        cls.selenium = get_selenium()
        cls.selenium.implicitly_wait(3)
        super(TestMakingRequest, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        cls.selenium.quit()
        super(TestMakingRequest, cls).tearDownClass()

    def scrollTo(self, id=None, klass=None):
        """Scroll the element (by id or by class name) into view and focus it."""
        if id is not None:
            self.selenium.find_element_by_id(id).location_once_scrolled_into_view
            selector = '#' + id
        if klass is not None:
            self.selenium.find_element_by_class_name(klass).location_once_scrolled_into_view
            selector = '.' + klass
        self.selenium.execute_script("window.scrollTo(0,0);$('%s').focus();" % selector)

    def setUp(self):
        # Fresh fixture world + search index per test; grab any user/body.
        factories.make_world()
        factories.rebuild_index()
        self.user = User.objects.all()[0]
        self.pb = PublicBody.objects.all()[0]

    def do_login(self, navigate=True):
        """Log self.user in through the login form (password from fixtures)."""
        if navigate:
            self.selenium.get('%s%s' % (self.live_server_url,
                reverse('account-login')))
        email_input = self.selenium.find_element_by_id("id_email")
        email_input.send_keys(self.user.email)
        password_input = self.selenium.find_element_by_id("id_password")
        password_input.send_keys('froide')
        self.selenium.find_element_by_xpath(
            '//form//button[contains(text(), "Log In")]').click()

    def test_make_not_logged_in_request(self):
        """Anonymous user creates an account inline, sends a request, then
        confirms via the activation link from the email outbox."""
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('foirequest-make_request')))
        with CheckJSErrors(self.selenium):
            search_pbs = self.selenium.find_element_by_id('id_public_body')
            search_pbs.send_keys(self.pb.name)
            self.selenium.find_element_by_class_name('search-public_bodies-submit').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('.search-results .search-result'))
            self.selenium.find_element_by_css_selector('.search-results .search-result label').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('option-check_foi').is_displayed())
            self.selenium.find_element_by_id('option-check_foi').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('continue-foicheck'))
            self.selenium.find_element_by_id('continue-foicheck').click()
            req_title = 'FoiRequest Number'
            self.selenium.find_element_by_id('id_subject').send_keys(req_title)
            self.selenium.find_element_by_id('id_body').send_keys('Documents describing something...')
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_elements_by_css_selector('#similar-requests li'))
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('review-button').is_displayed()
            )
            self.selenium.find_element_by_id('id_first_name')\
                .send_keys('Peter')
            self.selenium.find_element_by_id('id_last_name')\
                .send_keys('Parker')
            user_email = 'peter.parker@example.com'
            self.selenium.find_element_by_id('id_user_email')\
                .send_keys(user_email)
            self.selenium.find_element_by_id('id_terms').click()
            self.selenium.find_element_by_id('review-button').click()
            self.selenium.find_element_by_id('step-review')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            # Clear the outbox so the activation mail is at index 0 below.
            mail.outbox = []
            self.selenium.find_element_by_id('send-request-button').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('.heroine-unit'))
        new_user = User.objects.get(email=user_email)
        self.assertEqual(new_user.private, False)
        req = FoiRequest.objects.get(user=new_user)
        self.assertEqual(req.title, req_title)
        self.assertEqual(req.public, True)
        self.assertEqual(req.public_body, self.pb)
        # Request stays pending until the email address is confirmed.
        self.assertEqual(req.status, 'awaiting_user_confirmation')
        message = mail.outbox[0]
        match = re.search('http://[^/]+(/.+)', message.body)
        activate_url = match.group(1)
        self.selenium.get('%s%s' % (self.live_server_url, activate_url))
        WebDriverWait(self.selenium, 5).until(
            lambda driver: driver.find_element_by_css_selector('#change-password-now'))
        self.assertIn('?new#change-password-now', self.selenium.current_url)
        req = FoiRequest.objects.get(user=new_user)
        self.assertEqual(req.status, 'awaiting_response')

    def test_make_not_logged_in_request_to_public_body(self):
        """Anonymous user requests from a preselected public body as a
        private, non-public request."""
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('foirequest-make_request',
                kwargs={'public_body': self.pb.slug})))
        with CheckJSErrors(self.selenium):
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('option-check_foi').is_displayed())
            self.selenium.find_element_by_id('option-check_foi').click()
            self.selenium.find_element_by_id('continue-foicheck').click()
            req_title = 'FoiRequest Number'
            self.selenium.find_element_by_id('id_subject').send_keys(req_title)
            self.selenium.find_element_by_id('id_body').send_keys('Documents describing something...')
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_elements_by_css_selector('#similar-requests li'))
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('review-button').is_displayed()
            )
            user_first_name = 'Peter'
            user_last_name = 'Parker'
            self.selenium.find_element_by_id('id_first_name')\
                .send_keys(user_first_name)
            self.selenium.find_element_by_id('id_last_name')\
                .send_keys(user_last_name)
            user_email = 'peter.parker@example.com'
            self.selenium.find_element_by_id('id_user_email')\
                .send_keys(user_email)
            self.selenium.find_element_by_id('id_terms').click()
            # Toggle visibility: unset "public", set "private".
            self.selenium.find_element_by_id('id_public').click()
            self.selenium.find_element_by_id('id_private').click()
            self.selenium.find_element_by_id('review-button').click()
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            self.selenium.find_element_by_id('send-request-button').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('.heroine-unit'))
        new_user = User.objects.get(email=user_email)
        self.assertEqual(new_user.first_name, user_first_name)
        self.assertEqual(new_user.last_name, user_last_name)
        self.assertEqual(new_user.private, True)
        req = FoiRequest.objects.get(user=new_user)
        self.assertEqual(req.title, req_title)
        self.assertEqual(req.public, False)
        self.assertEqual(req.public_body, self.pb)
        self.assertEqual(req.status, 'awaiting_user_confirmation')

    def test_make_logged_in_request(self):
        """Logged-in user completes the wizard; request goes straight to
        'awaiting_response' (no email confirmation needed)."""
        self.do_login()
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('foirequest-make_request')))
        with CheckJSErrors(self.selenium):
            search_pbs = self.selenium.find_element_by_id('id_public_body')
            search_pbs.send_keys(self.pb.name)
            self.selenium.find_element_by_class_name('search-public_bodies-submit').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('.search-results .search-result'))
            self.selenium.find_element_by_css_selector('.search-results .search-result label').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('option-check_foi').is_displayed())
            self.selenium.find_element_by_id('option-check_foi').click()
            self.selenium.find_element_by_id('continue-foicheck').click()
            req_title = 'FoiRequest Number'
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('id_body').is_displayed()
            )
            self.selenium.find_element_by_id('id_subject').send_keys(req_title)
            self.selenium.find_element_by_id('id_body').send_keys('Documents describing something...')
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_elements_by_css_selector('#similar-requests li'))
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('review-button').is_displayed()
            )
            self.selenium.find_element_by_id('review-button').click()
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_id('send-request-button').is_displayed())
            self.selenium.find_element_by_id('send-request-button').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('#messages'))
        req = FoiRequest.objects.filter(user=self.user).order_by('-id')[0]
        self.assertIn(req.get_absolute_url(), self.selenium.current_url)
        self.assertEqual(req.title, req_title)
        self.assertEqual(req.public, True)
        self.assertEqual(req.public_body, self.pb)
        self.assertEqual(req.status, 'awaiting_response')

    def test_make_logged_in_request_no_pb_yet(self):
        """Logged-in user submits without choosing a public body; request
        ends up in 'publicbody_needed'."""
        self.do_login()
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('foirequest-make_request')))
        with CheckJSErrors(self.selenium):
            self.selenium.find_element_by_id('option-emptypublicbody').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('option-check_foi').is_displayed())
            self.selenium.find_element_by_id('option-check_foi').click()
            self.selenium.find_element_by_id('continue-foicheck').click()
            req_title = 'FoiRequest Number'
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('id_body').is_displayed()
            )
            self.selenium.find_element_by_id('id_subject').send_keys(req_title)
            self.selenium.find_element_by_id('id_body').send_keys('Documents describing something...')
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_elements_by_css_selector('#similar-requests li'))
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('review-button').is_displayed()
            )
            self.selenium.find_element_by_id('review-button').click()
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_id('send-request-button').is_displayed())
            self.selenium.find_element_by_id('send-request-button').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_css_selector('#messages'))
        req = FoiRequest.objects.filter(user=self.user).order_by('-id')[0]
        self.assertIn(req.get_absolute_url(), self.selenium.current_url)
        self.assertEqual(req.title, req_title)
        self.assertEqual(req.public, True)
        self.assertTrue(req.public_body is None)
        self.assertEqual(req.status, 'publicbody_needed')

    def test_make_request_logged_out_with_existing_account(self):
        """Anonymous user enters an email that already has an account; logs
        in via the popup raised from the error message and resubmits."""
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('foirequest-make_request')))
        with CheckJSErrors(self.selenium):
            self.selenium.find_element_by_id('option-emptypublicbody').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('option-check_foi').is_displayed())
            self.selenium.find_element_by_id('option-check_foi').click()
            self.selenium.find_element_by_id('continue-foicheck').click()
            req_title = 'FoiRequest Number'
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('id_body').is_displayed()
            )
            self.selenium.find_element_by_id('id_subject').send_keys(req_title)
            self.selenium.find_element_by_id('id_body').send_keys('Documents describing something...')
            user_first_name = self.user.first_name
            user_last_name = self.user.last_name
            self.selenium.find_element_by_id('id_first_name')\
                .send_keys(user_first_name)
            self.selenium.find_element_by_id('id_last_name')\
                .send_keys(user_last_name)
            self.selenium.find_element_by_id("id_user_email").send_keys(self.user.email)
            self.selenium.find_element_by_id('id_terms').click()
            self.selenium.find_element_by_id('id_public').click()
            self.selenium.find_element_by_id('id_private').click()
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_elements_by_css_selector('#similar-requests li'))
            WebDriverWait(self.selenium, 5).until(
                lambda driver: driver.find_element_by_id('review-button').is_displayed()
            )
            self.selenium.find_element_by_id('review-button').click()
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_id('send-request-button').is_displayed())
            self.selenium.find_element_by_id('send-request-button').click()
        main_window_handle = self.selenium.current_window_handle
        # Link inside the duplicate-email error message opens a login popup.
        login_link = '//div[@class="user_data_form"]//ul[@class="errorlist"]//a'
        with CheckJSErrors(self.selenium):
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_xpath(login_link)
            )
            self.scrollTo(klass='target-small')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_xpath(login_link).is_displayed())
            self.selenium.find_element_by_xpath(login_link).click()
        popup_handle = [wh for wh in self.selenium.window_handles if wh != main_window_handle][0]
        self.selenium.switch_to_window(popup_handle)
        with CheckJSErrors(self.selenium):
            password_input = self.selenium.find_element_by_id("id_password")
            password_input.send_keys('froide')
            self.selenium.find_element_by_xpath(
                '//form//button[contains(text(), "Log In")]').click()
        self.selenium.switch_to_window(main_window_handle)
        with CheckJSErrors(self.selenium):
            self.selenium.find_element_by_id('review-button').click()
            WebDriverWait(self.selenium, 10).until(
                lambda driver: 'in' in self.selenium.find_element_by_id('step-review').get_attribute('class'))
            self.scrollTo(id='send-request-button')
            WebDriverWait(self.selenium, 10).until(
                lambda driver: self.selenium.find_element_by_id('send-request-button').is_displayed())
            self.selenium.find_element_by_id('send-request-button').click()
        req = FoiRequest.objects.filter(user=self.user).order_by('-id')[0]
        self.assertIn(req.get_absolute_url(), self.selenium.current_url)
        self.assertEqual(req.title, req_title)
        self.assertEqual(req.public, False)
        self.assertTrue(req.public_body is None)
        self.assertEqual(req.status, 'publicbody_needed')

    def test_collapsed_menu(self):
        """On a narrow viewport the collapsed navbar toggle reveals the menu."""
        self.selenium.set_window_size(600, 800)
        self.selenium.get('%s%s' % (self.live_server_url,
            reverse('index')))
        self.selenium.find_element_by_css_selector('.navbar-toggle').click()
        WebDriverWait(self.selenium, 5).until(
            lambda driver: driver.find_element_by_css_selector('.navbar-form').is_displayed()
        )
Keen competition : Worldwide enterprise has to face eager (too much) competitors on this planet market. Therefore, developing nations open up their economies by way of liberal financial insurance policies. The Companies Act requires that an audited set of economic statements, made as much as not more than six months before each Annual Normal Assembly, is to be presented to the shareholders at the meeting. Special function of science and know-how : Worldwide enterprise offers a variety of significance to science and technology. FRS are primarily adopted from Worldwide Monetary Reporting Requirements (IFRS). Worldwide enterprise helps them to switch such high high-end technologies to the growing nations. Audit Exemption Beginning with the financial 12 months beginning on or after 15 Could 2003, the following companies are no longer required to have their accounts audited. So, developing nations discover it very difficult to face competition from developed nations. All companies incorporated underneath the Corporations Act are required to take care of books of accounts that sufficiently explain the transactions and monetary position of the corporate. Financial Periods Commencing before 1 January 2003 The principal source of accounting rules in Singapore, specifically Statements of Accounting Requirements (SAS) and Interpretation of Statements of Accounting Requirements (INT), are issued by ICPAS. Developed countries even have many contacts on the earth market. All this ends in financial development of the developing countries. As a substitute, Singapore Monetary Reporting Standards (FRS), issued by the brand new accounting requirements-setting body, the Council on Corporate Disclosure and Governance (CCDG), are now effective. Many governments do not allow worldwide companies to enter their nations. They may then report on the trueness and fairness of the financial statements to the shareholders on the Annual Basic Meeting. 
Integration of economies: International business integrates (combines) the economies of many nations.
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta 4
# Copyright 2015 tvalacarta@gmail.com
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of pelisalacarta 4.
#
# pelisalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pelisalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pelisalacarta 4.  If not, see <http://www.gnu.org/licenses/>.
# ------------------------------------------------------------
# Configuration parameters (XBMC platform backend)
# ------------------------------------------------------------

import os
import sys

import xbmc
import xbmcplugin

PLATFORM_NAME = "xbmc-plugin"
PLUGIN_NAME = "pelisalacarta"

def get_platform(full_version=False):
    """Return the platform identifier.

    full_version is only useful on xbmc/kodi: when True a dict with
    version details is returned, otherwise just the platform name string.
    """
    # NOTE(review): the 'plaform' key looks like a typo for 'platform', but it
    # is a runtime dict key that callers may rely on — confirm before renaming.
    ret = {
        'num_version': 9.0 ,
        'name_version': PLATFORM_NAME ,
        'video_db': "",
        'plaform': PLATFORM_NAME
    }
    if full_version:
        return ret
    else:
        return PLATFORM_NAME

def is_xbmc():
    # This backend always runs inside XBMC/Kodi.
    return True

def get_library_support():
    # Library integration is available on this platform.
    return True

def get_system_platform():
    """Return the OS platform XBMC is running on
    ('linux', 'xbox', 'windows', 'osx' or 'unknown')."""
    platform = "unknown"
    if xbmc.getCondVisibility("system.platform.linux"):
        platform = "linux"
    elif xbmc.getCondVisibility("system.platform.xbox"):
        platform = "xbox"
    elif xbmc.getCondVisibility("system.platform.windows"):
        platform = "windows"
    elif xbmc.getCondVisibility("system.platform.osx"):
        platform = "osx"
    return platform

def open_settings():
    # Open the add-on settings dialog.
    xbmcplugin.openSettings(sys.argv[0])

def get_setting(name, channel=""):
    """Return the value of the requested configuration parameter.

    Returns the value of parameter 'name' from the global configuration or
    from the channel-specific configuration of 'channel'.

    If a channel name is given, it looks in
    \\addon_data\\plugin.video.pelisalacarta\\settings_channels for the
    channel_data.json file and reads the value of parameter 'name'. If
    channel_data.json does not exist, it looks for channel.xml in the
    channels folder and creates a channel_data.json before returning the
    requested value. If parameter 'name' does not exist in
    channel_data.json it is looked up in the global configuration and, if
    absent there too, an empty str is returned.

    Parameters:
        name -- parameter name
        channel [optional] -- channel name

    Returns:
        value -- the value of parameter 'name'
    """
    # Specific channel setting
    if channel:
        # logger.info("config.get_setting reading channel setting '"+name+"' from channel xml")
        from core import channeltools
        value = channeltools.get_channel_setting(name, channel)
        # logger.info("config.get_setting -> '"+repr(value)+"'")
        if value is not None:
            return value
        else:
            return ""
    # Global setting
    else:
        # logger.info("config.get_setting reading main setting '"+name+"'")
        # channel is "" in this branch, so this is effectively getSetting(name).
        value = xbmcplugin.getSetting(channel + name)
        # Translate Path if start with "special://"
        if value.startswith("special://") and "librarypath" not in name:
            value = xbmc.translatePath(value)
        # logger.info("config.get_setting -> '"+value+"'")
        return value

def set_setting(name, value, channel=""):
    """Set the value of the indicated configuration parameter.

    Stores 'value' as the value of parameter 'name' in the global
    configuration or in the channel-specific configuration of 'channel'.
    Returns the stored value, or None if the assignment could not be
    completed.

    If a channel name is given, it looks in
    \\addon_data\\plugin.video.pelisalacarta\\settings_channels for the
    channel_data.json file and sets parameter 'name' to 'value'. If
    channel_data.json does not exist, it looks for channel.xml in the
    channels folder and creates a channel_data.json before modifying
    parameter 'name'. If parameter 'name' does not exist it is added,
    with its value, to the corresponding file.

    Parameters:
        name -- parameter name
        value -- parameter value
        channel [optional] -- channel name

    Returns:
        'value' if the value could be set, None otherwise
    """
    if channel:
        from core import channeltools
        return channeltools.set_channel_setting(name, value, channel)
    else:
        try:
            xbmcplugin.setSetting(name, value)
        except:
            return None
        return value

def get_localized_string(code):
    """Return the localized string for `code`, UTF-8 encoded when possible."""
    dev = xbmc.getLocalizedString(code)
    try:
        dev = dev.encode("utf-8")
    except:
        pass
    return dev

def get_library_config_path():
    # Ensure the setting exists (created on first run) before returning it.
    value = get_setting("librarypath")
    if value == "":
        verify_directories_created()
        value = get_setting("librarypath")
    return value

def get_library_path():
    """Return the library path translated to a real filesystem path."""
    return xbmc.translatePath(get_library_config_path())

def get_temp_file(filename):
    """Return the real path of `filename` inside XBMC's temp directory."""
    return xbmc.translatePath(os.path.join("special://temp/", filename))

def get_runtime_path():
    return os.getcwd()

def get_data_path():
    """Return (and create if missing) the add-on's profile data directory."""
    dev = xbmc.translatePath("special://profile/plugin_data/video/pelisalacarta")
    # Create the directory if it does not exist
    if not os.path.exists(dev):
        os.makedirs(dev)
    return dev

def get_cookie_data():
    """Read and return the whole contents of the cookies.dat file."""
    import os
    ficherocookies = os.path.join(get_data_path(), 'cookies.dat')
    cookiedatafile = open(ficherocookies, 'r')
    cookiedata = cookiedatafile.read()
    cookiedatafile.close()
    return cookiedata

# Test if all the required directories are created
def verify_directories_created():
    from core import logger
    from core import filetools

    # (setting name, default subdirectory under the profile data path)
    config_paths = [["librarypath", "library"],
                    ["downloadpath", "downloads"],
                    ["downloadlistpath", "downloads/list"],
                    ["settings_path", "settings_channels"]]

    for path, default in config_paths:
        saved_path = get_setting(path)
        if not saved_path:
            saved_path = "special://profile/plugin_data/video/pelisalacarta/" + default
            set_setting(path, saved_path)
        saved_path = xbmc.translatePath(saved_path)
        if not filetools.exists(saved_path):
            logger.debug("Creating %s: %s" % (path, saved_path))
            filetools.mkdir(saved_path)
        # Library
        if path == "librarypath":
            set_setting("library_version", "v4")
Hi, I am interested in your 2019 Suzuki GSX-R1000 (stock #TBD) listed on Autotrader. I'd like to get more information about this vehicle and confirm its availability. From there, the fundamental capabilities that make a great sportbike were distilled down to three words — run, stop, and turn — making the new GSX-R1000 run better, turn better, and stop better than any other sportbike.
import numpy as np
import re
import os
import cv2
import csv

import funcionesCV_recurrentes as cvr
import georef
from conf import RUTA_PLANTILLAS

# \D matches a non-digit, \s a whitespace character; both are used with
# .match(), i.e. they only test the anchored position, not the whole string.
numeros = re.compile('\D')
espacios = re.compile('\s')

# Load the localities table once at import time, normalizing malformed ids:
# ids with embedded whitespace are compacted and a non-digit third character
# is forced to '0'.  Warnings are printed so the user can fix the CSV.
with open(RUTA_PLANTILLAS+'/localidades.csv') as localidades:
    reader = csv.DictReader(localidades)
    err_msg = ', no es un código válido. '
    err_no0 = 'El tercer caractér no corresponde con el patrón.'
    err_spa = 'el código contiene espacios.'
    err_tail = ' Lo corregiré automáticamente pero es conveniente que lo'
    err_tail = err_tail + ' verifique en el archivo "localidades.cvs"'
    LOCALIDADES = {}
    for row in reader:
        # Keep only the descriptive columns in the stored record.
        Localidad = dict(row)
        Localidad.pop('Idlocalidad')
        Localidad.pop('Comentarios')
        err = '{0}'.format(row['Idlocalidad']) + err_msg
        # NOTE(review): .match only detects whitespace at position 0 —
        # interior spaces slip through; confirm whether .search was intended.
        if espacios.match(row['Idlocalidad']):
            err = err + err_spa + err_tail
            print(err)
            row['Idlocalidad'] = ''.join(row['Idlocalidad'].split())
        if numeros.match(row['Idlocalidad'][2]):
            err = err + err_no0 + err_tail
            print(err)
            id_as_list = list(row['Idlocalidad'])
            id_as_list[2] = '0'
            row['Idlocalidad'] = ''.join(id_as_list)
            print(row['Idlocalidad'])
        LOCALIDADES[row['Idlocalidad']] = Localidad


def _is_unset(value):
    """Return True when *value* is the 'use the instance default' sentinel.

    Accepts both None (new sentinel) and an explicit empty list (the old
    mutable-default sentinel) for backward compatibility.
    """
    return value is None or (isinstance(value, list) and not value)


class localidad(object):
    '''
    Class that handles a single locality template and its detection
    inside a map image.
    '''

    def __init__(self, template, imagen, id):
        '''
        Initialize the locality by loading its template image and trying
        to locate it inside *imagen*.

        template -- path of the grayscale template file
        imagen   -- target image (as loaded by OpenCV) to search in
        id       -- identifier used only for the "not found" message
        '''
        self.template = cv2.imread(template, 0)
        self.w, self.h = self.template.shape[::-1]
        self.ruta_archivo = template
        self.nombre_archivo = os.path.basename(template)
        # The file name minus its 4-char extension is the locality id.
        self.id = os.path.basename(template)[:-4]
        self.nombre = LOCALIDADES[self.id]['Nombre']
        self.imagen = imagen
        # Fix: run the (expensive) template detection ONCE and reuse the
        # result; the original called cvr.detectar twice back to back.
        deteccion = cvr.detectar(self.template, imagen, 400000)
        if deteccion[0] is not None:
            self.supi, self.infd, self.roi = deteccion
        else:
            self.supi, self.infd, self.roi = (None, None, None)
            print('No se encontraron coincidencias para {0}'.format(id))

    def enmarcar(self, color):
        """Draw the bounding rectangle of the match on the stored image.

        Returns the annotated image, or None when no match was found.
        """
        if (self.supi is not None and self.infd is not None and
                self.roi is not None):
            enmarcado = cv2.rectangle(self.imagen, self.supi, self.infd,
                                      color, 1)
            return(enmarcado)

    def escribir_nombre(self, color, imagen=None, supi=None):
        """Write the locality id at *supi* on *imagen*.

        imagen/supi default to the instance's own image and match corner.
        Fix: the original used mutable default arguments ([]), which are
        shared across calls; None is now the sentinel (an explicit [] is
        still honored for backward compatibility).
        Returns the annotated image, or None when there is no anchor point.
        """
        if _is_unset(supi):
            supi = self.supi
        if _is_unset(imagen):
            imagen = self.imagen
        if supi is not None:
            nombre_en_mapa = cv2.putText(imagen, self.id, supi,
                                         cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                                         color, 1
                                         )
            return(nombre_en_mapa)
Refer to the two show output examples below. The switch with the e8ba.70b5.7180 MAC address is the root bridge for which VLAN or VLANs? Refer to the Cisco IOS XR commands exhibit. The router administrator is trying to activate a software package on the router but is not able to do so. Which statement about this problem is true? A. The router needs to be in the global configuration mode. B. The router needs to be in the admin global configuration mode. C. The router needs to be in the admin EXEC mode. D. The install activate command is not the correct command to use. E. The administrator needs to log in as the “root” user instead of the “admin” user. During the BGP route selection process on Cisco routers, which BGP attribute is examined first, to determine the best path to use? Which type of service provider is responsible for offering backbone connectivity services to other service providers? Cisco IP NGN service providers can offer multiple services to their customers by using which type of technology in their core networks?
# -*- coding: utf-8 -*- import json import os from collections import OrderedDict from unittest.mock import Mock import pytest from gridsync import autostart_file_path, config_dir, pkgdir from gridsync.filter import ( apply_filters, filter_tahoe_log_message, get_filters, ) @pytest.fixture def core(): c = Mock() c.executable = "/tmp/test/tahoe.exe" gateway = Mock() gateway.name = "TestGrid" gateway.newscap = "URI:NEWSCAP" storage_settings = OrderedDict() # Because python3.5 storage_settings["v0-22222"] = { "anonymous-storage-FURL": "pb://333@444.example:1234/5555" } storage_settings["v0-66666"] = { "anonymous-storage-FURL": "pb://777@888.example:1234/9999" } gateway.get_settings = Mock( return_value={ "rootcap": "URI:000:111", "introducer": "pb://aaa@bbb.example:12345/ccc", "storage": storage_settings, } ) gateway.magic_folders = OrderedDict() # Because python3.5 gateway.magic_folders["TestFolder"] = { "collective_dircap": "URI:aaa:bbb", "upload_dircap": "URI:ccc:ddd", "admin_dircap": "URI:eee:fff", "directory": "/tmp/test/TestFolder", "member": "Alice", } gateway.magic_folders["CatPics"] = { "collective_dircap": "URI:ggg:hhh", "upload_dircap": "URI:iii:jjj", "admin_dircap": "URI:kkk:lll", "directory": "/tmp/test/CatPics", "member": "Bob", } c.gui.main_window.gateways = [gateway] return c @pytest.mark.parametrize( "pair", [ (pkgdir, "PkgDir"), (config_dir, "ConfigDir"), (autostart_file_path, "AutostartFilePath"), (os.path.expanduser("~"), "HomeDir"), ], ) def test_get_filters_pair_in_default_filters(core, pair): filters = get_filters(core) assert pair in filters @pytest.mark.parametrize( "string", [ pkgdir, config_dir, autostart_file_path, "TestGrid", "URI:NEWSCAP", "URI:000:111", "v0-22222", "pb://333@444.example:1234/5555", "v0-66666", "pb://777@888.example:1234/9999", "TestFolder", "URI:aaa:bbb", "URI:ccc:ddd", "URI:eee:fff", "/tmp/test/TestFolder", "Alice", "CatPics", "URI:ggg:hhh", "URI:iii:jjj", "URI:kkk:lll", "/tmp/test/CatPics", "Bob", 
os.path.expanduser("~"), "/tmp/test/tahoe.exe", ], ) def test_apply_filters_string_not_in_result(core, string): filters = get_filters(core) in_str = "Bob gave {} to Alice".format(string) result = apply_filters(in_str, filters) assert string not in result @pytest.mark.parametrize( "string,filtered", [ (pkgdir, "PkgDir"), (config_dir, "ConfigDir"), (autostart_file_path, "AutostartFilePath"), ("TestGrid", "GatewayName:1"), ("URI:NEWSCAP", "Newscap:1"), ("URI:000:111", "Rootcap:1"), ("v0-22222", "StorageServerName:1:1"), ("pb://333@444.example:1234/5555", "StorageServerFurl:1:1"), ("v0-66666", "StorageServerName:1:2"), ("pb://777@888.example:1234/9999", "StorageServerFurl:1:2"), ("URI:aaa:bbb", "Folder:1:1:CollectiveDircap"), ("URI:ccc:ddd", "Folder:1:1:UploadDircap"), ("URI:eee:fff", "Folder:1:1:AdminDircap"), ("/tmp/test/TestFolder", "Folder:1:1:Directory"), ("TestFolder", "Folder:1:1:Name"), ("Alice", "Folder:1:1:Member"), ("URI:ggg:hhh", "Folder:1:2:CollectiveDircap"), ("URI:iii:jjj", "Folder:1:2:UploadDircap"), ("URI:kkk:lll", "Folder:1:2:AdminDircap"), ("/tmp/test/CatPics", "Folder:1:2:Directory"), ("CatPics", "Folder:1:2:Name"), ("Bob", "Folder:1:2:Member"), (os.path.expanduser("~"), "HomeDir"), ("/tmp/test/tahoe.exe", "TahoeExecutablePath"), ], ) def test_apply_filters_filtered_string_in_result(core, string, filtered): filters = get_filters(core) in_str = "Bob gave {} to Alice".format(string) result = apply_filters(in_str, filters) assert "<Filtered:{}>".format(filtered) in result @pytest.mark.parametrize( "msg,keys", [ ( { "action_type": "dirnode:add-file", "action_status": "started", "metadata": { "last_downloaded_timestamp": 1554248457.597176, "user_mtime": 1554212870.7714074, "version": 0, }, "name": "lolcat.jpg", "overwrite": True, "task_level": [4, 3, 5, 6, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248457.597313, }, ["name"], ), ( { "action_type": "invite-to-magic-folder", "action_status": "started", "timestamp": 
1554305616.315925, "client_num": 0, "nickname": "Alice\u00f8", "task_level": [1], "task_uuid": "c0fd93dc-01c3-48e5-a0fa-14028cb83cdc", }, ["nickname"], # XXX MemberName ), ( { "action_type": "join-magic-folder", "action_status": "started", "timestamp": 1554305611.622096, "local_dir": "cli/MagicFolder/create-and-then-invite-join/magic", "client_num": 0, "task_uuid": "41282946-79da-490f-b640-9a0ae349ffb4", "task_level": [1], "invite_code": "URI:DIR2-RO:3x67kv2fmpz2fji4s775o72yxe:jpi2cfxsc4xjioea735g7fnqdjimkn6scpit4xumkkzk27nfm6pq+URI:DIR2:shiycttqoawwqkonizibpkx5ye:6hv4g33odqojq23g5bq22ej6if6kinytivsmx2gwhuol65fxd2za", }, ["local_dir", "invite_code"], ), ( { "action_type": "magic-folder-db:update-entry", "action_status": "started", "last_downloaded_timestamp": 1554248457.008035, "last_downloaded_uri": "URI:CHK:452hmzwvthqbsawh6e4ua4plei:6zeihsoigv7xl7ijdmyzfa7wt5rajqhj3ppmaqgxoilt4n5srszq:1:1:201576", "last_uploaded_uri": "URI:CHK:452hmzwvthqbsawh6e4ua4plei:6zeihsoigv7xl7ijdmyzfa7wt5rajqhj3ppmaqgxoilt4n5srszq:1:1:201576", "pathinfo": { "ctime_ns": 1554212870771407360, "exists": True, "isdir": False, "isfile": True, "islink": False, "mtime_ns": 1554212870771407360, "size": 201576, }, "relpath": "Garfield.jpg", "task_level": [4, 3, 4, 7, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248457.573836, "version": 0, }, ["last_downloaded_uri", "last_uploaded_uri", "relpath"], ), ( { "action_type": "magic-folder:add-pending", "action_status": "started", "relpath": "Grumpy Cat.jpg", "task_level": [2, 2, 9, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248455.404073, }, ["relpath"], ), ( { "action_type": "magic-folder:downloader:get-latest-file", "task_uuid": "bb07d7f1-0af0-44ed-9bcb-e60828fcf0a3", "task_level": [18, 5, 16, 3, 1], "timestamp": 1554305539.486, "name": "blam", "action_status": "started", }, ["name"], ), ( { "action_type": "magic-folder:full-scan", "action_status": "started", "direction": "uploader", 
"nickname": "Demo Grid", "task_level": [2, 1], "task_uuid": "1f7049fd-1530-4d12-8461-94e42655f1be", "timestamp": 1554248626.324124, }, ["nickname"], ), ( { "action_type": "magic-folder:iteration", "action_status": "started", "direction": "uploader", "nickname": "Demo Grid", "task_level": [4, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248455.40636, }, ["nickname"], ), ( { "action_type": "magic-folder:notified", "action_status": "started", "timestamp": 1554305907.525834, "nickname": "client-0", "task_uuid": "b29934a9-ec4f-44d1-b987-45a8cc0d2ba2", "task_level": [7, 4, 2, 1], "path": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmpP9HEA2/local_dir/bar", "direction": "uploader", }, ["nickname", "path"], ), ( { "action_type": "magic-folder:process-directory", "task_uuid": "bc637a12-9141-41de-b36e-6eccc0a65e86", "task_level": [8, 3, 2, 7, 6], "timestamp": 1554305529.111, "action_status": "succeeded", "created_directory": "subdir", }, ["created_directory"], ), ( { "action_type": "magic-folder:process-item", "action_status": "started", "item": {"relpath": "Garfield.jpg", "size": 201576}, "task_level": [13, 3, 2, 1], "task_uuid": "d3a0e3db-3cd6-49c5-9847-7c742b6eec56", "timestamp": 1554250168.097768, }, ["item"], # XXX dict with relpath ), ( { "action_type": "magic-folder:processing-loop", "action_status": "started", "direction": "uploader", "nickname": "Demo Grid", "task_level": [3, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248455.406146, }, ["nickname"], ), ( { "action_type": "magic-folder:remove-from-pending", "action_status": "started", "pending": [ "Cheshire Cat.jpeg", "Kitler.png", "Colonel Meow.jpg", "Waffles.jpg", "Grumpy Cat.jpg", "lolcat.jpg", ], "relpath": "lolcat.jpg", "task_level": [4, 3, 5, 3, 1], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248457.596115, }, ["pending", "relpath"], # XXX list of paths ), ( { "action_type": 
"magic-folder:rename-conflicted", "abspath_u": "/Users/vagrant/tahoe-lafs/_trial_temp/cli/MagicFolder/write-downloaded-file/foobar", "action_status": "started", "timestamp": 1554305923.406739, "replacement_path_u": "/Users/vagrant/tahoe-lafs/_trial_temp/cli/MagicFolder/write-downloaded-file/foobar.tmp", "task_level": [7, 2, 1], "task_uuid": "9e88518e-d2f4-4459-babc-e45e8a24034d", }, ["abspath_u", "replacement_path_u"], ), ( { "action_type": "magic-folder:rename-conflicted", "task_level": [7, 2, 2], "timestamp": 1554305923.408401, "result": "/Users/vagrant/tahoe-lafs/_trial_temp/cli/MagicFolder/write-downloaded-file/foobar.conflict", "action_type": "magic-folder:rename-conflicted", "action_status": "succeeded", "task_uuid": "9e88518e-d2f4-4459-babc-e45e8a24034d", }, ["result"], ), ( { "action_type": "magic-folder:rename-deleted", "abspath_u": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmp1cdGlh/Bob-magic/file1", "task_level": [18, 5, 17, 4, 3, 3, 1], "timestamp": 1554305926.082758, "action_status": "started", "task_uuid": "14100717-85cd-41bc-bb1c-eadc418e760b", }, ["abspath_u"], ), ( { "action_type": "magic-folder:rename-deleted", "task_level": [18, 5, 17, 4, 3, 3, 2], "timestamp": 1554305926.083676, "result": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmp1cdGlh/Bob-magic/file1", "action_type": "magic-folder:rename-deleted", "action_status": "succeeded", "task_uuid": "14100717-85cd-41bc-bb1c-eadc418e760b", }, ["result"], ), ( { "action_type": "magic-folder:scan-remote-dmd", "action_status": "started", "nickname": "admin", "task_level": [3, 2, 1], "task_uuid": "5816398c-a658-4b59-8526-f8052f63e114", "timestamp": 1554248455.52203, }, ["nickname"], # XXX MemberName ), ( { "action_type": "magic-folder:start-downloading", "action_status": "started", "direction": "downloader", "nickname": "Demo Grid", "task_level": [1], "task_uuid": "5816398c-a658-4b59-8526-f8052f63e114", "timestamp": 
1554248455.417441, }, ["nickname"], ), ( { "action_type": "magic-folder:start-monitoring", "action_status": "started", "direction": "uploader", "nickname": "Demo Grid", "task_level": [1], "task_uuid": "e03e0c60-870f-43e3-ae87-5808728ad7ee", "timestamp": 1554248454.973468, }, ["nickname"], ), ( { "action_type": "magic-folder:start-uploading", "action_status": "started", "direction": "uploader", "nickname": "Demo Grid", "task_level": [1], "task_uuid": "1f7049fd-1530-4d12-8461-94e42655f1be", "timestamp": 1554248626.323862, }, ["nickname"], ), ( { "action_type": "magic-folder:stop", "task_uuid": "18bceae8-4f93-4f96-8ccf-cc51986355c6", "task_level": [25, 1], "timestamp": 1554305541.345, "nickname": "magic-folder-default", "action_status": "started", }, ["nickname"], ), ( { "action_type": "magic-folder:stop-monitoring", "action_status": "started", "task_level": [18, 5, 12, 4, 7, 2, 1], "timestamp": 1554305542.267, "nickname": "client-0", "task_uuid": "d7a30d64-992b-48ca-a0e9-84cb0d55ea37", "direction": "uploader", }, ["nickname"], ), ( { "action_type": "magic-folder:write-downloaded-file", "mtime": 1554305970.0, "is_conflict": False, "timestamp": 1554305970.864801, "abspath": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmp4Rwmkc/Bob-magic/file2", "task_level": [20, 5, 41, 4, 3, 3, 2, 1], "size": 9, "task_uuid": "4ac59194-cbbf-43d4-8c36-c940541b608e", "action_status": "started", "now": 1554305970.864769, }, ["abspath"], ), ( { "action_type": "notify-when-pending", "task_level": [9, 3, 1], "timestamp": 1554305908.530923, "filename": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmpC02XVl/local_dir/subdir/some-file", "action_status": "started", "task_uuid": "19b59820-a424-4773-8fd5-e6a5f2655339", }, ["filename"], ), ( { "action_type": "watchdog:inotify:any-event", "event": "FileCreatedEvent", "action_status": "started", "timestamp": 1554305884.723024, "path": 
"/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmp_mEbEu/foo.bar", "task_level": [1], "task_uuid": "e03ccbec-f120-49a3-9264-1ae63fdb3c5e", }, ["path"], ), ], ) def test__apply_filter_by_action_type(msg, keys): for key in keys: original_value = str(msg.get(key)) filtered_msg = filter_tahoe_log_message(json.dumps(msg), "1") assert original_value not in filtered_msg @pytest.mark.parametrize( "msg,keys", [ ( { "message_type": "fni", "task_uuid": "564c8258-e36c-4455-95f0-a8c6b1abb481", "info": "Event('FILE_ACTION_ADDED', u'blam.tmp')", "task_level": [1], "timestamp": 1554305542.236, }, ["info"], ), ( { "message_type": "magic-folder:add-to-download-queue", "timestamp": 1554308128.248248, "task_level": [79, 2, 2, 2, 6], "task_uuid": "1dbadb17-3260-46d4-9a10-6177a5309060", "relpath": "/tmp/magic_folder_test", }, ["relpath"], ), ( { "message_type": "magic-folder:all-files", "task_uuid": "3082ca20-b897-45d6-9f65-a2ed4574f2d2", "task_level": [14, 2, 3, 2], "timestamp": 1554305532.329, "files": ["what1"], }, ["files"], ), ( { "message_type": "magic-folder:downloader:get-latest-file:collective-scan", "task_uuid": "a331e6e8-8e07-4393-9e49-fa2e1af46fa4", "task_level": [18, 5, 10, 2, 2], "timestamp": 1554305538.049, "dmds": ["Alice\u00f8", "Bob\u00f8"], }, ["dmds"], ), ( { "message_type": "magic-folder:item:status-change", "relpath": "foo", "task_level": [6, 4, 2, 2, 2, 2], "timestamp": 1554305907.522427, "status": "queued", "task_uuid": "b29934a9-ec4f-44d1-b987-45a8cc0d2ba2", }, ["relpath"], ), ( { "message_type": "magic-folder:maybe-upload", "relpath": "subdir/some-file", "task_level": [10, 3, 2, 5], "timestamp": 1554305908.534811, "task_uuid": "19b59820-a424-4773-8fd5-e6a5f2655339", }, ["relpath"], ), ( { "message_type": "magic-folder:notified-object-disappeared", "timestamp": 1554305910.549119, "task_level": [11, 3, 2, 5], "path": "/Users/vagrant/tahoe-lafs/_trial_temp/immutable/Test/code/clients/2g45r67f/tmp/tmpY7_3G4/local_dir/foo", 
"task_uuid": "b5d0c0ee-c4d1-4765-b1ed-e7ff5f556dc5", }, ["path"], ), ( { "message_type": "magic-folder:remote-dmd-entry", "pathentry": { "ctime_ns": 1554212870771407360, "last_downloaded_timestamp": 1554248457.008035, "last_downloaded_uri": "URI:CHK:452hmzwvthqbsawh6e4ua4plei:6zeihsoigv7xl7ijdmyzfa7wt5rajqhj3ppmaqgxoilt4n5srszq:1:1:201576", "last_uploaded_uri": "URI:CHK:452hmzwvthqbsawh6e4ua4plei:6zeihsoigv7xl7ijdmyzfa7wt5rajqhj3ppmaqgxoilt4n5srszq:1:1:201576", "mtime_ns": 1554212870771407360, "size": 201576, "version": 0, }, "relpath": "Garfield.jpg", "remote_uri": "URI:CHK:452hmzwvthqbsawh6e4ua4plei:6zeihsoigv7xl7ijdmyzfa7wt5rajqhj3ppmaqgxoilt4n5srszq:1:1:201576", "remote_version": 0, "task_level": [3, 2, 2], "task_uuid": "cab6c818-50d8-4759-a53a-bd0bb64a2062", "timestamp": 1554248626.503385, }, ["pathentry", "relpath", "remote_uri"], ), ( { "message_type": "magic-folder:scan-batch", "batch": ["/tmp/magic_folder_test"], "task_level": [50, 2, 2, 3, 3], "timestamp": 1554305971.962848, "task_uuid": "4ac59194-cbbf-43d4-8c36-c940541b608e", }, ["batch"], ), ( { "message_type": "magic-folder:item:status-change", "relpath": "Grumpy Cat.jpg", "status": "queued", "task_level": [2, 2, 9, 2], "task_uuid": "c7a1ec7e-93c1-4549-b916-adc28cda73a1", "timestamp": 1554248455.404471, }, ["relpath"], ), ( { "message_type": "processing", "task_uuid": "19595202-3d20-441f-946e-d409709130d4", "info": "Event('FILE_ACTION_MODIFIED', u'blam.tmp')", "task_level": [1], "timestamp": 1554305535.829, }, ["info"], ), ], ) def test__apply_filter_by_message_type(msg, keys): for key in keys: original_value = str(msg.get(key)) filtered_msg = filter_tahoe_log_message(json.dumps(msg), "1") assert original_value not in filtered_msg
>> matter of "quality of implementation". > version-dependent as the peephole optimizer itself. about code that's never called, but it is a change. Prev by Date: [Python-Dev] Add more SyntaxWarnings? Next by Date: [Python-Dev] Add more SyntaxWarnings? Previous by thread: [Python-Dev] Add more SyntaxWarnings? Next by thread: [Python-Dev] Add more SyntaxWarnings?
import builtins
import logging
import copy
import meta
from ast import *
import types
import inspect

import numpy as np
import theano
import theano.tensor as T

import autodiff
import autodiff.utils as utils
import autodiff.functions

import collections

logger = logging.getLogger('autodiff')

# XXX FIXME This will not do - seed must be exposed.
# from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from theano.tensor.shared_randomstreams import RandomStreams
global_randomstreams = RandomStreams(seed=12345)
# seed = np.random.randint(1, 999999))


#########################
#########################
# from numba source

import linecache
import textwrap

try:
    from meta.decompiler import decompile_func
except Exception:
    # Fallback stub: defer the failure until decompilation is actually
    # attempted, so importing this module without Meta still works.
    def decompile_func(*args, **kwargs):
        raise Exception("Could not import Meta -- Cannot recreate source "
                        "from bytecode")


def fix_ast_lineno(tree):
    """Clamp every node's location to line 1, column 0, and return *tree*.

    NOTE: A hack to fix assertion error in debug mode due to bad lineno.
    Lineno must increase monotonically for co_lnotab, the "line number
    table", to work correctly.  This makes it impossible to do traceback,
    but that is not possible anyway since we are dynamically changing the
    source code.
    """
    # Fix: this module only does `from ast import *`, so the name `ast`
    # itself was never bound and the original `ast.walk(tree)` raised
    # NameError; use the star-imported walk() instead.
    for node in walk(tree):
        # only expr and stmt (and their subclasses) have lineno and
        # col_offset, but setting them unconditionally is harmless:
        # if isinstance(node, expr) or isinstance(node, stmt):
        node.lineno = 1
        node.col_offset = 0
    return tree


## Fixme:
##  This should be changed to visit the AST and fix-up where a None object
##  is present as this will likely not work for all AST.
def _fix_ast(myast): import _ast # Remove Pass nodes from the end of the ast while len(myast.body) > 0 and isinstance(myast.body[-1], _ast.Pass): del myast.body[-1] # Add a return node at the end of the ast if not present if len(myast.body) < 1 or not isinstance(myast.body[-1], _ast.Return): name = _ast.Name(id='None', ctx=_ast.Load(), lineno=0, col_offset=0) myast.body.append(Return(name)) # remove _decorator list which sometimes confuses ast visitor try: indx = myast._fields.index('decorator_list') except ValueError: return else: myast.decorator_list = [] def get_ast(func): if func.__name__ == '<lambda>': func_def = decompile_func(func) if isinstance(func_def, Lambda): func_def = FunctionDef(name='<lambda>', args=func_def.args, body=[Return(func_def.body)], decorator_list=[]) assert isinstance(func_def, FunctionDef) return func_def try: linecache.checkcache(inspect.getsourcefile(func)) source = inspect.getsource(func) source_module = inspect.getmodule(func) except IOError: return decompile_func(func) else: # Split off decorators # TODO: This is not quite correct, we can have comments or strings # starting at column 0 and an indented function ! 
source = textwrap.dedent(source) decorators = 0 # decorator can have multiple lines while not source.lstrip().startswith('def'): assert source decorator, sep, source = source.partition('\n') decorators += 1 source_file = getattr(source_module, '__file__', '<unknown file>') module_ast = compile(source, source_file, "exec", PyCF_ONLY_AST, True) lineoffset = func.__code__.co_firstlineno + decorators - 1 increment_lineno(module_ast, lineoffset) assert len(module_ast.body) == 1 func_def = module_ast.body[0] _fix_ast(func_def) assert isinstance(func_def, FunctionDef) # remove docstrings (really any unassigned strings) for node in func_def.body: if isinstance(node, Expr) and isinstance(node.value, Str): func_def.body.remove(node) return func_def ######################### ######################### def get_source(ast): if hasattr(ast, '__code__'): ast = get_ast(ast) elif isinstance(ast, collections.Callable): ast = get_ast(ast.__call__) return meta.asttools.dump_python_source(ast) def print_ast(ast): if hasattr(ast, '__code__'): ast = get_ast(ast) elif isinstance(ast, collections.Callable): ast = get_ast(ast.__call__) meta.asttools.print_ast(ast) def print_source(ast): if hasattr(ast, '__code__'): ast = get_ast(ast) elif isinstance(ast, collections.Callable): ast = get_ast(ast.__call__) meta.asttools.python_source(ast) def simple_Call(func, args=None): """ Simple alias for building Call nodes that doesn't require specification of keywords, kwargs or starargs. """ args = utils.as_seq(args) call = Call(args=args, func=func, keywords=[], kwargs=None, starargs=None) return call def isvar_ast(name): """ Wraps a Name node in a call to utils.isvar. 
""" isvar = simple_Call(args=utils.as_seq(name), func=Attribute(attr='isvar', ctx=Load(), value=Name(ctx=Load(), id='_utils__'))) return isvar class Context(object): def __init__(self, borrowable=None, force_floatX=False, ignore=None, infer_updates=False, escape_on_error=False): self.sym_vars = dict() self.tags = dict() # FIXME do we need to hold on to all of these itermediates? # ensure these id's do not get recycled by garbage collection self._nogc = [] self._top_def = None self.infer_updates = infer_updates self.updates = collections.OrderedDict() self.borrowable = [id(b) for b in utils.as_seq(borrowable)] self.force_floatX = force_floatX self.ignore = utils.as_seq(ignore, tuple) self.escape_on_error = escape_on_error self.shadowed_containers = dict() def recompile(self, f, nested=False): """ Accepts a function f that operates on numerical objects and returns a function that operates on Theano objects. nested : bool `recompile` resets the context and sets the 'top_node' of the function, which helps in tracing arguments. By passing nested=True, this reset can be bypassed. This is used, for example, when transforming nested functions. In this case, we want to use the same context but keep it when calling recompile. 
""" transformer = TheanoTransformer(context=self) f_ast = get_ast(f) if not nested: self._top_def = f_ast self.tags.clear() transformed_ast = fix_missing_locations(transformer.visit(f_ast)) f_globals = f.__globals__.copy() f_globals.update(dict(_ctx__=transformer, _functions__=autodiff.functions, _T__=theano.tensor, _utils__=autodiff.utils)) if f.__closure__: f_globals.update((v, transformer.shadow(c.cell_contents)) for v, c in zip(f.__code__.co_freevars, f.__closure__)) for name in f.__code__.co_names: if name in f_globals.keys(): f_globals[name] = transformer.shadow(f_globals[name]) try: new_f = meta.decompiler.compile_func(ast_node=transformed_ast, filename='<Context-AST>', globals=f_globals) except SyntaxError as err: if "'return' with argument inside generator" in err.message: if isinstance(transformed_ast.body[-1], Return): transformed_ast.body.pop(-1) new_f = meta.decompiler.compile_func( ast_node=transformed_ast, filename='<Context-AST>', globals=f_globals) else: raise except: raise # add defaults, if necessary (meta erases them and won't recompile!) if f.__defaults__: new_f.__defaults__ = utils.clean_int_args(*f.__defaults__)[0] # recreate method, if necessary if isinstance(f, types.MethodType): new_f = types.MethodType(new_f, f.__self__) return new_f def get_symbolic(self, x): """ Attempts to retrieve the symbolic version of x. if x is an numeric object (int, float, numpy array), it must have been traced by the context during recompiled function execution. if x is a string, it must have been tagged with autodiff.functions.tag(). 
""" if isinstance(x, str): if x in self.sym_vars: return self.sym_vars[x] elif x in self.tags: return self.tags[x] else: raise ValueError( 'Requested the symbolic variable of tag `{0}`' ', but `{0}` was not tagged.'.format(x)) elif utils.isvar(x): return x elif id(x) in self.sym_vars: return self.sym_vars[id(x)] elif isinstance(x, int) and not isinstance(x, bool) and -5 <= x <= 256: raise ValueError( 'Small integers (-5 <= x <= 256) can not be shadowed due to ' 'CPython caching. Try casting the variable as a NumPy int ' 'type or array before tracing: {0}'.format(x)) elif np.asarray(x).dtype == 'object': raise ValueError( 'Requested the symbolic variable shadowing object {0}, but ' 'it was not traced because it is not compatible with any ' 'Theano type.'.format(x)) else: raise ValueError( 'Requested the symbolic variable shadowing object {0}, but ' 'it was not traced because it did not appear in the ' 'function.'.format(x)) def reset(self): self.sym_vars.clear() self.tags.clear() self._nogc = [] self._top_node = None self.shadowed_containers.clear() class TheanoTransformer(NodeTransformer): def __init__(self, context): super(TheanoTransformer, self).__init__() self.context = context def ast_wrap(self, method_name, args): """ Allows Python methods to be applied to AST nodes at runtime. `method_name` is a method of the TheanoTransformer class that accepts Python objects as arguments. `args` are the AST nodes representing the arguments for `method_name` (not including `self`!). ast_wrap returns an `ast.Call()` node which calls the method on the specified arguments at runtime. """ wrapped = simple_Call(func=Attribute(attr=method_name, ctx=Load(), value=Name(ctx=Load(), id='_ctx__')), args=args) return wrapped # ** -------------------------------------------------------- # ** Direct Manipulation (Methods) def shadow(self, args): """ Helper function for `_shadow` that calls it on a flattened version of its argument. 
""" shadow_vars = [self._shadow_inner(x) for x in utils.flatten(args)] new_args = utils.unflatten(args, shadow_vars) if isinstance(new_args, (list, dict, tuple, set)): self.context.shadowed_containers[id(new_args)] = args # add to _nogc to ensure that the id won't be reused self.context._nogc.append(new_args) return new_args def _shadow_inner(self, x): """ Given a numerical variable x, return an equivalent Theano shared variable and store the relationship in self.sym_vars. Otherwise return x. """ # try checking if x is ignored (will fail for NumPy arrays) try: if x in self.context.ignore: return x except: pass # skip Python builtins and ignored id's if (id(x) in self.context.ignore or x is None or isinstance(x, (str, bool))): return x # skip ignored types elif isinstance(x, tuple(i for i in self.context.ignore if isinstance(i, type))): return x # transform compatible numeric values into Theano variables elif isinstance(x, (int, float, np.number, np.ndarray)): # take special care with small ints, because CPython caches them. 
        # --- tail of the shadowing method (its `def` line is above this chunk) ---
        # Small ints are interned by CPython, so they are wrapped in np.int_
        # to get a unique object id before being tracked.
        if isinstance(x, int) and -5 <= x <= 256:
            x = np.int_(x)
        if getattr(x, 'dtype', None) == bool:
            logger.info('Note: Theano has no bool type; '
                        'upcasting bool to int8.')
            x = x.astype('int8')
        if id(x) not in self.context.sym_vars:
            # store id because x will be changed if force_floatX is True
            id_x = id(x)
            # add to _nogc to ensure that the id won't be reused
            self.context._nogc.append(x)
            # check if symbolic variable should be copied or borrowed
            borrow = id_x in self.context.borrowable
            # cast x if requested
            if self.context.force_floatX:
                x = np.array(x, dtype=theano.config.floatX)
            # create symbolic version
            try:
                sym_x = theano.shared(x, borrow=borrow)
            except:
                # NOTE(review): bare fallback — theano.shared rejects some
                # types/keywords; retried without `borrow`.
                sym_x = theano.shared(x)
            # store symbolic version
            self.context.sym_vars[id_x] = sym_x
            # return symbolic version
            return sym_x
        else:
            return self.context.sym_vars[id(x)]
    else:
        return x

    # ==================================================
    # ==================================================
    #
    # Runtime Modifications
    #
    # ==================================================
    # ==================================================

    @staticmethod
    def handle_escape(x):
        """
        Handles escaping variables: replaces symbolic (Theano) values in a
        possibly-nested container with their concrete numeric values.
        """
        def escape(x):
            if isinstance(x, theano.tensor.sharedvar.SharedVariable):
                return x.get_value()
            elif utils.isvar(x):
                try:
                    return x.eval()
                except Exception as e:
                    raise ValueError(
                        'Could not escape {}. \nThe following error was '
                        'raised when trying to call eval():\n{}'.format(x, e))
            else:
                return x
        # escape each leaf, preserving the container structure
        return utils.unflatten(x, [escape(i) for i in utils.flatten(x)])

    def handle_int(self, x, escape=False):
        """
        Coerce a scalar (symbolic or concrete) float to an integer.
        Non-scalars and non-floats are returned unchanged.
        """
        if escape:
            x = self.handle_escape(x)
        if utils.isvar(x) and x.ndim == 0 and 'float' in x.dtype:
            return x.astype('int64')
        elif np.asarray(x).ndim == 0 and np.asarray(x).dtype.kind == 'f':
            return int(x)
        else:
            return x

    def handle_assign_updates(self, args):
        """
        Record a symbolic update (target <- value) for an assignment, when
        the target is a tracked shared variable. Returns the value so the
        wrapped assignment still assigns it.
        """
        target, value = args
        self.shadow(target)
        if id(target) in self.context.sym_vars and utils.isvar(value):
            target_var = self.context.sym_vars[id(target)]
            self.context.updates[target_var] = value
        elif (isinstance(target, T.sharedvar.SharedVariable)
              and target in self.context.sym_vars.values()
              and utils.isvar(value)):
            self.context.updates[target] = value
        return value

    def handle_escaped_call(self, fn, *args, **kwargs):
        """
        Call `fn` on fully escaped (concrete) arguments, then shadow the
        result so it re-enters symbolic tracking.
        """
        esc_args = utils.unflatten(
            args, [TheanoTransformer.handle_escape(a)
                   for a in utils.flatten(args)])
        esc_kwargs = utils.unflatten(
            kwargs, [TheanoTransformer.handle_escape(a)
                     for a in utils.flatten(kwargs)])
        escaped_result = fn(*esc_args, **esc_kwargs)
        return self.shadow(escaped_result)

    def handle_subscript(self, x):
        """
        Theano doesn't have a bool type, but we can track certain variables
        that we know must be boolean and possibly use that information (for
        advanced indexing, for example). We also cast non-integer scalar
        indices to ints (they may be coerced to floats by the force_floatX
        option, for example).
        """
        if isinstance(x, (list, tuple)):
            # check for namedtuples, which need their __new__ args expanded
            if hasattr(x, '_fields'):
                return type(x)(*[self._handle_subscript_inner(xi)
                                 for xi in x])
            else:
                return type(x)(self._handle_subscript_inner(xi) for xi in x)
        else:
            return self._handle_subscript_inner(x)

    def _handle_subscript_inner(self, x):
        # int8 arrays are assumed to be boolean masks (see handle_subscript)
        # and are converted to index arrays via nonzero().
        if utils.isvar(x):
            if x.ndim > 0 and x.dtype == 'int8':
                return x.nonzero()
            elif x.ndim == 0 and 'int' not in x.dtype:
                return x.astype('int64')
            else:
                return x
        else:
            return x

    def handle_tag(self, obj, tag):
        """
        Tag `obj` under `tag` in the context; overwrites (with a warning)
        any existing tag of the same name. Symbolic variables also get
        their `name` attribute set.
        """
        if not isinstance(tag, str):
            raise ValueError('Tag must be a string. Received: {0}'.format(tag))
        if tag in self.context.tags:
            logger.warning(
                '{0} was tagged as {1}, but the tag {1} was already '
                'assigned. Note that the new tag will overwrite '
                'the old one.'.format(obj, tag))
        else:
            self.context.tags[tag] = obj
        if utils.isvar(obj):
            obj.name = tag
        return obj

    def handle_tag_function_arg(self, obj, tag):
        """
        A version of tagging called only by visit_FunctionDef, which tags
        top-level function arguments and stores the tags in sym_vars. These
        tags can not be overwritten.
        """
        self.context.sym_vars[tag] = obj
        if utils.isvar(obj):
            obj.name = tag

    def handle_functions(self, func):
        """
        Given some function, return another function. Generally used to
        exchange NumPy functions for Theano equivalents.
        """
        # ** ======================= first handle functions defined here!

        if getattr(func, '__module__', None) == __name__:
            return func

        if func in self.context.ignore:
            return func

        # ** ======================= special autodiff functions

        elif func is autodiff.functions.escape:
            # escapes a variable from Tensor representation
            return self.handle_escape

        elif func is autodiff.functions.escaped_call:
            # call a function on escaped arguments without transforming the AST
            return self.handle_escaped_call

        elif func is autodiff.functions.tag:
            # tag a variable
            return self.handle_tag

        elif func is autodiff.functions.shadow:
            return self.shadow

        # ** ======================= autodiff classes

        elif isinstance(func, autodiff.symbolic.Symbolic):
            return func.symfn

        # ** ======================= __theano_op__

        elif hasattr(func, '__theano_op__'):
            return func.__theano_op__

        # ** ======================= array methods (with tensor instances)

        elif utils.isvar(getattr(func, '__self__', None)):
            return self.handle_methods(func.__self__, func.__name__)

        # ** ======================= Theano function

        elif (getattr(func, '__module__', None)
              and getattr(func, '__module__', '').startswith('theano')):
            return func

        elif isinstance(func, T.elemwise.Elemwise):
            return func

        # ** ======================= type/casting functions and new builtins

        elif type(func) is type:
            # range
            if func is range:
                def range_(*args):
                    int_args = (self.handle_int(a, escape=True) for a in args)
                    return func(*int_args)
                return range_
            # zip
            elif func is zip:
                def zip_(*args):
                    if any(utils.isvar(a) for a in args):
                        raise TypeError(
                            'Called zip() on Tensor but Tensors '
                            'do not support iteration. Maybe try escaping '
                            'the tensor?')
                    else:
                        return zip(*args)
                return zip_
            # casts
            elif func in (bool, np.bool_, np.bool8):
                logger.info('Warning: Theano has no bool type; '
                            'upgrading to int8.')
                def bool_(x):
                    return T.neq(x, 0)
                return bool_
            elif func.__name__ in T.basic._cast_mapping.keys():
                def cast(x):
                    return T.cast(x, dtype=func.__name__)
                return cast
            elif func is float:
                def float_(x):
                    return T.cast(x, dtype=theano.config.floatX)
                return float_
            elif func is int:
                def int_(x):
                    # match the bit width of floatX, e.g. float64 -> int64
                    return T.cast(x, dtype='int' + theano.config.floatX[-2:])
                return int_
            # enumerate
            elif func is enumerate:
                def enumerate_(iterable, start=0):
                    if utils.isvar(iterable):
                        raise TypeError(
                            'Called enumerate() on Tensor {0} but Tensors '
                            'do not support iteration. Maybe try escaping '
                            'the tensor?'.format(iterable))
                    else:
                        return enumerate(iterable, start=start)
                return enumerate_
            # any other builtin function (tuple, list, set, Exception)
            elif func in builtins.__dict__.values():
                return func
            else:
                def new_type(*args, **kwargs):
                    try:
                        return self.shadow(func(*args, **kwargs))
                    except:
                        raise ValueError('Unsupported type: {0}'.format(func))
                return new_type

        # ** ======================= numpy functions

        elif (inspect.getmodule(func) is np
              or (getattr(func, '__module__', None)
                  and getattr(func, '__module__').startswith('numpy'))
              or isinstance(func, np.ufunc)
              or func in (min, max)):

            # abs
            if func in (np.abs, np.absolute):
                return abs

            # ones/zeros
            # FIXME submitted a PR to Theano to make syntax more
            # like Numpy; this change shouldn't be needed afterward.
            elif func in (np.ones, np.zeros):
                def alloc(shp, dtype=None):
                    if (not isinstance(shp, (list, tuple))
                            and not utils.isvar(shp)):
                        shp = [shp]
                    return getattr(T, func.__name__)(shp, dtype)
                return alloc

            # handle asarray
            elif func is np.asarray:
                def _asarray(x):
                    if not utils.isvar(x):
                        return np.asarray(x)
                    else:
                        return x
                return _asarray

            # atleast_1d
            elif func is np.atleast_1d:
                def _atleast_1d(x):
                    if x.ndim == 0:
                        return x.dimshuffle('x')
                    else:
                        return x
                return _atleast_1d

            # atleast_2d
            elif func is np.atleast_2d:
                def _atleast_2d(x):
                    if x.ndim == 0:
                        return x.dimshuffle('x', 'x')
                    elif x.ndim == 1:
                        return x.dimshuffle('x', 0)
                    else:
                        return x
                return _atleast_2d

            # atleast_3d
            elif func is np.atleast_3d:
                def _atleast_3d(x):
                    if x.ndim == 0:
                        return x.dimshuffle('x', 'x', 'x')
                    elif x.ndim == 1:
                        return x.dimshuffle('x', 'x', 0)
                    elif x.ndim == 2:
                        return x.dimshuffle('x', 0, 1)
                    else:
                        return x
                return _atleast_3d

            # reshape
            elif func is np.reshape:
                def _reshape(*args, **kwargs):
                    callargs = inspect.getcallargs(T.reshape, *args, **kwargs)
                    x, newshape = callargs['x'], callargs['newshape']
                    if isinstance(newshape, (list, tuple)):
                        newshape = [self.handle_int(s) for s in newshape]
                    else:
                        newshape = self.handle_int(newshape)
                    return T.reshape(x, newshape)
                return _reshape

            # vstack
            elif func is np.vstack:
                def _vstack(tup):
                    return T.vertical_stack(*tup)
                return _vstack

            # hstack
            elif func is np.hstack:
                def _hstack(tup):
                    return T.horizontal_stack(*tup)
                return _hstack

            # transpose
            elif func is np.transpose:
                def _transpose(a, axes=None):
                    if axes is not None:
                        axes = [self.handle_int(a, escape=True) for a in axes]
                    return T.transpose(x=a, axes=axes)
                return _transpose

            # functions taking axis as an argument -- make sure to escape it
            elif func in (np.argmax, np.argmin, np.argsort, np.concatenate,
                          np.max, np.mean, np.min, np.prod, np.std, np.sum,
                          np.var):
                def reduce_(*args, **kwargs):
                    # NumPy exposes max/min as 'amax'/'amin'; Theano does not
                    func_name = func.__name__
                    if func_name == 'amax':
                        func_name = 'max'
                    elif func_name == 'amin':
                        func_name = 'min'
                    theano_func = getattr(T, func_name)
                    if 'axis' in kwargs:
                        kwargs['axis'] = self.handle_int(
                            kwargs['axis'], escape=True)
                    elif len(args) >= 2:
                        # positional axis argument
                        args = list(args)
                        args[1] = self.handle_int(args[1], escape=True)
                    # sometimes Theano uses 'a', sometimes it uses 'x'
                    if func not in (np.concatenate,):
                        np_first_arg = inspect.getargspec(func).args[0]
                        t_first_arg = inspect.getargspec(theano_func).args[0]
                        if np_first_arg in kwargs:
                            if np_first_arg != t_first_arg:
                                kwargs[t_first_arg] = kwargs.pop(np_first_arg)
                    return theano_func(*args, **kwargs)
                return reduce_

            # get equivalent Theano function
            elif hasattr(T, func.__name__):
                return getattr(T, func.__name__)

            else:
                raise ValueError(
                    'Autodiff unsupported function: {0}'.format(func))

        # ** ======================= ignore the inspect module

        elif inspect.getmodule(func) is inspect:
            return func

        # ** ======================= built-ins

        elif '<built-in' in str(func):
            # def escaped_random(*args, **kwargs):
            #     return self.handle_escaped_call(func, *args, **kwargs)
            # return escaped_random

            def handle_size(size):
                # normalize a `size` argument to a list/int64 vector of dims
                if not utils.isvar(size):
                    if not isinstance(size, (list, tuple)):
                        size = [size]
                    size = [self.handle_int(s) for s in size]
                else:
                    if size.ndim == 0:
                        size = size.dimshuffle('x')
                    size = size.astype('int64')
                return size

            # uniform random numbers (np.random.uniform)
            if func is np.random.uniform:
                def rand_u(low=0.0, high=1.0, size=1):
                    size = handle_size(size)
                    return global_randomstreams.uniform(low=low,
                                                        high=high,
                                                        size=size)
                return rand_u

            # standard uniform random numbers (np.random.random,
            # np.random.rand)
            elif func in (np.random.random, np.random.rand):
                def rand_u(size):
                    size = handle_size(size)
                    return global_randomstreams.uniform(size=size)
                return rand_u

            # normal random numbers (np.random.normal)
            elif func is np.random.normal:
                def rand_n(loc=0.0, scale=1.0, size=1):
                    size = handle_size(size)
                    return global_randomstreams.normal(avg=loc,
                                                       std=scale,
                                                       size=size)
                return rand_n

            # standard normal random numbers (np.random.randn)
            elif func is np.random.randn:
                def rand_n(*size):
                    size = [self.handle_int(s) for s in size]
                    return global_randomstreams.normal(size=size)
                return rand_n

            # binomial random numbers (np.random.binomial)
            elif func is np.random.binomial:
                def rand_b(n, p, size=1):
                    size = handle_size(size)
                    return global_randomstreams.binomial(n=n, p=p, size=size)
                return rand_b

            # isinstance
            elif func is isinstance:
                def isinstance_(obj, types):
                    # if self.context.force_floatX:
                    #     if int in utils.as_seq(types):
                    #         logger.debug(
                    #             'You are trying to check for ints but '
                    #             'force_floatX is True, so the check may fail. '
                    #             'Consider escaping the call.')
                    escaped_obj = self.handle_escape(obj)
                    if (isinstance(escaped_obj, (np.ndarray, np.number))
                            and obj.ndim == 0):
                        escaped_obj = np.asscalar(escaped_obj)
                    return isinstance(escaped_obj, self.handle_escape(types))
                return isinstance_

            # inplace list methods
            elif isinstance(
                    getattr(func, '__self__', None),
                    (list, dict, set, tuple)):
                def _inplace(*args):
                    # check if the container is shadowing a different one
                    if id(func.__self__) in self.context.shadowed_containers:
                        c = self.context.shadowed_containers[id(func.__self__)]
                        tmp = getattr(c, func.__name__)(*args)
                        if tmp is None:
                            return c
                        else:
                            return tmp
                    else:
                        return func(*args)
                return _inplace

            # anything else
            else:
                return func

        # ** ======================= A bound method not covered yet
        # elif isinstance(func, types.MethodType):
        #     return func

        # ** ======================= Misc

        elif (('ipdb' in (getattr(func, '__module__', '') or [])
               or 'pdb' in (getattr(func, '__module__', '') or []))
              and func.__name__ == 'set_trace'):
            return func

        # ** ======================= Special handling for OrderedDict views

        elif func in (collections.abc.ValuesView,
                      collections.abc.KeysView,
                      collections.abc.ItemsView):
            return func

        # ** ======================= Anything else

        else:
            # attempt to recompile (transform) the function itself
            try:
                return self.context.recompile(func, nested=True)
            except Exception as err:
                if self.context.escape_on_error:
                    logger.warning(
                        'Error when recompiling {0}. Calling escaped version '
                        'because escape_on_error is True.'.format(func))
                    def escapedfunc(*args, **kwargs):
                        return self.handle_escaped_call(func, *args, **kwargs)
                    return escapedfunc
                else:
                    raise ValueError(
                        'Unsupported function: {}. The following error was '
                        'raised: {}'.format(func, err))

        # ** ======================= Catchall (shouldn't be called)

        raise ValueError(
            'handle_functions: No case matched function {0}. Something is '
            'wrong -- should not reach this point!'.format(func))

    def handle_methods(self, var, method_name):
        """
        This method is called whenever:

            1. An array method is requested that doesn't exist for Theano
               variables (like _.swapaxes()). `handle_methods` is used to
               supply a replacement method. Note that in this case,
               `handle_methods` is called directly.

            2. A method is requested that DOES exist for Theano variables.
               In this case, `handle_methods` is called by `handle_functions`
               prior to calling the method. `handle_methods` is used to
               supply a replacement function that properly handles the
               supplied arguments (since they are compliant with the Numpy
               signature, not the Theano one).
        """
        # if we're not dealing with a Theano variable, nothing to do here.
        if not utils.isvar(var):
            return getattr(var, method_name)

        # ** ======================= Reshape
        # Theano's reshape requires dim to be in a collection, unlike Numpy.
        if method_name == 'reshape':
            def reshape(*args, **kwargs):
                if 'shape' in kwargs:
                    args = [kwargs.pop('shape')] + list(args)
                if args:
                    if not isinstance(args[0], (list, tuple)):
                        args = [args]
                else:
                    args = ((),)
                # Theano doesn't handle (), as an arg, which NumPy interprets
                # as casting length-1 vectors to scalars
                if args == ((),):
                    if var.ndim > 1:
                        raise ValueError(
                            'Reshape with `()` as an arg can only be used '
                            'with vectors of length 1.')
                    return var[0]
                else:
                    if args:
                        args = [self.handle_int(a) for a in args[0]]
                    if len(args) > 1:
                        args = [args]
                    return var.reshape(*args, **kwargs)
            return reshape

        # ** ======================= repeat
        elif method_name == 'repeat':
            def repeat(repeats, axis=None):
                if isinstance(repeats, (list, tuple)):
                    repeats = [self.handle_int(r) for r in repeats]
                else:
                    repeats = self.handle_int(repeats)
                axis = self.handle_int(axis, escape=True)
                return var.repeat(repeats, axis)
            return repeat

        # ** ======================= swapaxes
        # Theano has no swapaxes method
        elif method_name == 'swapaxes':
            def swapaxes(*args, **kwargs):
                # emulate swapaxes with a dimshuffle permutation
                axis1, axis2 = (int(self.handle_escape(a)) for a in args)
                dims = list(range(var.ndim))
                dims[axis1], dims[axis2] = dims[axis2], dims[axis1]
                return var.dimshuffle(*dims)
            return swapaxes

        # ** ======================= astype
        # Theano doesn't process numpy dtype objects or 'bool'
        elif method_name == 'astype':
            def astype(*args, **kwargs):
                dtype = kwargs.pop('dtype', None)
                if not dtype:
                    dtype = args[0]
                if not isinstance(dtype, str):
                    # get numpy dtype objects like np.float32
                    try:
                        dtype = dtype.__name__
                    except:
                        raise NotImplementedError(
                            'Unsupported dtype: {0}'.format(dtype))
                if 'bool' in dtype:
                    dtype = 'int8'
                    logger.info('Warning: Theano has no bool type; '
                                'upgrading to int8.')
                return var.astype(dtype)
            return astype

        # ** ======================= sort
        elif method_name == 'sort':
            def sort_(*args, **kwargs):
                raise ValueError(
                    'Calling an array\'s `sort()` method is not supported '
                    'because in NumPy it is an inplace operation, but in '
                    'Theano it is not. Please use numpy.sort() instead.')
            return sort_

        # ** ======================= reductions
        elif method_name in ('argmax', 'argmin', 'argsort', 'concatenate',
                             'max', 'mean', 'min', 'norm', 'prod', 'std',
                             'sum', 'var'):
            def reduce_(*args, **kwargs):
                method = getattr(var, method_name)
                all_args = inspect.getcallargs(method, *args, **kwargs)
                # drop the bound `self` so it isn't passed twice
                for k, v in list(all_args.items()):
                    if v is method.__self__:
                        all_args.pop(k)
                all_args['axis'] = self.handle_escape(all_args['axis'])
                if all_args['axis'] is not None:
                    all_args['axis'] = int(all_args['axis'])
                return method(**all_args)
            return reduce_

        # ** ======================= anything else
        # ...Otherwise, try to access the method on the Theano variable
        else:
            return getattr(var, method_name)

    def handle_comparison(self, operator, left, right):
        """
        This method is called whenever an operator is encountered with a
        single rhs comparator, since tensors do not properly support
        comparison operators.
        """
        if utils.isvar(left) or utils.isvar(right):
            return getattr(T, operator)(left, right)
        elif operator == 'gt':
            return left > right
        elif operator == 'ge':
            return left >= right
        elif operator == 'lt':
            return left < right
        elif operator == 'le':
            return left <= right
        elif operator == 'eq':
            return left == right
        elif operator == 'neq':
            return left != right
        else:
            # shouldn't ever reach here!
            raise ValueError(
                'Not sure how to handle operator: {0}'.format(operator))

    # ** --------------------------------------------------------
    # ** AST Manipulation (Node Visitors)

    def insert_breakpoint(self, _):
        # debugging hook: drop into ipdb when injected into transformed code
        import ipdb; ipdb.set_trace()

    def visit_Assign_with_updates(self, node):
        """
        Given an assignment, attempt to infer a symbolic update from the
        target and value.
        """
        load_targets = copy.deepcopy(node.targets)
        value = node.value
        for t in load_targets:
            # convert Store contexts to Load so the targets can be read
            load_transformer.generic_visit(t)
        node_with_updates = copy.deepcopy(node)
        node_with_updates.value = self.ast_wrap(
            'handle_assign_updates',
            List(ctx=Load(), elts=load_targets + [value]))
        body = [node_with_updates]
        # wrap this in a try because if this is the first time a variable
        # is being assigned, then load_targets will try to reference
        # a nonexistant variable!
        return Try(
            body=body,
            handlers=[ExceptHandler(body=[node])],
            finalbody=[],
            orelse=[])

    def visit_Assign(self, node):
        """
        Applies the following transformations:

        - Transform subscripts. Tensor variables do not support inplace
          assignment, so subscript assigns must be changed to call the
          `set_subtensor` function.

          Statements of the form:

              x[a:b][c] = y

          Become:

              if utils.isvar(x):
                  x = T.set_subtensor(x[a:b], T.set_subtensor(x[a:b][c], y))
              else:
                  x[a:b][c] = y
        """
        # TODO
        # AugAssigns with unbounded subscripts decompile strangely and can't
        # be recompiled. Specifically, they decompile as an Assign to a target
        # with a value that is an AugAssign of the same target and the true
        # value. To get around this, we just take the AugAssign (which appears
        # to be correct) and replace the Assign with it.
        # This is the syntax that creates the weird AST:
        #     a[:b] += c
        # if isinstance(node.value, AugAssign):
        #     return self.visit_AugAssign(node.value)

        # handle subscripted assignment for tensor variables
        if isinstance(node.targets[0], Subscript):

            # helper function to transform subscript into (possibly nested)
            # T.set_subtensor statements
            def build_subt(subscript, value):
                subscript_load = Subscript(ctx=Load(),
                                           slice=subscript.slice,
                                           value=subscript.value)
                set_subtensor = simple_Call(
                    args=[subscript_load, value],
                    func=Attribute(attr='set_subtensor',
                                   ctx=Load(),
                                   value=Name(ctx=Load(), id='_T__')))
                if isinstance(subscript.value, Subscript):
                    set_subtensor = build_subt(subscript.value, set_subtensor)
                return set_subtensor

            # get root tensor; check for nested subscripts
            tensor = node.targets[0]
            while not isinstance(tensor, Name):
                try:
                    tensor = tensor.value
                except:
                    break

            if isinstance(tensor, Name):
                # transform subscript into set_subtensor
                if isinstance(node.value, AugAssign):
                    value = BinOp(op=node.value.op,
                                  left=node.targets[0],
                                  right=node.value.value)
                else:
                    value = node.value

                set_subt = build_subt(subscript=node.targets[0], value=value)

                # wrap set_subtensor statements in Assign to root tensor
                assign_subtensor = Assign(
                    targets=[Name(ctx=Store(), id=tensor.id)],
                    value=set_subt)

                # wrap assign_subtensor in If to ensure that the modification
                # is only applied to tensor args
                self.generic_visit(node.value)
                if self.context.infer_updates:
                    node = self.visit_Assign_with_updates(node)
                return If(test=isvar_ast(tensor),
                          body=[assign_subtensor],
                          orelse=[node])
            else:
                # NOTE(review): this branch returns None implicitly, which a
                # NodeTransformer treats as node removal — confirm intended.
                self.generic_visit(node)
        else:
            self.generic_visit(node)
            if self.context.infer_updates:
                return self.visit_Assign_with_updates(node)
            else:
                return node

    # ==================================================
    # ==================================================
    #
    # AST Modifications
    #
    # ==================================================
    # ==================================================

    def visit_Attribute(self, node):
        """
        When dealing with an attribute, first see if the object has that
        attribute and return it. If not, call the handle_methods method.
        """
        self.generic_visit(node)
        if isinstance(node.ctx, Store):
            return node
        else:
            # getattr(obj, attr, handle_methods(obj, attr))
            new_node = simple_Call(
                args=[node.value,
                      Str(s=node.attr),
                      self.ast_wrap('handle_methods',
                                    [node.value, Str(s=node.attr)])],
                func=Name(ctx=Load(), id='getattr'))
            return self.ast_wrap('shadow', new_node)

    def visit_AugAssign(self, node):
        """
        See documentation for self.visit_Assign() for information on
        transformations applied here.
        """
        # transform into assign
        load_target = load_transformer.generic_visit(
            copy.deepcopy(node.target))
        value = BinOp(op=node.op,
                      left=self.ast_wrap('shadow', load_target),
                      right=node.value)
        new_node = Assign(targets=[node.target], value=value)
        return self.visit_Assign(new_node)

    def visit_Call(self, node):
        """
        Whenever a function is called, first pass it to the
        'handle_functions' method. This method examines the function and
        modifies it prior to calling it. For example, it might replace
        `numpy.ones` with `theano.ones`.
        """
        self.generic_visit(node)
        node.func = self.ast_wrap('handle_functions', node.func)
        # the * and ** syntax won't work if an object has been shadowed...
        # if node.starargs:
        #     node.starargs = self.ast_wrap('handle_shadow_class',
        #                                   node.starargs)
        # if node.kwargs:
        #     node.kwargs = self.ast_wrap('handle_shadow_class', node.kwargs)
        return node

    def visit_ClassDef(self, node):
        # class bodies are left untransformed
        return node

    def visit_Compare(self, node):
        """
        Replaces comparison operators with Theano functions, if either
        argument is a tensor variable.

        Prior to NumPy 1.8, this is required for all comparisons where the
        NumPy array is on the left; thereafter it is required only for ==
        and !=.

        Given:
            x == y
        Becomes:
            _ctx__.handle_comparison('eq', x, y)

        Which internally performs:
            if utils.isvar(x) or utils.isvar(y):
                T.eq(x, y)
            else:
                x == y

        This could be done by directly replacing the literal comparison
        with the `if` clause, but this wouldn't be compatible with all
        code. For example, if the comparison takes place in an `if` clause,
        the new (and nested) `if` clause would be illegal syntax. Wrapping
        the `isvar` check in a function call means the syntax remains
        compatible.
        """
        self.generic_visit(node)
        if isinstance(node.ops[0], Eq):
            theano_op = Str(s='eq')
        elif isinstance(node.ops[0], NotEq):
            theano_op = Str(s='neq')
        elif isinstance(node.ops[0], Gt):
            theano_op = Str(s='gt')
        elif isinstance(node.ops[0], GtE):
            theano_op = Str(s='ge')
        elif isinstance(node.ops[0], Lt):
            theano_op = Str(s='lt')
        elif isinstance(node.ops[0], LtE):
            theano_op = Str(s='le')
        else:
            # Is, IsNot, In, NotIn
            return node

        # only single comparisons are handled (not chained a < b < c)
        if len(node.comparators) == 1:
            return self.ast_wrap('handle_comparison',
                                 [theano_op, node.left, node.comparators[0]])
        else:
            return node

    def visit_FunctionDef(self, node):
        """
        When a function is defined, shadow each of its arguments
        immediately.

        The AST is modified so that a function defined as:

            def f(a, b=None, *c, **d):
                ...

        is changed via this method to:

            def f(a, b=None, *c, **d):
                a = self.shadow(a)
                b = self.shadow(b)
                c = self.shadow(c)
                d = self.shadow(d)
                tag(a, 'a')
                tag(b, 'b')
                for k, v in d.items():
                    tag(v, k)
                ...

        This way, any future references to these variables will access
        their shadowed values. This is important because inplace
        modifications do not always force the `shadow` method to get
        called, and so the inplace changes might not be reflected the next
        (and first!) time the variable is loaded.
        """
        self.generic_visit(node)
        assigns = []
        tags = []

        # shadow and tag args
        for param in node.args.args:
            assigns.append(Assign(
                targets=[Name(ctx=Store(), id=param.arg)],
                value=self.ast_wrap('shadow',
                                    Name(ctx=Load(), id=param.arg))))
            tags.append(Expr(value=self.ast_wrap(
                method_name='handle_tag_function_arg',
                args=[Name(ctx=Load(), id=param.arg), Str(s=param.arg)])))

        # shadow the varargs
        if node.args.vararg:
            if isinstance(node.args.vararg, str):
                node.args.vararg = arg(annotation=None, arg=node.args.vararg)
            assigns.append(Assign(
                targets=[Name(ctx=Store(), id=node.args.vararg.arg)],
                value=self.ast_wrap('shadow',
                                    Name(ctx=Load(),
                                         id=node.args.vararg.arg))))

        # shadow and tag the kwargs
        if node.args.kwarg:
            if isinstance(node.args.kwarg, str):
                node.args.kwarg = arg(annotation=None, arg=node.args.kwarg)
            assigns.append(Assign(
                targets=[Name(ctx=Store(), id=node.args.kwarg.arg)],
                value=self.ast_wrap('shadow',
                                    Name(ctx=Load(),
                                         id=node.args.kwarg.arg))))
            # for k, v in kwargs.items(): handle_tag_function_arg(v, k)
            tags.append(For(
                body=[Expr(value=self.ast_wrap(
                    method_name='handle_tag_function_arg',
                    args=[Name(ctx=Load(), id='v'),
                          Name(ctx=Load(), id='k')]))],
                iter=simple_Call(
                    func=Attribute(attr='items',
                                   ctx=Load(),
                                   value=Name(ctx=Load(),
                                              id=node.args.kwarg.arg))),
                orelse=[],
                target=Tuple(ctx=Store(),
                             elts=[Name(ctx=Store(), id='k'),
                                   Name(ctx=Store(), id='v')])))

        # tags are only injected into the top-level function definition
        if node is self.context._top_def:
            node.body = assigns + tags + node.body
            self.context._top_def = None
        else:
            node.body = assigns + node.body

        return node

    def visit_If(self, node):
        """
        Transform this:

            if <statement>:
                ...
            else:
                ...

        to this:

            if escape(<statement>):
                ...
            else:
                ...

        This means that the if statement's test clause will be evaluated at
        runtime. Note that this does NOT carry over to the compiled Theano
        code. It just protects against the following case:

            if x:
                <do something>

        If x is a shadowed variable, then it always resolves to True.
        However, x could have a value of 0, in which case this shouldn't
        pass. Escaping x resolves it when the function is called.
        """
        self.generic_visit(node)
        node.test = self.ast_wrap('handle_escape', node.test)
        return node

    def visit_Subscript(self, node):
        """
        Theano does not have a bool dtype, and therefore does not support
        Numpy's advanced indexing with boolean masks. For example, the
        following is interpreted as requested many items at the indices 1
        and 0, not as a boolean mask:

            x[x > 0.5]

        It is possible to replicate the boolean mask behavior in Theano
        with the following construction:

            x[(x > 0.5).nonzero()]

        tensor.nonzero() returns a tuple of indices corresponding to the
        nonzero elements. Thus, this properly selects the desired elements
        but is not compatible with Numpy comparisons anywhere else.

        To resolve this, if a Theano 'int8' subscript or index is
        requested, it is treated as a boolean mask and wrapped in a
        nonzero() call.

        NOTE THIS DOESN'T HANDLE ALL CASES
        """
        self.generic_visit(node)
        if isinstance(node.slice, Index):
            node.slice = Index(value=self.ast_wrap('handle_subscript',
                                                   node.slice.value))
        return node

    def visit_Name(self, node):
        """
        Whenever a literal variable name is loaded, call the 'shadow'
        method on its value.
        """
        # self.generic_visit(node)
        if isinstance(node.ctx, Load):
            node = self.ast_wrap('shadow', node)
        return node


class LoadTransformer(NodeTransformer):
    """Rewrites Store contexts as Load so assignment targets can be read."""

    def generic_visit(self, node):
        node = super(LoadTransformer, self).generic_visit(node)
        if hasattr(node, 'ctx'):
            if isinstance(node.ctx, Store):
                node.ctx = Load()
        return node


# module-level singleton used by the visitors above
load_transformer = LoadTransformer()
Length from top of shoulder – Size A – 9½ ins. Size B – 10 ins. Length of sleeve seam – Size A, 6 ins.; Size B, 7 ins. (or length desired). 8 stitches to the inch in width, measured over plain smooth fabric. Instructions are for smaller size A. Larger size B is shown thus [B- … ]. Using No. 12 Needles and L., cast on 32 [B-36] stitches. 1st row: * K1, P1, rep. from * to end of row. Rep. 1st row sixteen times. sts, inc. once in next st, work in rib to end of row (37 [B-41] sts). NOTE: Do not break off wools, carry up side of work. ** 1st row: Using L., knit. 2nd row: K1, P1, * K1, P3, rep. from * to last 3 sts, K1, P1, K1. 3rd row: Using D., K2, * slip 1 purlways, K3, rep. from * to last 3 sts, slip 1 purlways, K2. 4th row: K2, * wool front, slip 1 purlways, wool back, K3, rep. from * to last 3 sts, wool front, slip 1 purlways, wool back, K2. 5th row: Using L., K4, * slip 1 purlways, K3, rep. from * to last st, K1. 6th row: P4, * slip 1 purlways, P3, rep. from * to last st, K1. ** Rep. from ** to ** ten [B-ten] times. Cast off 3 [B-4] sts, work in patt to last 2 sts, K2tog. in every following 4th row three times (24 [B-27] sts). Continue in patt, dec. at front edge only in every following 4th row until 18 [B-20] sts remain. Work 4 [B-4] rows without shaping. 1st row: Work to last 9 [B-10] sts, turn. 2nd row: Work to end of row. Cast off. Work to correspond with Left Front, working shapings at opposite ends of needle. Using No. 12 Needles and L., cast on 70 [B-78] stitches. 1st row: K2, * P1, K1, rep. from * to end of row. 18th row: Work 2 [B-3] sts in rib, * inc. once in next st, work 10 [B-11] sts in rib, rep. from * to last 2 [B-3] sts, inc. once in next st, work 1 [B-2] sts in rib. (77 [B-85] sts). Work as given from ** to ** for Front eleven times. Continue in patt without shaping until armholes measure same as Front armholes. 1st and 2nd rows: Work to last 9 [B-10] sts, turn. 3rd and 4th rows: Work to last 18 [B-20] sts, turn. 5th row: Work to end of row. 
Cast off. Using No. 12 Needles and L., cast on 38 [B-42] stitches. Rep. 1st row fourteen times. 16th row: Work 4 [B-3] sts in rib, * inc. once in next st, work 4 [B-5] sts in rib, rep. from * to last 4 [B-3] sts, inc. once in next st, work 3 [B-2] sts in rib. (45 [B-49] sts). Work as given from ** to ** for Left Front once. Continue in patt, inc. once at each end of needle in next and every following 12th row until there are 53 [B-57] sts on needle. Continue without shaping until work measures 6 [B-7] ins. (or length desired) from commencement. Dec. once at each end of needle in next and every alt. row until 25 [B-27] sts remain. Work 1 row without shaping. Cast off. Work another Sleeve in same manner. Using No. 12 Needles and L., cast on 8 stitches. 3rd row: K2, P1, K1, yo, K2tog, P1, K1. Continue in rib, working a buttonhole in every following 12th row, until 6 buttonholes have been worked from commencement. Continue without further buttonholes until band is length required to fit along fronts and across back of neck. Using a flat seam, sew up side, shoulder and sleeve seams. Sew in sleeves, placing seams to side seams. Sew front band in position. Sew on buttons to correspond with buttonholes. Finally, press all seams. The original publication of this pattern is in the public domain, however this updated digital version is copyright Sarah Bradberry, July 25th 2012. All rights reserved.
import numpy as np
import gym
from keras.layers import Conv2D, Dense, Input, Flatten
from keras.models import Model, load_model
from keras.optimizers import RMSprop
from keras.utils.np_utils import to_categorical
import keras.backend as K

from common import LogPong

from skimage.color import rgb2gray
from skimage.transform import resize


class Game(object):
    '''Class for playing an atari game.

    gameName: Name of the gym environment, e.g. 'Pong-v0'.
    agent: Agent object that draws actions and learns from feedback.
    render: If True, display the game while playing.
    logfile: Optional path; when given, episode rewards are logged via LogPong.
    '''
    def __init__(self, gameName, agent, render=False, logfile=None):
        self.gameName = gameName
        self.agent = agent
        self.render = render
        self.logfile = logfile
        self.logger = LogPong(self.logfile) if self.logfile is not None else None

    def _resetEpisode(self):
        '''Reset the reward counter, bump the episode number, and restart the env.

        Returns the first observation of the new episode.
        '''
        self.rewardSum = 0
        self.episode += 1
        observation = self.env.reset()
        return observation

    def play(self):
        '''Play the game forever (episodes restart automatically in step()).'''
        self.setupGame()
        while True:
            self.step()

    def setupGame(self):
        '''Create the gym environment and start the first episode.'''
        self.env = gym.make(self.gameName)
        self.episode = 0  # becomes 1 when we start
        self.observation = self._resetEpisode()

    def step(self):
        '''Step one frame in game.
        Need to run setupGame before we can step.
        '''
        if self.render:
            self.env.render()
        action = self.agent.drawAction(self.observation)

        # step the environment and get new measurements
        self.observation, reward, done, info = self.env.step(action)
        self.rewardSum += reward
        self.agent.update(reward, done, info)
        if done:  # an episode has finished
            print('ep %d: reward total was %f.' % (self.episode, self.rewardSum))
            if self.logger is not None:
                self.logger.log(self.episode, self.rewardSum)  # log progress
            self.observation = self._resetEpisode()


class Agent(object):
    '''Abstract class for an agent.

    An Agent should implement:
        - model: typically a keras model object.
        - update: update agent after every response (handle response from env.
          and call updateModel method).
        - preprocess: preprocess observation from environment. Called by drawAction.
        - policy: give an action based on predictions.
        - updateModel: update the model object.
    '''
    model = NotImplemented  # object for holding the model.

    def __init__(self):
        self.resetMemory()

    def update(self, reward, done, info):
        '''Is called to receive the feedback from the environment.
        It has three tasks:
            - store relevant feedback
            - update model if appropriate
            - handle end of game (e.g. reset some states)
        '''
        raise NotImplementedError

    def preprocess(self, observation):
        '''Preprocess observation, and typically store in states list'''
        raise NotImplementedError

    def policy(self, pred):
        '''Returns an action based on given predictions.'''
        raise NotImplementedError

    def updateModel(self):
        '''Should do all work with updating weights.'''
        raise NotImplementedError

    def setupModel(self):
        '''Function for setting up the self.model object'''
        raise NotImplementedError

    def resetExperiences(self):
        '''Resetting agent after updating the model.'''
        raise NotImplementedError

    def resetMemory(self):
        '''Resets actions, states, and rewards.'''
        self.actions = []
        self.states = []
        self.rewards = []

    def currentState(self):
        '''Returns the latest state.'''
        return self.states[-1]

    def drawAction(self, observation):
        '''Draw an action based on the new observation.'''
        self.preprocess(observation)
        pred = self.predict(self.currentState())
        action = self.policy(pred)
        self.actions.append(action)
        return action

    def predict(self, states):
        '''Returns predictions based on given states.'''
        return self.model.predict(states)

    def getExperiences(self):
        '''Return all experiences.
        Useful when we have multiple worker agents.
        '''
        return [self.actions, self.states, self.rewards]

    # Backward-compatible alias: the method was originally misspelled.
    getExperienes = getExperiences

    def appendExperiences(self, experiences):
        '''Append experiences from getExperiences().
        Useful when we have multiple worker agents.
        '''
        self.actions, self.states, self.rewards = experiences


class StandardAtari(Agent):
    '''Abstract class for the standard atari models
    Includes:
        - preprocessing of atari images.
        - keras model.
    '''
    D = 84  # Scaled images are 84x84.
    nbImgInState = 4  # We pass the last 4 images as a state.

    def preprocess(self, observation):
        '''Preprocess observation and store the resulting state in self.states.

        A state is the stack of the last nbImgInState grayscale frames; the new
        frame is shifted in at the last channel position.
        '''
        observation = self.preprocessImage(observation)
        newState = np.zeros((1, self.D, self.D, self.nbImgInState))
        if len(self.states) != 0:
            newState[..., :-1] = self.currentState()[..., 1:]
        newState[..., -1] = observation
        self.states.append(newState)

    def preprocessImage(self, img):
        '''Compute luminance (grayscale in range [0, 1]) and resize to (D, D).'''
        img = rgb2gray(img)  # compute luminance 210x160
        img = resize(img, (self.D, self.D), mode='constant')  # resize image
        return img

    def setupModel(self):
        '''Not Implemented (Just a suggestion for structure):
        Set up the standard DeepMind convnet in Keras.
        modelInputShape = (self.D, self.D, self.nbImgInState)
        self.model = self.deepMindAtariNet(self.nbClasses, modelInputShape, True)
        model.compile(...)
        '''
        raise NotImplementedError

    @staticmethod
    def deepMindAtariNet(nbClasses, inputShape, includeTop=True):
        '''Set up the 3 conv layer keras model.
        classes: Number of outputs.
        inputShape: The input shape without the batch size.
        includeTop: If you only want the whole net, or just the convolutions.
        '''
        inp = Input(shape=inputShape)
        x = Conv2D(32, 8, 8, subsample=(4, 4), activation='relu',
                   border_mode='same', name='conv1')(inp)
        x = Conv2D(64, 4, 4, subsample=(2, 2), activation='relu',
                   border_mode='same', name='conv2')(x)
        x = Conv2D(64, 3, 3, activation='relu', border_mode='same', name='conv3')(x)
        if includeTop:
            x = Flatten(name='flatten')(x)
            x = Dense(512, activation='relu', name='dense1')(x)
            out = Dense(nbClasses, activation='softmax', name='output')(x)
        else:
            out = x
        model = Model(inp, out)
        return model


class A2C_OneGame(StandardAtari):
    '''Almost like the A3C agent, but with only one game played.

    nbClasses: Number of action classes.
    nbSteps: Number of steps before updating the agent.
    actionSpace: Allowed actions (passed to atari).
    modelFileName: File path for saving/loading the keras model.
    resume: If True, load the model from modelFileName.
    setupModel: If True, build/compile the model in __init__.
    '''
    gamma = 0.99  # discount factor for reward
    mseBeta = 0.5  # Weighting of value mse loss.
    entropyBeta = 0.1  # Weighting of entropy loss.
    learningRate = 1e-4
    decayRate = 0.99  # decay factor for RMSProp leaky sum of grad^2

    def __init__(self, nbClasses, nbSteps, actionSpace, modelFileName,
                 resume=False, setupModel=True):
        super().__init__()
        self.nbClasses = nbClasses
        self.nbSteps = nbSteps
        self.actionSpace = actionSpace
        self.modelFileName = modelFileName
        self.resume = resume
        if setupModel:
            self.setupModel()
        self._makeActionClassMapping()
        self.episode = 0
        self.stepNumber = 0  # iterates every frame

    def resetMemory(self):
        '''Resets actions, states, rewards, and predicted values.'''
        super().resetMemory()
        self.valuePreds = []

    def _makeActionClassMapping(self):
        # Map env action ids <-> contiguous class indices used by the net.
        self.action2Class = {action: i for i, action in enumerate(self.actionSpace)}
        self.class2Action = {i: action for i, action in enumerate(self.actionSpace)}

    def setupModel(self):
        '''Setup models:
        self.actionModel is the action predictions.
        self.valueModel is the prediction of the value function.
        self.model is the model with both outputs
        '''
        if self.resume:
            self.model = load_model(self.modelFileName)
            # Need the other models as well...
            return
        inputShape = (self.D, self.D, self.nbImgInState)
        model = self.deepMindAtariNet(self.nbClasses, inputShape, includeTop=False)
        inp = Input(shape=inputShape)
        x = model(inp)
        x = Flatten()(x)
        x = Dense(512, activation='relu', name='dense1')(x)

        action = Dense(self.nbClasses, activation='softmax', name='action')(x)
        self.actionModel = Model(inp, action)
        # Should we compile model?

        value = Dense(1, activation='linear', name='value')(x)
        self.valueModel = Model(inp, value)
        # Should we compile model?

        self.model = Model(inp, [action, value])
        actionAndEntropyLoss = makeActionAndEntropyLossA3C(self.entropyBeta)
        loss = {'action': actionAndEntropyLoss, 'value': 'mse'}
        loss_weights = {'action': 1, 'value': self.mseBeta}
        optim = RMSprop(self.learningRate, self.decayRate)
        self.model.compile(optim, loss)
        # Need to make it possible to set other optimizers

    def drawAction(self, observation):
        '''Draw an action based on the new observation.

        Also records the critic's value prediction for the current state.
        '''
        self.preprocess(observation)
        actionPred, valuePred = self.predict(self.currentState())
        self.valuePreds.append(valuePred)
        action = self.policy(actionPred)
        self.actions.append(action)
        return action

    def policy(self, pred):
        '''Sample an action class from the predicted distribution.'''
        sampleClass = np.random.choice(range(self.nbClasses), 1, p=pred[0])[0]
        action = self.class2Action[sampleClass]
        return action

    def update(self, reward, done, info):
        self.rewards.append(reward)
        self.stepNumber += 1
        if (self.stepNumber == self.nbSteps) or done:
            if len(self.states) == 1 + len(self.actions):
                self.states = self.states[1:]  # The first element is from last update
            if not done:
                # Bootstrap: replace the last reward with the critic's value
                # estimate, since the rollout was cut mid-episode.
                self.rewards[-1] = self.valuePreds[-1]
            self.updateModel()
            # BUG FIX: pass `done` through so the stored last state is dropped
            # at episode end; the original always kept it, leaking the old
            # episode's frame stack into the next episode's first state.
            self.resetExperiences(done)
            self.stepNumber = 0

        if done:
            self.episode += 1
            if self.episode % 10 == 0:
                self.model.save(self.modelFileName)

    def resetExperiences(self, done=False):
        '''Resetting agent after updating the model.
        done: If game has passed done=True.
        '''
        if done:
            self.resetMemory()
        else:
            prevState = self.currentState()
            self.resetMemory()
            self.states.append(prevState)  # Store last state (if not done)

    def updateModel(self):
        '''One actor-critic gradient step on the collected rollout.'''
        rewards = np.vstack(self.rewards)
        discountedRewards = self._discountRewards(rewards)
        X = np.vstack(self.states)
        fakeLabels = [self.action2Class[action] for action in self.actions]
        Y = np.vstack(fakeLabels)
        valuePreds = np.vstack(self.valuePreds)
        # Advantage = discounted return minus the critic's baseline.
        actionValues = discountedRewards - valuePreds
        Y = responseWithSampleWeights(Y, actionValues, self.nbClasses)
        self.model.train_on_batch(X, [Y, discountedRewards])

    def _discountRewards(self, r):
        """Take 1D float array of rewards and compute discounted reward."""
        discounted_r = np.zeros_like(r)
        running_add = 0
        for t in reversed(range(0, r.size)):
            if r[t] != 0:
                running_add = 0  # reset the sum, since this was a game boundary (pong specific!)
            running_add = running_add * self.gamma + r[t]
            discounted_r[t] = running_add
        return discounted_r

    def getExperiences(self):
        '''Return all experiences.
        Useful when we have multiple worker agents.
        '''
        return [self.actions, self.states, self.rewards, self.valuePreds]

    # Backward-compatible alias: the method was originally misspelled.
    getExperienes = getExperiences

    def appendExperiences(self, experiences):
        '''Append experiences from getExperiences().
        Useful when we have multiple worker agents.
        '''
        self.actions, self.states, self.rewards, self.valuePreds = experiences


class A3C_SingleWorker(A2C_OneGame):
    '''Like the A3C, but it does not update the model. It only plays the game.'''
    # BUG FIX: the original wrote `self.done = False` in the class body, which
    # raises NameError at class-definition time; a class attribute is intended.
    done = False

    def update(self, reward, done, info):
        self.rewards.append(reward)
        self.stepNumber += 1
        if (self.stepNumber == self.nbSteps) or done:
            # Signal the coordinator that this worker's rollout is complete.
            self.done = True


def responseWithSampleWeights(y, sampleWeights, nbClasses):
    '''Function for making labels ytrueWithWeights passed to
    categoricalCrossentropyWithWeights(ytrueWithWeights, ypred).
    y: Vector with zero-indexed classes.
    sampleWeights: vector of sample weights.
    nbClasses: number of classes.
    returns: One-hot matrix with y, and last columns contain responses.
    '''
    n = len(y)
    Y = np.zeros((n, nbClasses + 1))
    Y[:, :-1] = to_categorical(y, nbClasses)
    Y[:, -1] = sampleWeights.flatten()
    return Y


def categoricalCrossentropyWithWeights(ytrueWithWeights, ypred):
    '''Like regular categorical cross entropy, but with sample weights for
    every row.
    ytrueWithWeights is a matrix where the first columns are one hot encoder
    for the classes, while the last column contains the sample weights.
    '''
    return K.categorical_crossentropy(ypred, ytrueWithWeights[:, :-1]) * ytrueWithWeights[:, -1]


def entropyLoss(ypred):
    '''Entropy loss.
    Loss = - sum(pred * log(pred))
    '''
    return K.categorical_crossentropy(ypred, ypred)


def makeActionAndEntropyLossA3C(beta):
    '''The part of the A3C loss function concerned with the actions,
    i.e. action loss and entropy loss.
    Here we return the loss function that can be passed to Keras.
    beta: Weighting of entropy.
    '''
    def loss(ytrueWithWeights, ypred):
        '''Action and entropy loss for the A3C algorithm.
        ytrueWithWeights: A matrix where the first columns are one hot encoder
            for the classes, while the last column contains the sample weights.
        ypred: Predictions.
        '''
        policyLoss = categoricalCrossentropyWithWeights(ytrueWithWeights, ypred)
        entropy = entropyLoss(ypred)
        # - because the entropy is positive with minimal values in 0 and 1
        return policyLoss - beta * entropy
    return loss


class KarpathyPolicyPong(Agent):
    '''Karpathy dense policy network.'''
    H = 200  # number of hidden layer neurons
    batch_size = 10  # every how many episodes to do a param update?
    learning_rate = 1e-3
    gamma = 0.99  # discount factor for reward
    decay_rate = 0.99  # decay factor for RMSProp leaky sum of grad^2
    D = 80 * 80  # input dimensionality: 80x80 grid

    def __init__(self, modelFileName, resume=False):
        super().__init__()
        self.modelFileName = modelFileName
        self.resume = resume
        self.prev_x = None
        self.episode = 0
        self.setupModel()

    def policy(self, pred):
        '''Returns an action based on given predictions.'''
        action = 2 if np.random.uniform() < pred else 3  # roll the dice!
        return action

    def update(self, reward, done, info):
        '''See update func in Agent class'''
        self.rewards.append(reward)
        if done:
            self.episode += 1
            self.prev_x = None
            if self.episode % self.batch_size == 0:
                self.updateModel()

    def updateModel(self):
        '''Should do all work with updating weights.'''
        print('Updating weights...')
        # stack together all inputs, actions, and rewards for this episode
        epx = np.vstack(self.states)
        fakeLabels = [1 if action == 2 else 0 for action in self.actions]
        epy = np.vstack(fakeLabels)
        epr = np.vstack(self.rewards)
        self.resetMemory()

        # compute the discounted reward backwards through time
        discounted_epr = self._discountRewards(epr)
        # standardize the rewards to be unit normal (helps control the gradient estimator variance)
        discounted_epr -= np.mean(discounted_epr)
        discounted_epr /= np.std(discounted_epr)

        # update our model weights (all in one batch)
        self.model.train_on_batch(epx, epy,
                                  sample_weight=discounted_epr.reshape((-1,)))
        if self.episode % (self.batch_size * 3) == 0:
            self.model.save(self.modelFileName)

    def _discountRewards(self, r):
        """ take 1D float array of rewards and compute discounted reward """
        discounted_r = np.zeros_like(r)
        running_add = 0
        for t in reversed(range(0, r.size)):
            if r[t] != 0:
                running_add = 0  # reset the sum, since this was a game boundary (pong specific!)
            running_add = running_add * self.gamma + r[t]
            discounted_r[t] = running_add
        return discounted_r

    def setupModel(self):
        """Make keras model"""
        if self.resume:
            self.model = load_model(self.modelFileName)
        else:
            inp = Input(shape=(self.D,))
            h = Dense(self.H, activation='relu')(inp)
            out = Dense(1, activation='sigmoid')(h)
            self.model = Model(inp, out)
            optim = RMSprop(self.learning_rate, self.decay_rate)
            self.model.compile(optim, 'binary_crossentropy')

    @staticmethod
    def _preprocess_image(I):
        '''Preprocess 210x160x3 uint8 frame into 6400 (80x80) 1D float vector'''
        I = I[35:195]  # crop
        I = I[::2, ::2, 0]  # downsample by factor of 2
        I[I == 144] = 0  # erase background (background type 1)
        I[I == 109] = 0  # erase background (background type 2)
        I[I != 0] = 1  # everything else (paddles, ball) just set to 1
        return I.astype(np.float).ravel()

    def preprocess(self, observation):
        '''Preprocess observation. And store in states list'''
        cur_x = self._preprocess_image(observation)
        # Use the frame difference as input (motion), zeros on the first frame.
        x = cur_x - self.prev_x if self.prev_x is not None else np.zeros(self.D)
        self.prev_x = cur_x
        x = x.reshape((1, -1))
        self.states.append(x)


#--------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------

def test():
    render = False
    filename = 'test.h5'
    resume = False
    # filename = 'pong_gym_keras_mlp_full_batch.h5'
    # resume = True
    # render = True
    gym.undo_logger_setup()  # Stop gym logging
    agent = KarpathyPolicyPong(filename, resume=resume)
    game = Game('Pong-v0', agent, render=render, logfile='test.log')
    game.play()


def testA2C():
    render = False
    filename = 'testA2C.h5'
    resume = False
    # resume = True
    # render = True
    gym.undo_logger_setup()  # Stop gym logging
    actionSpace = [2, 3]
    agent = A2C_OneGame(2, 1024, actionSpace, filename, resume=resume)
    game = Game('Pong-v0', agent, render=render, logfile='test.log')
    game.play()


if __name__ == '__main__':
    # test()
    testA2C()
Many of your metal-plated seed beads suggest using an "artist's fixative" to keep the metal from wearing off. Would you spray the individual beads before using them, or the finished piece? If the finished piece, how do you get into the crevices of the piece? Will this also stop tarnishing? What can you do if a finished piece, copper in this case, has started to tarnish? Metallic and galvanized seed beads have a coated surface that will wear off with normal use, contact with chemicals and oils, and exposure to sunlight; they are also susceptible to tarnishing. To extend the life of the beads' finish, use a sealant or fixative. Depending on the type of work used to create the seed bead design, it would be advisable to apply the fixative prior to stitching so your piece remains flexible. Beads can be temporarily strung, a fixative applied, and hung to dry. You can use a brush-on formula, such as Mona Lisa™ Metal Leaf™ top coat sealant, or a spray-on option, such as SpectraFix™. Mona Lisa Metal Leaf can also be used as a fixative for dyed and hand-painted beads, polymer clay beads and gold leaf designs.
#!/usr/bin/env python
"""Kernel Tuner script for the zero-mean filter CUDA kernels.

Tunes the vertical and horizontal mean-computation kernels from
``zeromeanfilter.cu`` over a shared block-size search space.
"""
from collections import OrderedDict
import numpy
from kernel_tuner import tune_kernel, run_kernel


def _make_tune_params():
    """Return the tunable-parameter search space shared by both kernels.

    Extracted into a helper because the identical OrderedDict was previously
    duplicated in tune_vertical and tune_horizontal.
    """
    tune_params = OrderedDict()
    tune_params["block_size_x"] = [32 * i for i in range(1, 9)]
    tune_params["block_size_y"] = [2 ** i for i in range(6)]
    return tune_params


def tune_zeromean():
    """Read the kernel source, build a random test image, and tune both kernels."""
    with open('zeromeanfilter.cu', 'r') as f:
        kernel_string = f.read()

    height = numpy.int32(4391)
    width = numpy.int32(3539)
    image = numpy.random.randn(height * width).astype(numpy.float32)

    tune_vertical(kernel_string, image, height, width)
    tune_horizontal(kernel_string, image, height, width)


def tune_vertical(kernel_string, image, height, width):
    """Tune computeMeanVertically.

    kernel_string: CUDA source containing the kernel.
    image: flattened float32 image data.
    height, width: image dimensions as numpy.int32.
    Returns the tune_kernel results.
    """
    args = [height, width, image]

    # only one row of thread-blocks is to be created
    problem_size = (width, 1)
    grid_div_x = ["block_size_x"]
    grid_div_y = []

    return tune_kernel("computeMeanVertically", kernel_string, problem_size,
                       args, _make_tune_params(),
                       grid_div_y=grid_div_y, grid_div_x=grid_div_x)


def tune_horizontal(kernel_string, image, height, width):
    """Tune computeMeanHorizontally.

    Same arguments and return value as tune_vertical.
    """
    args = [height, width, image]

    # use only one column of thread blocks
    problem_size = (1, height)
    grid_div_x = []
    grid_div_y = ["block_size_y"]

    return tune_kernel("computeMeanHorizontally", kernel_string, problem_size,
                       args, _make_tune_params(),
                       grid_div_y=grid_div_y, grid_div_x=grid_div_x)


if __name__ == "__main__":
    tune_zeromean()
More listings & pictures have been added! More to come as available! Thanks for your patience! Sorry for the wrong pictures — these are the correct ones for this auction. More items remain to be unpacked & discovered!
import hashlib
import logging

from scrapy.utils.misc import create_instance

logger = logging.getLogger(__name__)


def _path_safe(text):
    """
    Return a filesystem-safe version of a string ``text``

    >>> _path_safe('simple.org').startswith('simple.org')
    True
    >>> _path_safe('dash-underscore_.org').startswith('dash-underscore_.org')
    True
    >>> _path_safe('some@symbol?').startswith('some_symbol_')
    True
    """
    # Keep alphanumerics and '-._'; replace everything else with '_'.
    pathable_slot = "".join([c if c.isalnum() or c in '-._' else '_' for c in text])
    # as we replace some letters we can get collision for different slots
    # add we add unique part
    unique_slot = hashlib.md5(text.encode('utf8')).hexdigest()
    return '-'.join([pathable_slot, unique_slot])


class ScrapyPriorityQueue:
    """A priority queue implemented using multiple internal queues (typically,
    FIFO queues). It uses one internal queue for each priority value. The internal
    queue must implement the following methods:

        * push(obj)
        * pop()
        * close()
        * __len__()

    ``__init__`` method of ScrapyPriorityQueue receives a downstream_queue_cls
    argument, which is a class used to instantiate a new (internal) queue when
    a new priority is allocated.

    Only integer priorities should be used. Lower numbers are higher
    priorities.

    startprios is a sequence of priorities to start with. If the queue was
    previously closed leaving some priority buckets non-empty, those priorities
    should be passed in startprios.

    """

    @classmethod
    def from_crawler(cls, crawler, downstream_queue_cls, key, startprios=()):
        # Standard Scrapy factory hook; just forwards to __init__.
        return cls(crawler, downstream_queue_cls, key, startprios)

    def __init__(self, crawler, downstream_queue_cls, key, startprios=()):
        self.crawler = crawler
        self.downstream_queue_cls = downstream_queue_cls
        self.key = key
        self.queues = {}  # priority -> internal queue (buckets created lazily)
        self.curprio = None  # lowest (i.e. highest-priority) key currently in self.queues
        self.init_prios(startprios)

    def init_prios(self, startprios):
        """Recreate the priority buckets left non-empty by a previous run."""
        if not startprios:
            return

        for priority in startprios:
            self.queues[priority] = self.qfactory(priority)

        self.curprio = min(startprios)

    def qfactory(self, key):
        """Build the internal queue for priority ``key``, keyed on disk/name
        by ``self.key + '/' + key``."""
        return create_instance(self.downstream_queue_cls,
                               None,
                               self.crawler,
                               self.key + '/' + str(key))

    def priority(self, request):
        # Negated because Request.priority uses "higher number = higher
        # priority", while this queue dequeues the smallest key first.
        return -request.priority

    def push(self, request):
        """Add ``request`` to the bucket for its priority, creating the bucket
        if needed, and keep ``curprio`` pointing at the best priority."""
        priority = self.priority(request)
        if priority not in self.queues:
            self.queues[priority] = self.qfactory(priority)
        q = self.queues[priority]
        q.push(request)  # this may fail (eg. serialization error)
        if self.curprio is None or priority < self.curprio:
            self.curprio = priority

    def pop(self):
        """Pop from the current best-priority bucket; returns None when empty.

        Empty buckets are closed and removed, and ``curprio`` is advanced to
        the next non-empty bucket (or None).
        """
        if self.curprio is None:
            return
        q = self.queues[self.curprio]
        m = q.pop()
        if not q:
            del self.queues[self.curprio]
            q.close()
            prios = [p for p, q in self.queues.items() if q]
            self.curprio = min(prios) if prios else None
        return m

    def close(self):
        """Close all internal queues and return the list of priorities that
        existed at close time (suitable for passing back as startprios)."""
        active = []
        for p, q in self.queues.items():
            active.append(p)
            q.close()
        return active

    def __len__(self):
        # Total number of queued requests across all priority buckets.
        return sum(len(x) for x in self.queues.values()) if self.queues else 0


class DownloaderInterface:
    """Thin read-only view over the crawler's Downloader, exposing per-slot
    activity counts used for slot selection."""

    def __init__(self, crawler):
        self.downloader = crawler.engine.downloader

    def stats(self, possible_slots):
        """Return (active_download_count, slot) pairs for the given slots."""
        return [(self._active_downloads(slot), slot)
                for slot in possible_slots]

    def get_slot_key(self, request):
        # Delegates to the Downloader's own slot-key logic (domain/IP based).
        return self.downloader._get_slot_key(request, None)

    def _active_downloads(self, slot):
        """ Return a number of requests in a Downloader for a given slot """
        if slot not in self.downloader.slots:
            return 0
        return len(self.downloader.slots[slot].active)


class DownloaderAwarePriorityQueue:
    """ PriorityQueue which takes Downloader activity into account:
    domains (slots) with the least amount of active downloads are dequeued
    first.
    """

    @classmethod
    def from_crawler(cls, crawler, downstream_queue_cls, key, startprios=()):
        # Standard Scrapy factory hook; just forwards to __init__.
        return cls(crawler, downstream_queue_cls, key, startprios)

    def __init__(self, crawler, downstream_queue_cls, key, slot_startprios=()):
        # Per-IP concurrency is unsupported: slots here are domain-based, so
        # IP-based download accounting would not match the queue's slots.
        if crawler.settings.getint('CONCURRENT_REQUESTS_PER_IP') != 0:
            raise ValueError(f'"{self.__class__}" does not support CONCURRENT_REQUESTS_PER_IP')

        if slot_startprios and not isinstance(slot_startprios, dict):
            raise ValueError("DownloaderAwarePriorityQueue accepts "
                             "``slot_startprios`` as a dict; "
                             f"{slot_startprios.__class__!r} instance "
                             "is passed. Most likely, it means the state is"
                             "created by an incompatible priority queue. "
                             "Only a crawl started with the same priority "
                             "queue class can be resumed.")

        self._downloader_interface = DownloaderInterface(crawler)
        self.downstream_queue_cls = downstream_queue_cls
        self.key = key
        self.crawler = crawler

        self.pqueues = {}  # slot -> priority queue
        for slot, startprios in (slot_startprios or {}).items():
            self.pqueues[slot] = self.pqfactory(slot, startprios)

    def pqfactory(self, slot, startprios=()):
        """Build the per-slot ScrapyPriorityQueue, namespaced under a
        filesystem-safe version of the slot name."""
        return ScrapyPriorityQueue(self.crawler,
                                   self.downstream_queue_cls,
                                   self.key + '/' + _path_safe(slot),
                                   startprios)

    def pop(self):
        """Pop from the slot with the fewest active downloads; empty slot
        queues are removed after popping."""
        stats = self._downloader_interface.stats(self.pqueues)

        if not stats:
            return

        # min over (active_count, slot) tuples -> least-busy slot wins.
        slot = min(stats)[1]
        queue = self.pqueues[slot]
        request = queue.pop()
        if len(queue) == 0:
            del self.pqueues[slot]
        return request

    def push(self, request):
        """Route ``request`` to its slot's queue, creating the queue lazily."""
        slot = self._downloader_interface.get_slot_key(request)
        if slot not in self.pqueues:
            self.pqueues[slot] = self.pqfactory(slot)
        queue = self.pqueues[slot]
        queue.push(request)

    def close(self):
        """Close all per-slot queues; returns {slot: active priorities},
        the shape expected back as ``slot_startprios`` on resume."""
        active = {slot: queue.close()
                  for slot, queue in self.pqueues.items()}
        self.pqueues.clear()
        return active

    def __len__(self):
        # Total queued requests across all slots.
        return sum(len(x) for x in self.pqueues.values()) if self.pqueues else 0

    def __contains__(self, slot):
        return slot in self.pqueues
For years, Russian oligarchs and robber barons seeking to park their "unsourced" capital offshore and away from the sticky fingers of the Kremlin, treated Swiss bank accounts (preferably anonymous) with their "no questions asked" customer policies as, well, Swiss bank accounts. One of Switzerland’s largest banks, Credit Suisse, has frozen roughly 5 billion Swiss francs ($5 billion) of money linked to Russia to avoid violating U.S. sanctions, according to its accounts, further increasing pressure on Moscow which today saw the ruble tumble to the lowest level in over two years. The crackdown on Russian funds by the second largest Swiss bank, which owned aircraft surrendered by Russian tycoon Oleg Deripaska and had lent money to Russian oligarch Viktor Vekselberg before the sanctions, is indicative of the widespread fear among European banks of retaliation by Washington for working with targeted Russian individuals and entities. “Credit Suisse works with international regulators wherever it does business to ensure compliance with sanctions, including compliance with sanctions involving Russia,” a bank spokeswoman told Reuters. The bank is complying with the latest round of anti-Russia sanctions announced in April by U.S. Treasury Secretary Steven Mnuchin meant to penalize Russia for its annexation of Crimea, involvement in the war in Syria and “attempting to subvert Western democracies”. And more is expected to follow, as Trump scrambles to prove to Robert Mueller that he did no collude with Putin. A popular financial and tourist hub for wealthy Russians with its combination of bank secrecy, political stability and glitzy ski resorts such as Zermatt and St. Moritz, Switzerland has become one of the most important destinations for money leaving Russia. Which is why, for Russia’s elite, such steps will close off an important avenue for finance as well as a safe haven for billions of rubles of their wealth. 
Roughly $6.2 billion, or 14% of total Russian cross-border outflows, went to Switzerland in 2017: three times as much as went to the United States, according to the Russian central bank. That money could now be blocked after earlier this month, members of Congress called for more action, including introducing new sanctions legislation “from hell”, to punish the Russian "menace." And while the U.S. sanctions do not apply to neutral Switzerland, its banks are obliged to comply because they depend on access to the dollar and could be blackballed by the United States for any missteps, a demonstration of the implied US veto power on global financial transactions that use "neutral" Swift as an intermediary and the reason behind yesterday's call by Germany’s foreign minister Heiko Maas for the creation of a new payments system independent of the US. Going back to Switzerland, the Credit Suisse asset freeze took place in the second quarter, according to Reuters, a rare event for a Swiss bank to reveal such details. Two of the bank's biggest rivals, UBS and Julius Baer said they also respected international sanctions, but declined to say whether they had taken similar steps. “UBS ... implements worldwide at least the sanctions currently imposed by Switzerland, the U.N., the EU and the U.S.,” said a spokesman for UBS. A spokesman for Julius Baer told Reuters that it “cooperates with international regulators ... in the field of sanction regulations”. Understandable, Credit Suisse's caution is the result of earlier bad experiences, most recently with the Trump administration when in 2009 the bank reached a $500 million settlement with U.S. authorities over dealings with sanctions-hit Iran. Other European banks have also been punished for ignoring US dominance in global fund flows: in 2014, France’s BNP Paribas agreed to pay a record $8.9 billion for violating U.S. sanctions against Sudan, Cuba and Iran. 
Meanwhile, the Swiss banking regulator FINMA does not require Swiss banks to enforce foreign sanctions, but has said they have a responsibility to minimize legal and reputational risks. In other words, it is the US which continues to call the shots even in "neutral" Switzerland. As for what venue rich Russians will pick to park their wealth next, we are confident that China will be delighted to hold their billions "safe and sound." That, or they rediscover that when it comes to avoiding capital controls, few currencies are as effective as cryptos.
# Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

import weakref
from datetime import date

from core.util import allsame, flatten
from ..model.currency import Currencies
from .base import GUIPanel
from .completable_edit import CompletableEdit
from .selectable_list import LinkedSelectableList
from .text_field import TextField


class MassEditTextField(TextField):
    """Text field bound to one editable attribute of a :class:`MassEditionPanel`.

    When the field's value changes, the new value is stored on the panel under
    ``_<fieldname>`` and the matching ``<fieldname>_enabled`` flag is set to
    True, then the panel's view is refreshed.
    """

    def __init__(self, panel, fieldname):
        TextField.__init__(self)
        self._panel = panel
        # Panel attribute that receives the edited value, e.g. '_date'.
        self._attrname = '_' + fieldname
        # Panel flag marking this attribute as "to be saved", e.g. 'date_enabled'.
        self._enabledname = fieldname + '_enabled'

    def _update(self, newvalue):
        # NOTE(review): assumes TextField calls _update() whenever the field's
        # value changes -- confirm against .text_field.TextField.
        setattr(self._panel, self._attrname, newvalue)
        setattr(self._panel, self._enabledname, True)
        self._panel.view.refresh()


class MassEditDateField(MassEditTextField):
    """Mass-edit field whose text is parsed/formatted as a date through the app."""

    def _parse(self, text):
        return self._panel.app.parse_date(text)

    def _format(self, value):
        return self._panel.app.format_date(value)


class MassEditAmountField(MassEditTextField):
    """Mass-edit field whose text is parsed/formatted as an amount through the document."""

    def _parse(self, text):
        return self._panel.document.parse_amount(text)

    def _format(self, value):
        return self._panel.document.format_amount(value)


class MassEditionPanel(GUIPanel):
    """Panel used to edit several selected transactions at once.

    Each editable attribute has a field and an ``<attr>_enabled`` flag; on
    save, only the attributes whose flag is True are written back to the
    selected transactions.
    """

    def __init__(self, mainwindow):
        GUIPanel.__init__(self, mainwindow)
        # A weak proxy avoids a reference cycle between the panel and the
        # fields/callbacks that point back at it.
        self_proxy = weakref.proxy(self)
        self.date_field = MassEditDateField(self_proxy, 'date')
        self.description_field = MassEditTextField(self_proxy, 'description')
        self.payee_field = MassEditTextField(self_proxy, 'payee')
        self.checkno_field = MassEditTextField(self_proxy, 'checkno')
        self.from_field = MassEditTextField(self_proxy, 'from')
        self.to_field = MassEditTextField(self_proxy, 'to')
        self.amount_field = MassEditAmountField(self_proxy, 'amount')
        self.completable_edit = CompletableEdit(mainwindow)

        def setfunc(index):
            # Called when the currency list selection changes; an
            # out-of-range index means "no currency selected".
            try:
                currency = Currencies.code_at_index(index)
            except IndexError:
                currency = None
            if currency != self_proxy.currency:
                self_proxy.currency = currency
                self_proxy.currency_enabled = currency is not None
                self_proxy.view.refresh()

        self.currency = None
        self.currency_list = LinkedSelectableList(
            items=Currencies.display_list(), setfunc=setfunc)
        self._init_checkboxes()

    # --- Private
    def _init_checkboxes(self):
        # Reset every "apply this attribute on save" flag.
        self.date_enabled = False
        self.description_enabled = False
        self.payee_enabled = False
        self.checkno_enabled = False
        self.from_enabled = False
        self.to_enabled = False
        self.amount_enabled = False
        self.currency_enabled = False

    # --- Override
    def _load(self, transactions):
        """Pre-fill the panel's fields from *transactions* (at least two).

        A field is pre-filled only when all selected transactions agree on
        that attribute; otherwise it keeps a neutral default.
        """
        assert len(transactions) >= 2
        # Account fields only make sense for simple two-split transactions.
        self.can_change_accounts = all(len(t.splits) == 2 for t in transactions)
        self.can_change_amount = all(t.can_set_amount for t in transactions)
        self.date_field.value = date.today()
        self.description_field.text = ''
        self.payee_field.text = ''
        self.checkno_field.text = ''
        self.from_field.text = ''
        self.to_field.text = ''
        self.amount_field.value = 0
        self.currency = None
        first = transactions[0]
        if allsame(t.date for t in transactions):
            self.date_field.value = first.date
        if allsame(t.description for t in transactions):
            self.description_field.text = first.description
        if allsame(t.payee for t in transactions):
            self.payee_field.text = first.payee
        if allsame(t.checkno for t in transactions):
            self.checkno_field.text = first.checkno
        splits = flatten(t.splits for t in transactions)
        splits = [s for s in splits if s.amount]
        if splits and allsame(s.amount.currency_code for s in splits):
            self.currency = splits[0].amount.currency_code
        else:
            self.currency = self.document.default_currency
        try:
            self.currency_list.select(Currencies.index(self.currency))
        except IndexError:
            pass
        if self.can_change_accounts:
            # With exactly two splits, the one with the non-positive amount
            # is the "from" side; the other one is the "to" side.
            def get_from(t):
                s1, s2 = t.splits
                return s1 if s1.amount <= 0 else s2

            def get_to(t):
                s1, s2 = t.splits
                return s2 if s1.amount <= 0 else s1

            def get_name(split):
                return split.account.name if split.account is not None else ''

            if allsame(get_name(get_from(t)) for t in transactions):
                self.from_field.text = get_name(get_from(first))
            if allsame(get_name(get_to(t)) for t in transactions):
                self.to_field.text = get_name(get_to(first))
        if self.can_change_amount:
            if allsame(t.amount for t in transactions):
                self.amount_field.value = first.amount
        # Pre-filling the fields above flipped their *_enabled flags through
        # MassEditTextField._update(); clear them so that nothing is marked
        # for saving until the user actually edits something.
        self._init_checkboxes()

    def _save(self):
        """Apply every enabled attribute to the selected transactions."""
        transactions = self.mainwindow.selected_transactions
        kw = {}
        if self.date_enabled:
            kw['date'] = self.date_field.value
        if self.description_enabled:
            kw['description'] = self.description_field.text
        if self.payee_enabled:
            kw['payee'] = self.payee_field.text
        if self.checkno_enabled:
            kw['checkno'] = self.checkno_field.text
        if self.from_enabled:
            kw['from_'] = self.from_field.text
        if self.to_enabled:
            kw['to'] = self.to_field.text
        if self.amount_enabled:
            kw['amount'] = self.amount_field.value
        if self.currency_enabled:
            kw['currency'] = self.currency
        if kw:
            self.document.change_transactions(transactions, **kw)
        self.mainwindow.revalidate()
A girl’s life these days is not as simple as people may think it is. We are certainly freer than our mums and grandmas used to be. Nonetheless, we have to put up with the most demanding pressures: being pretty, being thin, being trendy, always smiling… On top of that, we have to succeed in our careers and get married by 30! While reading Bridget Jones’s diary, I was secretly hoping to find a happy ending, for I recognized myself within some traits of the main character – truth be told: any girl would! The story is about an ordinary girl, Bridget, who is on her thirties, lives in London and works in publishing. As any girl, she is obsessed with her calorie intake, her inability to cope with bad habits -such as smoking- plus, she desperately wants to find a boyfriend – a mature one, unafraid of commitment. Unfortunately, she falls for the bad kind of guys and, to make matters worse, her family is always trying to fix her up with total strangers. Bridget’s main fear is to die alone and forgotten, only to be found later on, half eaten by an animal. The characters are remarkably realistic. First there is Bridget, who feels unattractive and awful most of the time, still, she is a lovable character. She has an obnoxious mother and a sympathetic father. At work there is Perpetua – the horrible Sloane woman who bosses her around. Fortunately, Bridget has really good friends: Tom (her best friend), Jude (who is always crying because of her boyfriend, Vile Richard) and Sharon (who doesn’t have a high opinion on men). Bridget´s love life includes Daniel Cleaver, the sleazy boss, who loves flirting with her; and Mark Darcy, the divorced top lawyer, a kind-hearted man whose behavior sometimes terribly annoys her: he is shy, distant and way too formal. This puzzles Bridget, she feels love and hate altogether! I really like the relationship that Bridget has with her friends, they are extremely loyal, honest and supportive, which I think is very important in anyone’s life. 
There are parts in the book in which I felt terribly bad for her; one, for example, is during the New Year’s Day Turkey Lunch, where she is introduced to Mark – everyone in the room is expecting them to like each other and start dating immediately. It would have been horrible to be in her shoes, feeling rejected by a complete stranger (feeling horrible and embarrassed in front of the entire family). She can’t stop thinking about what her family may think about her: ‘so that’s why Bridget isn’t married’. Helen Fielding wrote the novel as a real diary, with daily entries. Anne Collins kept the same format which, in my opinion, is a really good thing, since it is the essence of the story. Every month reflects the highs and lows of her life. It is a touching (yet hilarious) story and it is not only Bridget’s account, but a representation of many women’s lives. Helen accurately shows how single girls feel and think. I totally recommend it! It is “chick lit”; nevertheless, men should read it too. It is wondrously funny!
import pytest from addons.osfstorage import settings as osfstorage_settings from osf.models import BaseFileNode, Folder, File from osf_tests.factories import ( UserFactory, ProjectFactory, ) pytestmark = pytest.mark.django_db @pytest.fixture() def user(): return UserFactory() @pytest.fixture() def project(user): return ProjectFactory(creator=user) @pytest.fixture() def create_test_file(fake): # TODO: Copied from api_tests/utils.py. DRY this up. def _create_test_file(node, user=None, filename=None, create_guid=True): filename = filename or fake.file_name() user = user or node.creator osfstorage = node.get_addon('osfstorage') root_node = osfstorage.get_root() test_file = root_node.append_file(filename) if create_guid: test_file.get_guid(create=True) test_file.create_version(user, { 'object': '06d80e', 'service': 'cloud', osfstorage_settings.WATERBUTLER_RESOURCE: 'osf', }, { 'size': 1337, 'contentType': 'img/png' }).save() return test_file return _create_test_file def test_active_manager_does_not_return_trashed_file_nodes(project, create_test_file): create_test_file(node=project) deleted_file = create_test_file(node=project) deleted_file.delete(user=project.creator, save=True) # root folder + file + deleted_file = 3 BaseFileNodes assert BaseFileNode.objects.filter(node=project).count() == 3 # root folder + file = 2 BaseFileNodes assert BaseFileNode.active.filter(node=project).count() == 2 def test_folder_update_calls_folder_update_method(project, create_test_file): file = create_test_file(node=project) parent_folder = file.parent # the folder update method should be the Folder.update method assert parent_folder.__class__.update == Folder.update # the folder update method should not be the File update method assert parent_folder.__class__.update != File.update # the file update method should be the File update method assert file.__class__.update == File.update
This is possibly the most beautiful park I’ve ever been too and I hope you’ll agree as this park has won many international awards for its landscaping. This park is very well shaded with many large and tall trees throughout the park, It is so well shaded that it feels at least ten degrees cooler in this area than the surrounding communities, however you’ll need to bring a comfortable pair of tennis shies as it is pretty hilly here especially if you want to go down to the Rhine River Cruises or see the famous interlocking loops on the Loch Ness Monster roller coaster. Busch Gardens Williamsburg is located in historic Williamsburg, VA, USA, almost half way between Hampton and Richmond, the Capitol of Virginia. It opened on May 16, 1975 as Busch Gardens: The Old Country. It operated under that name from 1975 to 1992 when the name was slightly changed to Busch Gardens Williamsburg which lasted from 1993 to 2005. From 2006-2008, It changed slightly again to Busch Gardens Europe and then reverted to Busch Gardens Williamsburg from 2008 to the present day. Here are some photos of some of the Geese that call the Rhine River home. Busch Gardens Currently has eight themed areas and they are, (starting at the entrance and going counterclockwise around the park) Banbury Cross (England), Heatherdowns (Scotland), Killarney (Ireland), Aquitaine (France), New France, Oktoberfest (Germany), Rhinefeld (Germany), San Marco (Italy), Festa Italia (Italy), and Sesame Street Forest of Fun. We’ll start our journey off at the entrance in Banbury Cross (England). This area is mainly filled with shops to catch tourists as they’re entering and have just realized they forgot something and don’t want to go back to the hotel or house, and to catch people leaving to try to sell them just one or two more things they must have. There also is a 4D simulator here which was showing Pirates 4D when I was here last. 
Unfortunately I don’t have many photos of the buildings or scenery of the park as most of them are of the rides. I will post what photos I do have though. This is the only one I have of the Banbury Cross area. Next is the Aeronaut, a one of a kind Skyride that opened here in 1975. This is the only one like it ever built as there are three stations arranged in a triangular layout. The ride is a one way ride so if you board in Banbury Cross, you must disembark and get back on the ride at the Aquitaine station, do the same at the Rhinefeld station to get back to Banbury Cross. Here is an on ride video of the entire ride including looks at the drive station in Banbury Cross, the station in Aquitaine, and the tension station in Rhinefeld. Next is Heatherdowns (Scotland) and the first ride we come to is the Tweedside Train Station. The Busch Gardens Railway will take you on a two and a half mile loop in scaled down versions of the trains that use to run in Europe. After departing the Tweedside Station, you’ll make stops in Festa Italia and Caribou in New France before returning to Tweedside. The round-trip journey will take about twenty minutes though this on-ride video is about 15 minutes long. It took me almost an hour and a half riding both sides of the train to get the best possible footage. Next up is Loch Ness Monster. The Loch Ness Monster, or “Nessie” as some enthusiasts call her, was built by Arrow Dynamics and designed by the famous Ron Toomer. Both are pioneers in the roller coaster industry. This coaster opened on May 20, 1978 as the worlds tallest at 140 feet tall and the worlds first coaster to have interlocking loops. The only other one to have this feature was the Orient Express at Worlds of Fun near Kansas City, Missouri, USA. That coaster was later removed in favor of a new inverted roller coaster called the Patriot. The ride starts by ascending a lift hill slowly, then making a right turn and then dropping 114 feet into the ravine below. 
After that 114 foot drop, riders then climb into a U-turn over the former Land of Dragons kids play area and then down into the first interlocking loop. After that, the train runs through a mid-course brake run and then into “Nessie’s Lair” which is three downward spiraling helices in a tunnel. After your trip through the tunnel, riders emerge at the bottom and immediately engage in a second, smaller, lift hill. Once at the top, riders make another U-turn and then make the final drop into the bottom half of the interlocking loops. This type of coaster is a “terrain” coaster, meaning that it was specifically built to follow the terrain of the area it is in thus also making it a Custom Looping model from this company. The coaster is actually very smooth given that most coasters from Arrow Dynamics are a bit rough. I would say this coaster is completely re-rideable as I did at least ten times during my three-day visits in 2006 and 2010. Here are the stats: The coaster is 3,240 feet long, 140 feet high, has a drop of approximately 114 feet, two inversions, a top speed of about 60 mph, and a total ride time of about two minutes and ten seconds long. This coaster was given the ACE Coaster Landmark award on June 17, 2003 for being the first roller coaster to feature interlocking loops. Busch Gardens also hosted the first ACE (American Coaster Enthusiasts) Coaster Con(vention) from June 9-11, 1978 in order to help promote the coaster. This is also the date that ACE was formally organized; members who attended this event were treated to several hours of ERT (Exclusive Ride Time) that was set aside for members of ACE. ACE has become a world-wide nonprofit organization devoted completely to the enjoyment of roller coasters across the world. You can go to ACEonline.org or search on the internet for a roller coaster club that’s closer to your area of the world. I myself am a member of the FLCC (Florida Coaster Club) and you can find them at Floridacoasterclub.com.
Here is an off ride video I have of the Loch Ness Monster. This area was once known as Hastings , part of the England area, but it was re-themed in 2001 to Ireland which was the first new land at the park in over twenty years. This area does have a motion simulator ride called Europe in the Air that takes riders to all the major cities in Europe through the air. This attraction used to be where the Corkscrew Hill simulator was which was a fun Irish themed ride. There are a number of different shops and a restaurant that serves Irish cuisine. There also is an indoor theater called the Abbey Stone Theater which house a different show every few years. Just outside the village on the way to Aquitaine is the Pet Shenanigans Theater, an outdoor stage show where the animals are the stars. Thy includes house cats, dogs, pigs, birds, and other animals. There is an area here called Eagle & Wolfe Valley. You’ll see a number of rescued American Bald Eagles and a small pack of European Wolves, at-least I believe they’re European Wolves. I also believe this is where you can feed the Lorikeets as well but I do have to warn you that they are prolific poopers so bring an extra shirt to wear that you don’t mind getting a little “dirty”. Now that we’ve changed our shirts and washed off, we’ll walk to our next area. This area has a few boutiques, one station of the Aeronaut Skyride that arrives from the Banbury Cross area and departs for the Rhinefeld section, and the Royal Palace Theatre. The Royal Palace Theatre is a large covered outdoor theater that hosts many shows thought the year. The star attraction in this area is the Griffon roller coaster. The Griffon was named after a legendary creature called the Griffin, which has the body, tail, and legs of a lion and the head and wings of an eagle and eagles talons on the front paws. Griffins are though to be the king of all creatures and are known for guarding priceless possessions and treasure according to ancient mythology. 
This coaster was built and designed by B&M ( Bolliger & Mabillard) and opened here in 2009. It is one of the few Dive coaster models they have built and the second that was built in the USA behind SheiKra at its sister park, Busch Gardens Tampa. A Dive coaster is one that has two or three rows that seat between eight and ten people across and has a holding brake at the top of the first drop so that riders are left dangling over the edge between one to five seconds before being released. Griffon stands at 205 feet tall with the first and second drop angle at ninety degrees, has two inversions, a splashdown water effect that doesn’t get riders wet, a total track length of 3,108 feet, a top speed of 71 mph, and a ride time of about three minutes. This was the worlds first dive coaster to feature trains that are completely FLOORLESS. This is definitely a must ride and I recommend the front row for first time riders as the view from up top is simply spectacular. The splashdown effect does not get riders wet at all, in fact, it soaks unsuspecting pedestrians that are walking by the pool. Now for the next area. This area is based on the colonial area of French Canada which mainly consists of shops that are themed to and sell items related to the theme of the area. There’s also an outdoor restaurant called Trappers Smokehouse which serves smoked items like beef, ribs, and chicken. There are two rides here, one of which is a Scrambler carnival attraction called Le Catapult from the Eli Bridge Co and the other is a log flume ride called Le Scoot that opened here in 1975. The ride is pretty basic with very little theming but it was an enjoyable ride through the trees before the big drop at the end and you do get wet on this one but not completely soaked. Onward to out next area. This area is home to many different rides, shops, and restaurants including the Festhaus where, at certain times of the day, people can eat and watch a show called Entwined. 
That show features the most famous fairy tales from the Grimm Bros. I haven’t seen the new show yet as the old set featuring a German Band playing traditional Oktoberfest songs while actors did traditional dances under the band and everyone was wearing lederhosen. This area used to have a really good suspended coaster from Arrow Dynamics called Big Bad Wolf that was removed in 2009. The tag line for the ride was “Ride at the speed of fright”. The ride begins when the train exits the station and makes a left and then a right turn and gets pulled up a small lift hill behind the Der Autobahn and Der Autobahn Jr. bumper cars ride. Then the coaster travels through a German village and then goes off into the woods through a series of upward and downward flat spins where it ascends another, much larger, lift hill about 100 feet high. Once the train reaches the top, the coaster then makes a left turn and drops towards the Rhine River turning towards the banks of the river at the last second. The coaster then comes back up making a 180 degree turn while ascending, then another 180 degree turn back into the station. Here’s my on ride video I shot on my first visit in 2006 and I do apologize for the poor quality of the video. This coaster was one of the best suspended coasters I’ve ever been on and it’s best ridden at night as most of the track is hidden from the non riders. This coaster was designed by the famous Ron Toomer at Arrow Dynamics and they were both pioneers in the amusement park industry. Big Bad Wolf opened on June 15, 1984 to rave reviews from the general public and the coaster enthusiasts. It was 2,800 feet long, 100 feet high, had a top speed of about 48 mph, and a ride time of about three minutes. Here is an official on ride view from Theme Park Review of Verbloten, the coaster that eventually replaced it. This coaster was manufactured by Zierer and features two launches and three different show settings in the building the coaster travels through. 
This coaster opened here on May 18, 2012 and is 88 feet tall, has 2,835 feet of track length, a top speed of about 53 mph, and a ride time of about three minutes and twenty-five seconds. There was also another roller coaster in this area called Drachenfire but this coaster only operated for a few years from April 4, 1992 to July of 1998. The park put the coaster up for sale and nobody bought it so it was removed so the steel could be recycled in 2002. It was removed as riders thought it was a very rough coaster. It was built and designed by the same person and company that built Loch Ness Monster and Big Bad Wolf. It was 150 feet tall, had a top speed of about 60 mph and a total track length of 3,550 feet long. Here’s an official on ride video from Theme Park Review. Here’s my video of what remained of both coasters during my last visit in 2009. Next up is our next ride. This is an indoor motion simulator dark ride that utilizes air, 3D screens, and water in most of the rooms the vehicles travel through. It is set around an ancient German King who terrorizes guests as they “tour” the castle. This utilizes the same ride technology that Universal Studios uses in their ride called “The Amazing Adventures of Spiderman” at its Islands of Adventure park in Orlando, Florida, USA. This drop tower was added after my last visit so I can’t tell you what it is like but it looks really thrilling as it is the tallest ride at the park. It opened on August 19, 2011 and is 249 feet tall. The gondola rotates as it rises to the top and I do believe there is music being played as it ascends the tower. Here is an on ride view from deathbyillusion on YouTube. There are a number of other rides for those not brave enough to ride the coasters including Der Wirbelwind, a rotating swings ride commonly called a Waveswinger, and Wirbelwindchen, a kids swings ride and other kiddie type rides. Now on to our next area.
This was the only kiddie play place when it opened in 1994 until the opening of the Sesame Street Forest of Fun in 2009. This area may still have many of its rides, unfortunately I don’t know this as I really had no reason to venture into this area when I was there last in 2009. This area does still have its maze of jungle gyms, rope climbs, slides, and swing bridges but some of the rides have been moved and rethemed in the new area. According to Wikipedia, this area should still have the following rides. The Eggery Deggery which is a small Ferris wheel with the gondolas themed to dragon eggs, the Flutter Sputter which is a flying dragon ride, the Bug a Dug which is a ride that resembles a Music Express type ride with Lady Bug cars, Chug a Tug which is a small boat ride, and the Brook which is a wet play area. The Riffle Rapids was moved to the new area and rethemed and renamed Bert and Ernie’s Loch Adventure. This area has a few quick service food carts, shops, a carousel, a roller coaster, and the third Aeronaunt skyride station that arrives from Aquitaine and departs to Banbury Cross. This is the parks classic 1919 wooden carousel from Allen Herschel. The carousel was operating with a traveling carnival when it was abandoned at an unknown date and time. It was found inside a auctioneer’s warehouse in pieces in 1973 when Anheuser Busch, the park owner at the time, purchased it and restored it in 14 months. It really is a rare treat to see and ride a restored, classic wooden carousel. This is an Inverted roller coaster themed to an Alpine ski lift that has been taken over by a mythical beast called the Aplengeist. the themeing is actually pretty good, there’s even skis attached to the coaster trains. It was built by B&M and opened here as the tallest at 195 feet, and fastest at 67 mph, Inverted roller coaster in the world when it opened on March 22, 1997. This coaster also has one of the most incredible first drops on any coaster I’ve been on to date. 
It’s a curving 170 foot drop that turns almost 200 degrees to the right as it drops. Then it goes right into an Immelmann inversion, then a full loop, and then it goes right into a double inversion element called a Cobra Roll. After that, the train makes a run through the block brakes, then dives under the pedestrian bridge, then through a zero g roll, then a corkscrew, an upward flat spin then into the station brakes. Here’s an off ride view from my YouTube channel. When riding on inverted roller coasters like this one, I always recommend riding front row for your first ride and this is no exception. The views are unobstructed and you can see everything in front of you but this coaster is definitely not for the faint of heart. Now it is time to cross the bridge and enter our next area. This area is set themed to the Renaissance era Italy when Leonardo Da Vinci was alive. This area has rides, shops, and a restaurant called Ristorante della Piazza which has seating around a stage. There’s also Da Vinci’s Garden of Inventions that feature statues and rides that are based on Da Vinci’s drawings. You’ll see two of the three rides that are in this area in the photos above. In the one in the middle, you’ll see part of the Battering Ram on the left. That is a swing ship ride that doesn’t go upside down. The picture on the right is of Da Vinci’s Cradle, a standard Magic Carpet type ride that swings back and forth until it rotates over the top (not going upside down). Also in this area is a Shoot-the-Chutes ride called Escape from Pompeii. Riders board a large boat, then ride a lift hill up to the top where the boat enters the building with an ancient Roman façade. Once inside, things start to fall apart as Mt Vesuvius erupts and starts to destroy Pompeii. Fire erupts all over the place and statues and buildings start to fall and crumble and then you escape via a watery plunge into the water below soaking everyone in the boat and those nearby. 
This ride was built by Intamin and opened here in 1996. It’s 80 feet high, 900 feet long, and has a total ride time of about two minutes and ten seconds. Now we’re off to our next area. This area is set to the return of Marco Polo’s return from his famous trip to China. Most of the parks’ midway games are here with a festival theme. All the rides in this area are themed around Roman mythology. There are three flat rides, a water ride, and a roller coaster in this area as well as a few small gift shops where you can purchase ride related items. The flat rides are the Turkish Delight which is a spinning tea cups ride, Tradewinds, which is a permanent Music Express type ride, and Elephant Run, which is a kid friendly bobsled type ride. Next up is the roller coaster. This is one of the best steel hyper coasters I’ve ridden, it ranks just below Magnum XL 200 at Cedar Point and Nitro at Six Flags Great Adventure. This coaster was built by B&M and opened here on March 30, 1999. This coaster made world-wide news on opening day when Italian supermodel Fabio took the first ride and was promptly hit in the face by a goose at the bottom of the first drop. Fabio only got a few scrapes on his nose but the goose received far worse as it didn’t survive the encounter with Fabio. Apollo’s Chariot is 170 feet tall with a first drop of 210 feet, is 4,482 feet long, has a top speed of about 73 mph, and a ride time of about two minutes and fifteen seconds. This coaster has an enormous amount of air time which is that nice zero g feeling you get after coming over the top of a hill. I do recommend the back row to really get that air time effect. Oh by the way, you are secured by a lap bar on this coaster but don’t worry, you won’t fall out as evidenced by the millions of riders that have safely made it back to the station, including me. Here’s my off ride video of Apollo’s Chariot. Now we leave Festa Italia, cross through part of San Marco, and enter our last area. 
This area is just for the little ones as it is filled with rides that only the kiddies can ride or with mom or dad in tow. It opened here on April 3, 2009 and has a few gift shops, a stage for the “Sunny Days Celebration” show that features the Sesame Street characters, a few family friendly rides, and a Junior coaster. The rides include Bert and Ernie’s Loch Adventure which is a log flume ride that has water effects and doesn’t have any drops, Oscar’s Whirly Worms which is a small pirate ship style ride that slowly spins as it rocks back and forth, and Prince Elmo’s Spire which is a family friendly Shot-n-Drop tower ride from Zamperla. The family friendly junior coaster is called Grover’s Alpine Express. It is a Force 190 model from Zierer and it opened here on May 3, 2009 and stands about 24 feet high and is approximately 600 feet long. Here is an off ride view of this coaster. I hope you enjoyed reading this review and I think you’ll enjoy the many different things this park has to offer. I definitely will be making a return trip in the future.
#!/usr/bin/env python import re def oneline(infile, outfile): pattern = re.compile(r'^(\w+)(\*){1}(\d+)(\:?)') f = open(infile) geno = {} ref = '' flag = False for i in f: i = i.strip() if i and pattern.search(i): fs = i.split() if fs[0] not in geno: geno[fs[0]]='' for j in range(1, len(fs)): geno[fs[0]] += fs[j] if not flag: # the first allele is reference ref = fs[0] flag = True f.close() f = open(outfile, 'w') keys = sorted(geno.keys()) for a in keys: if a == ref: f.write(a) f.write('\t') f.write(geno[a]) f.write('\n') else: f.write(a) f.write('\t') tmp = geno[a] for k in range(len(tmp)): if tmp[k] == '*': f.write('*') elif tmp[k] == '-': f.write(geno[ref][k]) else: f.write(tmp[k]) if len(geno[ref]) > len(tmp): for k in range(len(tmp),len(geno[ref])): f.write('*') f.write('\n') def catfiles(fileList, outfile): fw = open(outfile, 'w') for f in fileList: fr = open(f) for r in fr: fw.write(r) fr.close() fw.close() if __name__ == '__main__': infiles = ['A_prot.txt', 'B_prot.txt', 'C_prot.txt', 'DMA_prot.txt', 'DMB_prot.txt', 'DOA_prot.txt', 'DOB_prot.txt','DPA_prot.txt', 'DPB_prot.txt', 'DQA_prot.txt', 'DQB_prot.txt', 'DRA_prot.txt','DRB_prot.txt'] outfiles = ['A.aln', 'B.aln', 'C.aln', 'DMA.aln','DMB.aln','DOA.aln','DOB.aln','DPA.aln', 'DPB.aln', 'DQA.aln', 'DQB.aln', 'DRA.aln','DRB.aln'] for i in range(0, len(infiles)): oneline(infiles[i], outfiles[i]) catfiles(outfiles, "aa.aln.txt")
Mellendick, Kevan; Shanahan, Lilly; Wideman, Laurie; Calkins, Susan; Keane, Susan; Lovelady, Cheryl (2018). Diets Rich in Fruits and Vegetables Are Associated with Lower Cardiovascular Disease Risk in Adolescents. Nutrients, 10(2):136.
#! /usr/bin/env python

# Copyright (c) 2013-2018, Rethink Robotics Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse

import numpy as np
import cv2
from cv_bridge import CvBridge, CvBridgeError

import rospy
import intera_interface


def show_image_callback(img_data, callback_args):
    """Display one camera frame in an OpenCV window.

    :param img_data: sensor_msgs/Image message from the camera stream
    :param callback_args: (edge_detection, window_name) tuple, exactly as
        registered via Cameras.set_callback(..., callback_args=...)
        (the parameter was previously the 2to3 artifact name
        'xxx_todo_changeme'; it is invoked positionally, so the rename is
        caller-compatible)
    """
    (edge_detection, window_name) = callback_args
    bridge = CvBridge()
    try:
        cv_image = bridge.imgmsg_to_cv2(img_data, "bgr8")
    except CvBridgeError as err:
        # Conversion failure: log and drop this frame rather than crash
        # the subscriber callback.
        rospy.logerr(err)
        return
    if edge_detection:
        gray = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
        blurred = cv2.GaussianBlur(gray, (3, 3), 0)
        # customize the second and the third argument, minVal and maxVal
        # in function cv2.Canny if needed
        get_edge = cv2.Canny(blurred, 10, 100)
        cv_image = np.hstack([get_edge])
    edge_str = "(Edge Detection)" if edge_detection else ''
    cv_win_name = ' '.join([window_name, edge_str])
    cv2.namedWindow(cv_win_name, 0)
    # refresh the image on the screen
    cv2.imshow(cv_win_name, cv_image)
    cv2.waitKey(3)


def main():
    """Camera Display Example

    Cognex Hand Camera Ranges
        - exposure: [0.01-100]
        - gain: [0-255]
    Head Camera Ranges:
        - exposure: [0-100], -1 for auto-exposure
        - gain: [0-79], -1 for auto-gain
    """
    rp = intera_interface.RobotParams()
    valid_cameras = rp.get_camera_names()
    if not valid_cameras:
        rp.log_message(("Cannot detect any camera_config"
                        " parameters on this robot. Exiting."), "ERROR")
        return
    arg_fmt = argparse.RawDescriptionHelpFormatter
    # main.__doc__ doubles as the --help description text above.
    parser = argparse.ArgumentParser(formatter_class=arg_fmt,
                                     description=main.__doc__)
    parser.add_argument(
        '-c', '--camera', type=str, default="head_camera",
        choices=valid_cameras, help='Setup Camera Name for Camera Display')
    parser.add_argument(
        '-r', '--raw', action='store_true',
        help='Specify use of the raw image (unrectified) topic')
    parser.add_argument(
        '-e', '--edge', action='store_true',
        help='Streaming the Canny edge detection image')
    parser.add_argument(
        '-g', '--gain', type=int,
        help='Set gain for camera (-1 = auto)')
    parser.add_argument(
        '-x', '--exposure', type=float,
        help='Set exposure for camera (-1 = auto)')
    args = parser.parse_args(rospy.myargv()[1:])

    print("Initializing node... ")
    rospy.init_node('camera_display', anonymous=True)
    cameras = intera_interface.Cameras()
    if not cameras.verify_camera_exists(args.camera):
        rospy.logerr("Could not detect the specified camera, "
                     "exiting the example.")
        return
    rospy.loginfo("Opening camera '{0}'...".format(args.camera))
    cameras.start_streaming(args.camera)
    rectify_image = not args.raw
    use_canny_edge = args.edge
    cameras.set_callback(args.camera, show_image_callback,
                         rectify_image=rectify_image,
                         callback_args=(use_canny_edge, args.camera))
    # optionally set gain and exposure parameters
    if args.gain is not None:
        if cameras.set_gain(args.camera, args.gain):
            rospy.loginfo("Gain set to: {0}".format(
                cameras.get_gain(args.camera)))
    if args.exposure is not None:
        if cameras.set_exposure(args.camera, args.exposure):
            rospy.loginfo("Exposure set to: {0}".format(
                cameras.get_exposure(args.camera)))

    def clean_shutdown():
        print("Shutting down camera_display node.")
        cv2.destroyAllWindows()

    rospy.on_shutdown(clean_shutdown)
    rospy.loginfo("Camera_display node running. Ctrl-c to quit")
    rospy.spin()


if __name__ == '__main__':
    main()
WHEN Christian singer Steve Taylor arrives in Sydney next month, he will walk into a storm over his song about blowing up an abortion clinic. The American artist's song, I Blew up the Clinic Real Good , has been banned by many record stores in the US and may face problems in music shops here. Word Records Australia, local distributors of Taylor's latest album, I Predict 1990 , said the controversial track had attracted criticism because of the violent solution Taylor apparently suggests as a way of dealing with abortion clinics. that a few plastic explosives won't cure" Mr. Greg Turnbull, youth pastor at Chermside Assembly of God Church in Brisbane, has written to Word Records to register his "total disgust" with the album. Mr. Turnbull said he found it offensive that a record should be released "under the pretence of Christian music," carrying such lyrics as, "I Blew Up The Clinic Real Good". "To tell Christian young people that the use of plastic explosives will solve problems is irresponsible at least, not to mention inciting rebellion ... with words like 'Try and catch me coppers, you stinking badges better think again before you mess this boy around," he said. "After examining the rest of the lyrics I came to the conclusion that it would be easier to find a needle in a haystack than to find God in these words," Mr. Turnbull added. Mrs. Margaret Tighe, chairwoman of Right to Life Australia, was equally scathing. "This must be the work of a sick mind," she said. "We are never going to solve any problem by violence. "There have been a number of incidents in the US where people have blown up clinics, and I think this song is really a sick joke and highly inappropriate. "It sounds like he believes in such a fiery solution to what is a terrible holocaust of human life -- and I can understand his feelings. 
"If the concentration camps had been blown up in Germany I guess whoever did it would have been hailed as a hero -- but bombing is a violent solution this movement rejects, no matter how passionate one's feelings may be." But 29-year-old Taylor, son of a Baptist preacher, vehemently rejects the criticisms. The singer said from the US that people tended to misunderstand the satirical nature of his songs. He was keen to correct misconceptions and had recently telephoned 140 US record stores to explain his album to them. "In essence it was like protest music -- it's wanting to see things changed." Sarah Hawkins, promotions executive of Word Records in Melbourne, said she did not expect any problems when Taylor toured Australia next month.
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import atexit
import functools
import os
import time

import fixtures
import nose.plugins.attrib
import testresources
import testtools

from tempest import clients
from tempest.common import isolated_creds
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging

LOG = logging.getLogger(__name__)

# All the successful HTTP status codes from RFC 2616
HTTP_SUCCESS = (200, 201, 202, 203, 204, 205, 206)


def attr(*args, **kwargs):
    """A decorator which applies the nose and testtools attr decorator

    This decorator applies the nose attr decorator as well as the
    testtools.testcase.attr if it is in the list of attributes to
    testtools we want to apply.
    """
    def decorator(f):
        if 'type' in kwargs and isinstance(kwargs['type'], str):
            f = testtools.testcase.attr(kwargs['type'])(f)
            if kwargs['type'] == 'smoke':
                # smoke tests are implicitly part of the gate
                f = testtools.testcase.attr('gate')(f)
        elif 'type' in kwargs and isinstance(kwargs['type'], list):
            # loop variable renamed from 'attr', which shadowed this
            # decorator factory itself
            for attr_name in kwargs['type']:
                f = testtools.testcase.attr(attr_name)(f)
                if attr_name == 'smoke':
                    f = testtools.testcase.attr('gate')(f)
        return nose.plugins.attrib.attr(*args, **kwargs)(f)

    return decorator


def services(*args, **kwargs):
    """A decorator used to set an attr for each service used in a test case

    This decorator applies a testtools attr for each service that gets
    exercised by a test case.
    """
    valid_service_list = ['compute', 'image', 'volume', 'orchestration',
                          'network', 'identity', 'object', 'dashboard']

    def decorator(f):
        for service in args:
            if service not in valid_service_list:
                raise exceptions.InvalidServiceTag(
                    '%s is not a valid service' % service)
        attr(type=list(args))(f)
        return f
    return decorator


def stresstest(*args, **kwargs):
    """Add stress test decorator

    For all functions with this decorator a attr stress will be
    set automatically.

    @param class_setup_per: allowed values are application, process, action
        ``application``: once in the stress job lifetime
        ``process``: once in the worker process lifetime
        ``action``: on each action
    @param allow_inheritance: allows inheritance of this attribute
    """
    def decorator(f):
        if 'class_setup_per' in kwargs:
            setattr(f, "st_class_setup_per", kwargs['class_setup_per'])
        else:
            # default: one class setup per worker process
            setattr(f, "st_class_setup_per", 'process')
        if 'allow_inheritance' in kwargs:
            setattr(f, "st_allow_inheritance", kwargs['allow_inheritance'])
        else:
            setattr(f, "st_allow_inheritance", False)
        attr(type='stress')(f)
        return f
    return decorator


def skip_because(*args, **kwargs):
    """A decorator useful to skip tests hitting known bugs

    @param bug: bug number causing the test to skip
    @param condition: optional condition to be True for the skip to have place
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*func_args, **func_kwargs):
            if "bug" in kwargs:
                # With no condition given, the skip is unconditional.
                if "condition" not in kwargs or kwargs["condition"] is True:
                    msg = "Skipped until Bug: %s is resolved." % kwargs["bug"]
                    raise testtools.TestCase.skipException(msg)
            return f(*func_args, **func_kwargs)
        return wrapper
    return decorator


def requires_ext(*args, **kwargs):
    """A decorator to skip tests if an extension is not enabled

    @param extension
    @param service
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*func_args, **func_kwargs):
            if not is_extension_enabled(kwargs['extension'],
                                        kwargs['service']):
                msg = "Skipped because %s extension: %s is not enabled" % (
                    kwargs['service'], kwargs['extension'])
                raise testtools.TestCase.skipException(msg)
            return func(*func_args, **func_kwargs)
        return wrapper
    return decorator


def is_extension_enabled(extension_name, service):
    """A function that will check the list of enabled extensions from config

    Raises KeyError for a service with no configured extension list.
    """
    configs = config.TempestConfig()
    config_dict = {
        'compute': configs.compute_feature_enabled.api_extensions,
        'compute_v3': configs.compute_feature_enabled.api_v3_extensions,
        'volume': configs.volume_feature_enabled.api_extensions,
        'network': configs.network_feature_enabled.api_extensions,
    }
    # Guard against an empty extension list before peeking at element 0
    # (the original indexed blindly and could raise IndexError).
    if not config_dict[service]:
        return False
    if config_dict[service][0] == 'all':
        return True
    if extension_name in config_dict[service]:
        return True
    return False


# there is a mis-match between nose and testtools for older pythons.
# testtools will set skipException to be either
# unittest.case.SkipTest, unittest2.case.SkipTest or an internal skip
# exception, depending on what it can find. Python <2.7 doesn't have
# unittest.case.SkipTest; so if unittest2 is not installed it falls
# back to the internal class.
#
# The current nose skip plugin will decide to raise either
# unittest.case.SkipTest or its own internal exception; it does not
# look for unittest2 or the internal unittest exception.  Thus we must
# monkey-patch testtools.TestCase.skipException to be the exception
# the nose skip plugin expects.
#
# However, with the switch to testr nose may not be available, so we
# require you to opt-in to this fix with an environment variable.
#
# This is temporary until upstream nose starts looking for unittest2
# as testtools does; we can then remove this and ensure unittest2 is
# available for older pythons; then nose and testtools will agree
# unittest2.case.SkipTest is the one-true skip test exception.
#
# https://review.openstack.org/#/c/33056
# https://github.com/nose-devs/nose/pull/699
if 'TEMPEST_PY26_NOSE_COMPAT' in os.environ:
    try:
        # NOTE(review): 'import unittest.case.SkipTest' always raises
        # ImportError (SkipTest is a class, not a module), so even on
        # py2.7 this takes the override path instead of the RuntimeError
        # below — confirm the intended behavior upstream before changing.
        import unittest.case.SkipTest
        # convince pep8 we're using the import...
        if unittest.case.SkipTest:
            pass
        raise RuntimeError("You have unittest.case.SkipTest; "
                           "no need to override")
    except ImportError:
        LOG.info("Overriding skipException to nose SkipTest")
        testtools.TestCase.skipException = nose.plugins.skip.SkipTest

# Classes whose setUp ran but whose tearDownClass never reached the base
# class; checked at interpreter exit by validate_tearDownClass().
at_exit_set = set()


def validate_tearDownClass():
    """atexit hook: fail loudly if any test class skipped the base
    tearDownClass (setUp registers the class, tearDownClass unregisters).
    """
    if at_exit_set:
        # The original passed the last two message fragments as a second
        # positional argument to RuntimeError (stray comma), mangling the
        # message; they are now concatenated into one string.
        raise RuntimeError("tearDownClass does not calls the super's "
                           "tearDownClass in these classes: "
                           + str(at_exit_set) + "\n"
                           "If you see the exception, with another "
                           "exception please do not report this one! "
                           "If you are changing tempest code, make sure you "
                           "are calling the super class's tearDownClass!")


atexit.register(validate_tearDownClass)


class BaseTestCase(testtools.TestCase,
                   testtools.testcase.WithAttributes,
                   testresources.ResourcedTestCase):
    """Common base class for tempest test cases.

    Tracks whether subclasses correctly chain setUpClass/tearDownClass,
    applies per-test timeout and stdout/stderr/log capture fixtures from
    OS_* environment variables, and provides client-manager helpers.
    """

    config = config.TempestConfig()

    setUpClassCalled = False

    @classmethod
    def setUpClass(cls):
        # hasattr guard: pre-2.7 unittest.TestCase had no setUpClass.
        if hasattr(super(BaseTestCase, cls), 'setUpClass'):
            super(BaseTestCase, cls).setUpClass()
        cls.setUpClassCalled = True

    @classmethod
    def tearDownClass(cls):
        # Record that this class's teardown chain reached the base class.
        at_exit_set.discard(cls)
        if hasattr(super(BaseTestCase, cls), 'tearDownClass'):
            super(BaseTestCase, cls).tearDownClass()

    def setUp(self):
        super(BaseTestCase, self).setUp()
        if not self.setUpClassCalled:
            raise RuntimeError("setUpClass does not calls the super's "
                               "setUpClass in the " +
                               self.__class__.__name__)
        at_exit_set.add(self.__class__)
        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
        try:
            test_timeout = int(test_timeout)
        except ValueError:
            # Unparseable value means no timeout.
            test_timeout = 0
        if test_timeout > 0:
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))

        if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
                os.environ.get('OS_STDOUT_CAPTURE') == '1'):
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
                os.environ.get('OS_STDERR_CAPTURE') == '1'):
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
        # Log capture is ON unless explicitly disabled.
        if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
                os.environ.get('OS_LOG_CAPTURE') != '0'):
            log_format = '%(asctime)-15s %(message)s'
            self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
                                                   format=log_format,
                                                   level=None))

    @classmethod
    def get_client_manager(cls):
        """
        Returns an Openstack client manager
        """
        cls.isolated_creds = isolated_creds.IsolatedCreds(cls.__name__)

        force_tenant_isolation = getattr(cls, 'force_tenant_isolation', None)
        if (cls.config.compute.allow_tenant_isolation or
                force_tenant_isolation):
            creds = cls.isolated_creds.get_primary_creds()
            username, tenant_name, password = creds
            os = clients.Manager(username=username,
                                 password=password,
                                 tenant_name=tenant_name,
                                 interface=cls._interface)
        else:
            os = clients.Manager(interface=cls._interface)
        return os

    @classmethod
    def clear_isolated_creds(cls):
        """
        Clears isolated creds if set
        """
        # Default added so classes that never called get_client_manager
        # do not raise AttributeError here.
        if getattr(cls, 'isolated_creds', None):
            cls.isolated_creds.clear_isolated_creds()

    @classmethod
    def _get_identity_admin_client(cls):
        """
        Returns an instance of the Identity Admin API client
        """
        os = clients.AdminManager(interface=cls._interface)
        admin_client = os.identity_client
        return admin_client

    @classmethod
    def _get_client_args(cls):
        return (
            cls.config,
            cls.config.identity.admin_username,
            cls.config.identity.admin_password,
            cls.config.identity.uri
        )


def call_until_true(func, duration, sleep_for):
    """
    Call the given function until it returns True (and return True) or
    until the specified duration (in seconds) elapses (and return
    False).

    :param func: A zero argument callable that returns True on success.
    :param duration: The number of seconds for which to attempt a
        successful call of the function.
    :param sleep_for: The number of seconds to sleep after an unsuccessful
                      invocation of the function.
    """
    now = time.time()
    timeout = now + duration
    while now < timeout:
        if func():
            return True
        LOG.debug("Sleeping for %d seconds", sleep_for)
        time.sleep(sleep_for)
        now = time.time()
    return False
In a global marketing “first” for Sloggi, over 30 countries, encompassing every continent are joining together to find the most perfect male and female bottoms in the world. A series of inaugural events is planned in each country, the venue for the “official” UK launch being the music V Festival at Chelmsford on August 18th & 19th. Here, Sloggi will have a dedicated area with DJ’s, fashion shows, fun and games, plus Sloggi photo studios where festival visitors can enter the contest. Right now, bottoms are already being uploaded on the UK website. Initial voting is running online, which also offer opportunities for social networking. The live national final will follow in London in October. The international grand final will be a glittering event in a European capital city in November and the winners will be awarded modelling contracts, plus insurance for their bottoms!
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).

from openerp import api
from lxml import etree
from openerp.addons.base.res.res_config import \
    res_config_settings


class ResConfigSettings(res_config_settings):
    """Strip enterprise-upsell widgets from res.config settings views."""

    @api.model
    def fields_view_get(self, view_id=None, view_type='form',
                        context=None, toolbar=False, submenu=False):
        """Return the view with every ``upgrade_boolean`` upsell field,
        its wrapping <div>(s) and the <label> preceding the wrapper
        removed from the arch.
        """
        ret_val = super(ResConfigSettings, self).fields_view_get(
            view_id=view_id, view_type=view_type, context=context,
            toolbar=toolbar, submenu=submenu,
        )
        page_name = ret_val['name']
        doc = etree.XML(ret_val['arch'])
        queries = []
        if page_name == 'account settings':
            # These enterprise modules are advertised on the account
            # settings page and need explicit, name-specific removal.
            # NOTE: the original embedded line-continuation backslashes
            # *inside* the string literals, baking runs of stray spaces
            # into the XPath expressions; the clean literals below are
            # XPath-equivalent (XPath ignores whitespace around 'and').
            queries += [
                "//div[field[@name='module_account_reports'"
                " and @widget='upgrade_boolean']]",
                "//div[field[@name='module_account_reports_followup'"
                " and @widget='upgrade_boolean']]",
                "//div[field[@name='module_account_batch_deposit'"
                " and @widget='upgrade_boolean']]",
            ]
        # Generic patterns, ordered so labels/containers are matched
        # before the bare field.
        queries += [
            "//div[div[field[@widget='upgrade_boolean']]]"
            "/preceding-sibling::label[1]",
            "//div[div[field[@widget='upgrade_boolean']]]",
            "//div[field[@widget='upgrade_boolean']]"
            "/preceding-sibling::label[1]",
            "//div[field[@widget='upgrade_boolean']]",
            "//field[@widget='upgrade_boolean']",
        ]
        for query in queries:
            for item in doc.xpath(query):
                item.getparent().remove(item)
        ret_val['arch'] = etree.tostring(doc)
        return ret_val
CareCredit knows pets are family too. That's why they offer veterinary financing to help keep your most cherished family members in top shape. The CareCredit healthcare credit card helps to manage your veterinary expenses—everything from routine exams and surgery to unexpected emergencies. The card provides flexibility and convenience not just for your pets but for all members of your family! It can be used for everyday items at participating healthcare providers or merchant locations. Visit carecredit.com/partners to view retail locations that accept the card. Central Carroll Animal Emergency provides critical and emergency care for the Carroll County region. They are open 24 hours beginning Friday evenings at 7pm through Tuesday morning at 8am. They are closed during the day Tuesday- Friday and reopen each evening at 7pm. The Humane Society of Carroll County is dedicated to compassionate treatment of animals through adoption, population control, education and protection. We are dedicated to compassionate treatment. Did your dog or cat just eat something poisonous? Call your veterinarian or Pet Poison Helpline immediately. Pet Poison Helpline is a 24-hour animal poison control service available throughout the U.S., Canada, and the Caribbean for pet owners and veterinary professionals who require assistance with treating a potentially poisoned pet. We have the ability to help every poisoned pet, with all types of poisonings, 24 hours a day. Our knowledge and expertise of pet poisons will put your mind at ease when dealing with a potential emergency. In order to provide this critical service, please be advised that there is a $49 per incident fee, payable by credit card. This fee covers the initial consultation as well as all follow-up calls associated with the management of the case.
# This source file is part of mc3p, the Minecraft Protocol Parsing Proxy.
#
# Copyright (C) 2011 Matthew J. McGill

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# All multi-byte values on the wire are big-endian (struct '>' prefix).
# Each MC_* name below pairs a parse_X(stream) reader with its inverse
# emit_X(value) writer for one field type.  Python 2 module: emitters
# return byte strings (str), parse_string returns unicode.

import struct, logging, inspect

logger = logging.getLogger('parsing')

class Parsem(object):
    """Parser/emitter."""
    def __init__(self,parser,emitter):
        # Bind the free functions directly as instance attributes so that
        # p.parse(stream) / p.emit(value) call them without a bound self.
        setattr(self,'parse',parser)
        setattr(self,'emit',emitter)

def parse_byte(stream):
    # Signed 8-bit integer.
    return struct.unpack_from(">b",stream.read(1))[0]

def emit_byte(b):
    return struct.pack(">b",b)

def defmsg(msgtype, name, pairs):
    """Build a Parsem for a message out of (name,Parsem) pairs."""
    def parse(stream):
        # Fields are read in declaration order into a dict keyed by field
        # name; 'msgtype' records the message id.
        msg = {'msgtype': msgtype}
        for (name,parsem) in pairs:
            msg[name] = parsem.parse(stream)
        return msg
    def emit(msg):
        # One unsigned message-type byte, then each field in order.
        return ''.join([emit_unsigned_byte(msgtype),
                        ''.join([parsem.emit(msg[name]) for (name,parsem) in pairs])])
    return Parsem(parse,emit)

MC_byte = Parsem(parse_byte,emit_byte)

def parse_unsigned_byte(stream):
    # Unsigned 8-bit integer.
    return struct.unpack(">B",stream.read(1))[0]

def emit_unsigned_byte(b):
    return struct.pack(">B",b)

MC_unsigned_byte = Parsem(parse_unsigned_byte, emit_unsigned_byte)

def parse_short(stream):
    # Signed 16-bit big-endian integer.
    return struct.unpack_from(">h",stream.read(2))[0]

def emit_short(s):
    return struct.pack(">h",s)

MC_short = Parsem(parse_short, emit_short)

def parse_int(stream):
    # Signed 32-bit big-endian integer.
    return struct.unpack_from(">i",stream.read(4))[0]

def emit_int(i):
    return struct.pack(">i",i)

MC_int = Parsem(parse_int, emit_int)

def parse_long(stream):
    # Signed 64-bit big-endian integer.
    return struct.unpack_from(">q",stream.read(8))[0]

def emit_long(l):
    return struct.pack(">q",l)

MC_long = Parsem(parse_long, emit_long)

def parse_float(stream):
    # IEEE-754 single-precision, big-endian.
    return struct.unpack_from(">f",stream.read(4))[0]

def emit_float(f):
    return struct.pack(">f",f)

MC_float = Parsem(parse_float, emit_float)

def parse_double(stream):
    # IEEE-754 double-precision, big-endian.
    return struct.unpack_from(">d",stream.read(8))[0]

def emit_double(d):
    return struct.pack(">d",d)

MC_double = Parsem(parse_double, emit_double)

def parse_string(stream):
    # UTF-16-BE string: a short *character* count, then 2 bytes per char.
    n = parse_short(stream)
    if n == 0:
        return unicode("", encoding="utf-16-be")
    return unicode(stream.read(2*n), encoding="utf-16-be")

def emit_string(s):
    return ''.join([emit_short(len(s)), s.encode("utf-16-be")])

MC_string = Parsem(parse_string, emit_string)

def parse_string8(stream):
    # Raw byte string: a short byte count, then the bytes themselves.
    n = parse_short(stream)
    if n == 0:
        return ''
    return stream.read(n)

def emit_string8(s):
    return ''.join([emit_short(len(s)),s])

MC_string8 = Parsem(parse_string8, emit_string8)

def parse_bool(stream):
    # One byte: 0 is False, anything else True (emit writes 0/1).
    b = struct.unpack_from(">B",stream.read(1))[0]
    if b==0:
        return False
    else:
        return True

def emit_bool(b):
    if b:
        return emit_unsigned_byte(1)
    else:
        return emit_unsigned_byte(0)

MC_bool = Parsem(parse_bool, emit_bool)

def parse_metadata(stream):
    """Parse an entity-metadata list terminated by the 0x7F sentinel byte.

    Each entry byte packs the value type in its top 3 bits (the index in
    the low 5 bits is discarded here).  Types: 0=byte, 1=short, 2=int,
    3=float, 4=string, 5=item triple (short, byte, short).
    """
    data=[]
    type = parse_unsigned_byte(stream)
    while (type != 127):
        type = type >> 5
        if type == 0:
            data.append(parse_byte(stream))
        elif type == 1:
            data.append(parse_short(stream))
        elif type == 2:
            data.append(parse_int(stream))
        elif type == 3:
            data.append(parse_float(stream))
        elif type == 4:
            data.append(parse_string(stream))
        elif type == 5:
            data.append(parse_short(stream))
            data.append(parse_byte(stream))
            data.append(parse_short(stream))
        else:
            # NOTE(review): 'parse.i' is not defined in any visible scope,
            # so this error path would itself raise AttributeError; it also
            # assumes the stream exposes a 'buf' attribute — confirm
            # against the stream implementation elsewhere in the project.
            logger.error(repr(stream.buf[:parse.i]))
            raise Exception("Unknown metadata type %d" % type)
        # NOTE(review): subsequent sentinel reads use the *signed* parser
        # while the first read is unsigned; 0x7F is positive either way,
        # so the 127 comparison still terminates — presumably intentional.
        type = parse_byte(stream)
    return data

MC_metadata = Parsem(parse_metadata,None) #Todo! Make a metadata emit!
def parse_inventory(stream):
    """Parse a window-items payload: a short slot count followed by that
    many slot records (see parse_slot_update)."""
    n = parse_short(stream)
    return {"count": n,
            "slots": [parse_slot_update(stream) for _ in xrange(0, n)]}

def emit_inventory(inv):
    """Inverse of parse_inventory."""
    slotstr = ''.join([emit_slot_update(slot) for slot in inv['slots']])
    return ''.join([emit_short(inv['count']), slotstr])

MC_inventory = Parsem(parse_inventory, emit_inventory)

def parse_slot_update(stream):
    """Parse one inventory slot: item id short, or -1 for an empty slot;
    non-empty slots carry a count byte and a uses/damage short."""
    id = parse_short(stream)
    if id == -1:
        return None
    return {"item_id": id,
            "count": parse_byte(stream),
            "uses": parse_short(stream)}

def emit_slot_update(update):
    """Inverse of parse_slot_update; None emits the -1 empty-slot marker."""
    if not update:
        return emit_short(-1)
    return ''.join([emit_short(update['item_id']),
                    emit_byte(update['count']),
                    emit_short(update['uses'])])

MC_slot_update = Parsem(parse_slot_update, emit_slot_update)

# Item ids whose slot data carries a trailing NBT (enchantment) blob in
# the "slot update 2" encoding: damageable items (tools, armour, fishing
# rod, shears).
SLOT_UPDATE_2_ITEM_IDS = set([
    0x15A,  # Fishing rod
    0x167,  # Shears

    # TOOLS
    # sword, shovel, pickaxe, axe, hoe
    0x10C, 0x10D, 0x10E, 0x10F, 0x122,  # WOOD
    0x110, 0x111, 0x112, 0x113, 0x123,  # STONE
    0x10B, 0x100, 0x101, 0x102, 0x124,  # IRON
    0x114, 0x115, 0x116, 0x117, 0x125,  # DIAMOND
    0x11B, 0x11C, 0x11D, 0x11E, 0x126,  # GOLD

    # ARMOUR
    # helmet, chestplate, leggings, boots
    0x12A, 0x12B, 0x12C, 0x12D,  # LEATHER
    0x12E, 0x12F, 0x130, 0x131,  # CHAIN
    0x132, 0x133, 0x134, 0x135,  # IRON
    0x136, 0x137, 0x138, 0x139,  # DIAMOND
    # was 0x13A..0x13C, 0x14D — 0x14D (boat) broke the consecutive-id
    # pattern; gold boots are 0x13D.
    0x13A, 0x13B, 0x13C, 0x13D   # GOLD
])

def parse_slot_update2(stream):
    """Like parse_slot_update, but items in SLOT_UPDATE_2_ITEM_IDS carry
    an extra short NBT length and, when positive, that many NBT bytes."""
    r = parse_slot_update(stream)
    if r is not None and r['item_id'] in SLOT_UPDATE_2_ITEM_IDS:
        n = parse_short(stream)
        r['nbt_size'] = n
        if n > 0:
            r['nbt_data'] = stream.read(n)
        else:
            r['nbt_data'] = None
    return r

def emit_slot_update2(update):
    """Inverse of parse_slot_update2."""
    if not update:
        return emit_short(-1)
    s = emit_slot_update(update)
    if update['item_id'] in SLOT_UPDATE_2_ITEM_IDS:
        size = update['nbt_size']
        # was ''.join(s, ...): str.join takes a single iterable, so the
        # original raised TypeError; it also referenced an undefined
        # 'nbtdata' name.  The data is emitted under the same size > 0
        # condition parse_slot_update2 uses (nbt_data is None otherwise).
        s = ''.join([s, emit_short(size)])
        if size > 0:
            s = ''.join([s, update['nbt_data']])
    return s

MC_slot_update2 = Parsem(parse_slot_update2, emit_slot_update2)

def parse_inventory2(stream):
    """Parse a window-items payload using the NBT-aware slot encoding."""
    n = parse_short(stream)
    return {"count": n,
            "slots": [parse_slot_update2(stream) for _ in xrange(0, n)]}

def emit_inventory2(inv):
    """Inverse of parse_inventory2."""
    slotstr = ''.join([emit_slot_update2(slot) for slot in inv['slots']])
    return ''.join([emit_short(inv['count']), slotstr])

MC_inventory2 = Parsem(parse_inventory2, emit_inventory2)

def parse_chunk(stream):
    """Parse an opaque chunk blob: int byte length, then the raw bytes."""
    n = parse_int(stream)
    return {'size': n, 'data': stream.read(n)}

def emit_chunk(ch):
    """Inverse of parse_chunk."""
    return ''.join([emit_int(ch['size']), ch['data']])

MC_chunk = Parsem(parse_chunk, emit_chunk)

def parse_multi_block_change(stream):
    """Parse a multi-block-change record: a short count n, then n packed
    coordinate shorts, n type bytes and n metadata bytes."""
    n = parse_short(stream)
    return {'coord_array': [parse_short(stream) for _ in xrange(0, n)],
            'type_array': [parse_byte(stream) for _ in xrange(0, n)],
            'metadata_array': [parse_byte(stream) for _ in xrange(0, n)]}

def emit_multi_block_change(changes):
    """Inverse of parse_multi_block_change."""
    return ''.join([emit_short(len(changes['coord_array'])),
                    ''.join([emit_short(x) for x in changes['coord_array']]),
                    ''.join([emit_byte(x) for x in changes['type_array']]),
                    ''.join([emit_byte(x) for x in changes['metadata_array']])])

MC_multi_block_change = Parsem(parse_multi_block_change,
                               emit_multi_block_change)

def parse_explosion_records(stream):
    """Parse explosion-affected blocks: int count, then (x,y,z) byte
    offset triples."""
    n = parse_int(stream)
    return {'count': n,
            'data': [(parse_byte(stream), parse_byte(stream),
                      parse_byte(stream)) for _ in xrange(0, n)]}

def emit_explosion_records(msg):
    """Inverse of parse_explosion_records."""
    # The original joined a list of *tuples*, which str.join rejects;
    # concatenate each record's three emitted bytes instead.
    recs = ''.join([emit_byte(rec[0]) + emit_byte(rec[1]) + emit_byte(rec[2])
                    for rec in msg['data']])
    return ''.join([emit_int(msg['count']), recs])

MC_explosion_records = Parsem(parse_explosion_records,
                              emit_explosion_records)

def parse_vehicle_data(stream):
    """Parse add-object/vehicle extra data: an int, then (if positive)
    three shorts."""
    x = parse_int(stream)
    data = {'unknown1': x}
    if x > 0:
        data['unknown2'] = parse_short(stream)
        data['unknown3'] = parse_short(stream)
        data['unknown4'] = parse_short(stream)
    return data

def emit_vehicle_data(data):
    """Inverse of parse_vehicle_data."""
    x = data['unknown1']
    s = emit_int(x)
    if x > 0:
        # was emit_int — parse_vehicle_data reads these fields as shorts,
        # so emitting ints would desync the stream on round-trip.
        s = ''.join([s, emit_short(data['unknown2']),
                     emit_short(data['unknown3']),
                     emit_short(data['unknown4'])])
    return s

MC_vehicle_data = Parsem(parse_vehicle_data, emit_vehicle_data)

def parse_item_data(stream):
    """Parse a length-prefixed (unsigned byte) opaque blob."""
    n = parse_unsigned_byte(stream)
    if n == 0:
        return ''
    return stream.read(n)

def emit_item_data(s):
    """Inverse of parse_item_data."""
    # The length is emitted as one unsigned byte, so it must fit in
    # 0..255 (the original asserted < 265, a typo).
    assert len(s) < 256
    return ''.join([emit_unsigned_byte(len(s)), s])

MC_item_data = Parsem(parse_item_data, emit_item_data)

def parse_fireball_data(stream):
    """Parse fireball extra data: thrower entity id int, then (if
    positive) three shorts."""
    data = {}
    data['thrower_id'] = parse_int(stream)
    if data['thrower_id'] > 0:
        data['u1'] = parse_short(stream)
        data['u2'] = parse_short(stream)
        data['u3'] = parse_short(stream)
    return data

def emit_fireball_data(data):
    """Inverse of parse_fireball_data."""
    # was ''.join(str, ...): TypeError (join takes one iterable) and the
    # local shadowed the builtin 'str'.
    s = emit_int(data['thrower_id'])
    if data['thrower_id'] > 0:
        s = ''.join([s, emit_short(data['u1']),
                     emit_short(data['u2']),
                     emit_short(data['u3'])])
    return s

MC_fireball_data = Parsem(parse_fireball_data, emit_fireball_data)
Whats New in Active Directory 2016. Prerequisites of Active Directory Installation. Once above prerequisites completed…. Let’s Start for Active Directory Step by Step installation. Click on Start button and then click on Server Manager icon or you can access it by running command ServerManagerCmd.exe and press enter. Before installing Active directory service, we have to ensure that some settings are configure properly. We can see below some settings which have to be configured. Like: Computer name, firewall, remote management, NIC, IP, windows Update, IE security, Time zone etc. Once all done we can move for next step, Means ADDS Role installation. To install Active Directory Domain Service (ADDS). 1. Click on Dashboard Tab Then Click on “Add roles and services” or we can click Manage Tab near (notification Flag) and then “Add roles and services”. 2. In this window we will get some information related to the “Add Roles and Features Wizard”. Like what task should be verified before proceeding. Here we have nothing to do so we can skip this page by clicking on “Next”. 3. In this window we have to select type of installation. as we are going to setup new server (first physical server) so we will select “Role-based or feature-based installation” then click on “Next”. 4. In this window we have to select our server from server pool. As this is our first server so only one server is showing in the server pool list which is “XA-DC”. here XA-DC is our server name. so select that machine and click “Next”. 5. In this window we have to select which role we want to install .AS we are going to promote this machine as “Domain controller” so we have to check “Active Directory Domain Service” and click on “Next”. 6. As ADDS need some additional features and services to work properly, so a child window will be open to inform us what are the services and features which are going to be installed. Like ADDS and ADLDS tools, PowerShell Modules etc. 
For proper management of AD some managements tools may be required, so we have to check the “Include Management Tools (If applicable)” and click on “Next”. 7. In this window we have to select which features we need to install. Required features has been already selected previously, so we will skip this window by clicking “Next”. 8. In this window, we will get some information about the ADDS and tips to maintain Domain properly. Here we have nothing to do so we will click “Next” here. 9. In this window, we will get a short detail about the roles and feature which is going to be installed. Here is a check box to Restart the system so these roles, features and services will have installed properly. So we will check the box “Restart the destination server automatically if required”. 10. When we will check that Check box then a child window will be open to ensure the “Automatic System restart”. As it is already mentioned “If required” so we will click on “Yes”. 11. While installation is going on we can close that window. That remaining process will be continuing in background. We can access that process window from Dashboard by clicking on “Notification flag”. 12. After successful installation, we will get a link to “Promote this server to a domain controller” additionally we can export all the settings and configuration of this process by clicking on “Export Configuration Settings”. We can close this window now. 13. To promote this machine as domain controller we can use that link or we can access it via Dashboard Notification Flag. 14. Now the main DC configuration process will be start. As this is our new domain so we will select “Add a new Forest” and entered the name of new domain whatever we want. In our case I am going to create a forest (Domain) named “eXperts-Adda.com” and click “Next”. 15. Here we have to set “Forest functional level” and “Domain functional level”. Let us know 1st what are these. 
Functional levels determine the available Active Directory Domain Services (AD DS) domain or forest capabilities. They also determine which Windows Server operating systems you can run on domain controllers in the domain or forest. However, functional levels do not affect which operating systems you can run on workstations and member servers that are joined to the domain or forest. When we deploy AD DS, set the domain and forest functional levels to the highest value that our environment can support. This way, we can use as many AD DS features as possible. we cannot set the domain functional level to a value that is lower than the forest functional level. To know more about Forest and Domain Functional Level Check below link. One more thing as per we can increase the Forest and domain function level but can’t decrease it. So choose functional level as per environment requirement. Here we are going to set Both level to “Windows Server 2016”. So we can use more features. AS we know that this our 1st DC so this will be our Global Catalog (GC) server too. The global catalog is the set of all objects in an Active Directory Domain Services (AD DS) forest. A global catalog server is a domain controller that stores a full copy of all objects in the directory for its host domain and a partial, read-only copy of all objects for all other domains in the forest. Global catalog servers respond to global catalog queries. So finally we are going to set Forest and Domain Functional level to “Windows Server 2016” and set DSRM password too. Directory Services Restore Mode (DSRM) is a special boot mode for repairing or recovering Active Directory. It is used to log on to the computer when Active Directory has failed or needs to be restored. And then click on “Next”. 16. Here we have to set NetBIOS name which is similar to Hostname but this is work on different protocol. Here we are going to set it “eXperts-Adda” and click “Next”. 17. 
In this window we have to specify the location of AD DS database, AD log files and SYSVOL folder. These locations must be on NTFS partition and secure location too. For now, I am going to use its default location which is “c:\Windows\NTDS” for Database & log files and “c:\Windows\SYSVOL” for SYSVOL. Click on “Next”. 18. Before DC promotion we can review all the selection whatever we have made. Additionally, we can view the script by using we can promote DC using CMD or PowerShell. Then click on “Next”. 19. After clicking on “Next” installation process will start verification of Prerequisites for Domain Controller operation. 20. Here we can see that verification is complete. Now we can click on “Install” to start installation. 21. Installation process will take some time around 10 -15 minutes depends upon system speed. 22. Once installation will be complete we can see under Server Manager Dashboard, there is some new options and tabs available related to Active Directory. 23. Once we will click on ADDS Tab then we will see the server details on which server this Role is installed. 24. When we will open “Active Directory Users and Computer” via running command “DSA.MSC” or via browsing it from start menu, we can see that a new domain named “eXperts-Adda.com” is created now. How to verify that Active Directory Domain Service is successfully installed. Run dcdiag from command prompt, if it shows all test pass then Active directory is properly installed. By checking SRV Records in DNS Server. After Active Directory is installed, DC will register SRV records in DNS. Verify SYSVOL and Net Logon Folder Share by using net share command. Verify Database and Log files NTDS.DIT, edb. *, Res*.log. Verify Active Directory objects like computers, users and ForeignSecurityPrincipals are created in ADUC. Verify whether Default Domain Controllers OU is created and holds the all Domain Controllers. Note : You are required to be logged-in as a user to leave a feedback.
###############################################################################
# Copyright (c) 2017 Merantix GmbH
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
#    Ryan Henderson - initial API and implementation and/or initial
#    documentation
#    Josh Chen - refactor and class config
###############################################################################
from keras.applications import imagenet_utils
import numpy as np
from PIL import Image

from picasso.models.keras import KerasModel

# Input size expected by VGG16: (height, width, RGB channels).
VGG16_DIM = (224, 224, 3)


class KerasVGG16Model(KerasModel):
    """Picasso model wrapper around a Keras VGG16 ImageNet classifier."""

    def preprocess(self, raw_inputs):
        """Resize and normalize PIL images for VGG16.

        Args:
            raw_inputs (list of Images): a list of PIL Image objects

        Returns:
            array (float32): num images * height * width * num channels

        """
        image_arrays = []
        for raw_im in raw_inputs:
            # Image.LANCZOS is the same filter as the former Image.ANTIALIAS,
            # which was deprecated in Pillow 9.1 and removed in Pillow 10.
            im = raw_im.resize(VGG16_DIM[:2], Image.LANCZOS)
            im = im.convert('RGB')
            arr = np.array(im).astype('float32')
            image_arrays.append(arr)

        all_raw_inputs = np.array(image_arrays)
        # Apply the ImageNet mean subtraction / channel ordering expected by
        # the pretrained VGG16 weights.
        return imagenet_utils.preprocess_input(all_raw_inputs)

    def decode_prob(self, class_probabilities):
        """Map class probability rows to human-readable ImageNet labels.

        Args:
            class_probabilities (ndarray): batch of softmax outputs, one
                row per image.

        Returns:
            list of lists of dicts (one inner list per image, at most
            ``self.top_probs`` entries each) with keys 'code', 'name',
            'prob' (formatted to three decimals) and 'index' (position
            of the class in the ImageNet class index).
        """
        r = imagenet_utils.decode_predictions(class_probabilities,
                                              top=self.top_probs)
        results = [
            [{'code': entry[0],
              'name': entry[1],
              'prob': '{:.3f}'.format(entry[2])}
             for entry in row]
            for row in r
        ]
        # CLASS_INDEX is populated as a side effect of decode_predictions
        # above, so it is safe to read here.
        classes = imagenet_utils.CLASS_INDEX
        class_keys = list(classes.keys())
        class_values = list(classes.values())

        for result in results:
            for entry in result:
                entry['index'] = int(
                    class_keys[class_values.index([entry['code'],
                                                   entry['name']])])
        return results
A Cranston Tradition Returns this Memorial Day Weekend! From May 24th to May 28th, Rockwell Amusements brings its carnival fun back to Mulligan's Island for the 8th consecutive year. As always, families can enjoy the rides, play the games, and eat the fabulous carnival food — entertainment that appeals to the entire family. With rides for every age, Rockwell is able to design a midway specific to any event with the perfect balance of spectacular, major and kiddie rides. Presenting fun, affordable entertainment, games and the all-time favorite delicious carnival food, they aim to provide the cleanest, safest and most thrilling packages that can only come from years of experience. Monday - ride unlimited until close! Tickets are $1 each, 25 for $20 and 40 for $30; rides require 3, 4 or 5 tickets depending on the ride. WEATHER MAY ALTER SOME OF THE OPENING AND CLOSING TIMES. MULLIGAN’S ISLAND VENUES ARE OPEN THIS WEEKEND.
#!/usr/bin/env ipython
# Plot superposed-epoch profiles (average/median with standard-error bands)
# of solar-wind variables across sheath and magnetic-cloud (MC) intervals,
# reading the pre-binned ASCII profiles and saving one PNG per variable.
import os
from pylab import *
from numpy import *
import matplotlib.patches as patches
import matplotlib.transforms as transforms
import console_colors as ccl
import numpy as np
#------------------------------
nbins = 50 # (check by eye) bins per normalized time unit
MCwant = '2' # '2', '2.2H'
WangFlag = 'NaN' #'NaN' #'90' #'130'
CorrShift = True
#dTdays = 0.1 # check by eye
# Prefix selecting the input/output tree with or without shift correction.
if CorrShift==True:
    prexShift = 'wShiftCorr'
else:
    prexShift = 'woShiftCorr'
#------------------------------
# NOTE: check "N-mcs" and "N-sheaths" by eye!
# Each row: varname, range-for-plot, label, N-mcs, N-sheaths
VARstf = []
VARstf += [['B', [5., 19.], 'B [nT]', 63, 57]]
VARstf += [['V', [380., 600.], 'Vsw [km/s]', 59, 57]]
VARstf += [['rmsBoB', [0.015, 0.14], 'rms($\hat B$/|B|) [1]', 63, 57]]
VARstf += [['beta', [0.1, 10.], '$\\beta$ [1]', 52, 50]]
VARstf += [['Pcc', [3., 19.], 'proton density [#/cc]', 52, 50]]
VARstf += [['Temp', [1e4, 3e5], 'Temp [K]', 53, 50]]
VARstf += [['AlphaRatio', [0.02, 0.09], 'alpha ratio [K]', 45, 19]]
nvars = len(VARstf)

# Output directory for the figures; created if it does not exist.
dir_figs = '../plots/%s/MCflag%s/' % (prexShift, MCwant)
try:
    os.system('mkdir -p %s' % dir_figs)
except:
    print ccl.On+ " ---> Ya existe: %s" % dir_figs + ccl.W

print ccl.On+" generando figuras en: %s"%dir_figs + ccl.W

fgap=0.2 # fraction of tolerated data gap chosen for plotting
#------------------------------
for i in range(nvars):
    varname = VARstf[i][0]
    ylims = VARstf[i][1]
    ylabel = VARstf[i][2]
    Nmc = VARstf[i][3]
    Nsh = VARstf[i][4]
    # Pre-binned profile files: columns are [time, median, average, std, n].
    fname_sh = '../../../sheaths/ascii/MCflag%s/%s/MCflag%s_2before.4after_Wang%s_fgap%1.1f_%s.txt' % (MCwant, prexShift, MCwant, WangFlag, fgap, varname)
    fname_mc = '../../../mcs/ascii/MCflag%s/%s/MCflag%s_2before.4after_Wang%s_fgap%1.1f_%s.txt' % (MCwant, prexShift, MCwant, WangFlag, fgap, varname)
    varsh = loadtxt(fname_sh, unpack=True)
    varmc = loadtxt(fname_mc, unpack=True)
    # Sheath samples before normalized time 1, MC samples after time 0.
    cond_sh = varsh[0]<1.0
    cond_mc = varmc[0]>0.0
    #------ sheath
    t_sh = varsh[0][cond_sh]
    var_med_sh = varsh[1][cond_sh]
    var_avr_sh = varsh[2][cond_sh]
    var_std_sh = varsh[3][cond_sh]
    var_n_sh = varsh[4][cond_sh]
    #------ mc
    # MC time axis rescaled to [1, 4] so the sheath occupies [0, 1].
    t_mc = varmc[0][cond_mc]*3. + 1.0
    var_med_mc = varmc[1][cond_mc]
    var_avr_mc = varmc[2][cond_mc]
    var_std_mc = varmc[3][cond_mc]
    var_n_mc = varmc[4][cond_mc]
    #---------------------------------------------------
    fig = figure(1, figsize=(11, 5.5))
    ax = fig.add_subplot(111)
    ax.plot(t_sh, var_avr_sh, '-o', alpha=.9, c='black', markeredgecolor='none', label='average', markersize=5)
    ax.plot(t_mc, var_avr_mc, '-o', alpha=.9, c='black', markeredgecolor='none', markersize=5)
    # error bands (standard error of the mean) in the sheath
    inf = var_avr_sh-var_std_sh/sqrt(var_n_sh)
    sup = var_avr_sh+var_std_sh/sqrt(var_n_sh)
    ax.fill_between(t_sh, inf, sup, facecolor='gray', alpha=0.5)
    # error bands in the MC
    inf = var_avr_mc - var_std_mc/sqrt(var_n_mc)
    sup = var_avr_mc + var_std_mc/sqrt(var_n_mc)
    ax.fill_between(t_mc, inf, sup, facecolor='gray', alpha=0.5)
    # shade the sheath window (data coords in x, axes coords in y)
    trans = transforms.blended_transform_factory(
        ax.transData, ax.transAxes)
    rect1 = patches.Rectangle((0., 0.), width=1.0, height=1,
        transform=trans, color='orange', alpha=0.3)
    ax.add_patch(rect1)
    # shade the MC window
    rect1 = patches.Rectangle((1., 0.), width=3.0, height=1,
        transform=trans, color='blue', alpha=0.2)
    ax.add_patch(rect1)
    ax.plot(t_sh, var_med_sh, '-o', markersize=5 ,alpha=.8, c='red', markeredgecolor='none', label='median')
    ax.plot(t_mc, var_med_mc, '-o', markersize=5 ,alpha=.8, c='red', markeredgecolor='none')
    ax.grid()
    ax.set_ylim(ylims);
    ax.set_xlim(-2., 7.)
    ax.legend(loc='upper right')
    ax.set_xlabel('mixed time scale [1]')
    ax.set_ylabel(ylabel)
    TITLE = '# of MCs: %d \n\
# of sheaths: %d \n\
%dbins per time unit \n\
MCflag: %s \n\
WangFlag: %s' % (Nmc, Nsh, nbins, MCwant, WangFlag)
    ax.set_title(TITLE)
    # Plasma beta spans orders of magnitude, so use a log scale for it.
    if varname=='beta':
        ax.set_yscale('log')
    #show()
    fname_fig = '%s/MCflag%s_2before.4after_Wang%s_fgap%1.1f_%s' % (dir_figs, MCwant, WangFlag, fgap, varname)
    savefig('%s.png'%fname_fig, dpi=200, format='png', bbox_inches='tight')
    print ccl.Rn + " ---> generamos: " + fname_fig + ccl.W
    #savefig('%s.pdf'%fname_fig, dpi=200, format='pdf', bbox_inches='tight')
    #savefig('%s.eps'%fname_fig, dpi=200, format='eps', bbox_inches='tight')
    close()
Chattanooga and its vicinity. Buell disposed his line from Huntsville, Ala., to McMinnsville, Warren co., Tenn. So lay the opposing armies when Kirby Smith left Knoxville to invade Kentucky. Bragg crossed the Tennessee, just above Chattanooga, on Aug. 21, with thirty-six regiments of infantry, five of cavalry, and forty guns. Louisville was his destination. He advanced among the rugged mountains towards Buell's left at McMinnsville as a feint, but fairly flanked the Nationals. This was a cavalry movement, which resulted in a battle there. The horsemen were led by General Forrest, who, for several days, had been hovering around Lebanon, Murfreesboro, and Nashville. Attempting to cut off Buell's communications, he was confronted (Aug. 30) by National cavalry under E. P. Fyffe, of Gen. T. J. Wood's division, who had made a rapid march. After a short struggle the Confederates were routed. Supposing Bragg was aiming at Nashville, Buell took immediate measures to defend that city.
'''
Sprox widget and validator selectors for SAUCE.

@since: 2015-01-07
@author: moschlar
'''

import sqlalchemy.types as sqlat
import tw2.core as twc
import tw2.bootstrap.forms as twb
import tw2.jqplugins.chosen.widgets as twjc
import sprox.widgets.tw2widgets.widgets as sw
from sprox.sa.widgetselector import SAWidgetSelector
from sprox.sa.validatorselector import SAValidatorSelector, Email

from sauce.widgets.widgets import (LargeMixin, SmallMixin, AdvancedWysihtml5,
    MediumTextField, SmallTextField, CalendarDateTimePicker)
from sauce.widgets.validators import AdvancedWysihtml5BleachValidator


class ChosenPropertyMultipleSelectField(LargeMixin, twjc.ChosenMultipleSelectField, sw.PropertyMultipleSelectField):
    """Multiple-select field rendered with the jQuery "chosen" plugin."""

    # Let the chosen search box match anywhere in the option text,
    # not only at its beginning.
    search_contains = True

    def _validate(self, value, state=None):
        # Run the sprox/tw2 validation chain first, then additionally
        # enforce that a required field received at least one selection
        # (an empty list would otherwise pass as "present").
        value = super(ChosenPropertyMultipleSelectField, self)._validate(value, state)
        if self.required and not value:
            raise twc.ValidationError('Please select at least one value')
        else:
            return value


class ChosenPropertySingleSelectField(SmallMixin, twjc.ChosenSingleSelectField, sw.PropertySingleSelectField):
    """Single-select field rendered with the jQuery "chosen" plugin."""

    # Match anywhere in the option text, not only at its beginning.
    search_contains = True


class MyWidgetSelector(SAWidgetSelector):
    '''Custom WidgetSelector for SAUCE

    Primarily uses fields from tw2.bootstrap.forms and tw2.jqplugins.chosen.
    '''

    # String columns shorter than this are rendered as single-line text
    # fields instead of textareas (see select() below).
    text_field_limit = 256

    default_multiple_select_field_widget_type = ChosenPropertyMultipleSelectField
    default_single_select_field_widget_type = ChosenPropertySingleSelectField

    # Widgets chosen by column name, taking precedence over the
    # type-based defaults below.
    default_name_based_widgets = {
        'name': MediumTextField,
        'subject': MediumTextField,
        '_url': MediumTextField,
        'user_name': MediumTextField,
        'email_address': MediumTextField,
        '_display_name': MediumTextField,
        'description': AdvancedWysihtml5,
        'message': AdvancedWysihtml5,
    }

    def __init__(self, *args, **kwargs):
        # Map SQLAlchemy column types to the bootstrap/chosen widget set.
        self.default_widgets.update({
            sqlat.String: MediumTextField,
            sqlat.Integer: SmallTextField,
            sqlat.Numeric: SmallTextField,
            sqlat.DateTime: CalendarDateTimePicker,
            sqlat.Date: twb.CalendarDatePicker,
            sqlat.Time: twb.CalendarTimePicker,
            sqlat.Binary: twb.FileField,
            sqlat.BLOB: twb.FileField,
            sqlat.PickleType: MediumTextField,
            sqlat.Enum: twjc.ChosenSingleSelectField,
        })
        super(MyWidgetSelector, self).__init__(*args, **kwargs)

    def select(self, field):
        # Demote a TextArea to a single-line MediumTextField for short
        # columns.
        # NOTE(review): the ``length is None`` branch also demotes
        # *unbounded* text columns to a single-line field -- confirm
        # this is intended and not inverted.
        widget = super(MyWidgetSelector, self).select(field)
        if (issubclass(widget, sw.TextArea)
                and hasattr(field.type, 'length')
                and (field.type.length is None or field.type.length < self.text_field_limit)):
            widget = MediumTextField
        return widget


class MyValidatorSelector(SAValidatorSelector):
    """Custom ValidatorSelector for SAUCE; picks validators by column name."""

    _name_based_validators = {
        'email_address': Email,
        'description': AdvancedWysihtml5BleachValidator,
        'message': AdvancedWysihtml5BleachValidator,
    }

    # def select(self, field):
    #     print 'MyValidatorSelector', 'select', field
    #     return super(MyValidatorSelector, self).select(field)
The Pilgrimage To Beautiful Bali. Have you ever tried to corral six fourteen-year-olds halfway across the world? Through flights, check-ins, security lines, McDonald’s lines, and more security lines? As you can imagine, it’s no easy task. But it’s a task that we have taken on ever since the inception of Grom Games, a yearly trip that puts on display the most talented youth from all corners of the globe. This year’s mission included Noah Hill, Kade Matson, Caroline Marks, Eduardo Motta, Alan Cleland and Tommy Coleman, all making the pilgrimage to beautiful Bali. The events that followed are what you see above. So watch it all, from the first session to the last, and every milkshake in between.
# Build per-track pose trajectories by matching OpenPose/COCO keypoint
# detections to PoseTrack CSV annotations (matched by bounding-box IoU),
# then write one text file per sequence.
import os
import csv
import json
import glob
import progressbar
from collections import OrderedDict

from utils import intersection_over_union

# Dataset roots on the two development machines; kCurrentHome picks one.
kHaanjuHome = '/home/neohanju/Workspace/dataset'
kJMHome = 'C:/Users/JM/Desktop/Data/ETRIrelated/BMVC'
kCurrentHome = kJMHome
kPosetrackCSVAnnotationBasePath = os.path.join(kCurrentHome, 'posetrack/annotations/csv')
kCOCOKeypointsBasePath = os.path.join(kCurrentHome, 'posetrack/keypoints_COCO')


def load_posetrack_csv_annotation(anno_path):
    """Read one PoseTrack CSV file.

    Returns {'setname': file name without extension,
             'annotations': one OrderedDict per CSV row, keyed by the
             header row (first line of the file)}.
    """
    with open(anno_path, 'r') as csvfile:
        reader = csv.reader(csvfile)
        keys = next(reader)  # first row is the column header
        dict_list = [OrderedDict(zip(keys, row)) for row in reader]
    return {'setname': os.path.basename(anno_path).split('.')[0],
            'annotations': dict_list}


def load_posetrack_cvs_annotation_all(anno_base_path=kPosetrackCSVAnnotationBasePath):
    """Load every *.csv annotation under anno_base_path, sorted by name."""
    file_paths = glob.glob(os.path.join(anno_base_path, '*.csv'))
    file_paths.sort()
    print('>> Read posetrack annotations')
    dict_list = []
    for i in progressbar.progressbar(range(len(file_paths))):
        dict_list.append(load_posetrack_csv_annotation(file_paths[i]))
    return dict_list


def load_coco_keypoints(keypoints_dir):
    """Collect the per-frame COCO keypoint JSONs of one sequence directory.

    Each frame dict holds 'frameNumber' (parsed from the file name prefix)
    and 'keypoints': one flat [x, y, confidence, ...] list per person.
    """
    dir_name = os.path.basename(keypoints_dir)
    file_paths = glob.glob(os.path.join(keypoints_dir, '*.json'))
    file_paths.sort()
    detections = []
    for file_path in file_paths:
        cur_frame_dict = {'frameNumber': os.path.basename(file_path).split('_')[0],
                          'keypoints': []}
        with open(file_path, 'r') as json_file:
            json_data = json.loads(json_file.read())
            for people_info in json_data['people']:
                cur_frame_dict['keypoints'].append(people_info['pose_keypoints_2d'])
        detections.append(cur_frame_dict)
    # Directory names look like '<setname>_<suffix>'; drop the suffix.
    return {'setname': '_'.join(dir_name.split('_')[0:-1]),
            'detections': detections}


def load_coco_keypoints_all(keypoints_base_dir=kCOCOKeypointsBasePath):
    """Load COCO keypoints for every '<parent>/<child>' sequence directory."""
    parent_dir_name_list = next(os.walk(keypoints_base_dir))[1]
    parent_dir_name_list.sort()
    path_list = []
    for parent_dir in parent_dir_name_list:
        child_dir_name_list = next(os.walk(os.path.join(keypoints_base_dir, parent_dir)))[1]
        path_list += [os.path.join(keypoints_base_dir, parent_dir, current_dir)
                      for current_dir in child_dir_name_list]
    print('>> Read keypoints from COCO model')
    dict_list = [load_coco_keypoints(path_list[i])
                 for i in progressbar.progressbar(range(len(path_list)))]
    return dict_list


def is_keypoints_in_bbox(keypoints, bbox):
    """Return True when neck (1) and shoulders (2, 5) lie inside bbox.

    keypoints; [x0, y0, confidence_0, ..., x18, y18, confidence_18]
    bbox: [xmin, ymin, xmax, ymax]
    NOTE(review): joints with confidence 0 are reported at (0, 0) and will
    usually fall outside the box -- confirm rejecting them is intended.
    """
    [xmin, ymin, xmax, ymax] = bbox
    point_check_list = [1, 2, 5]
    for check_idx in point_check_list:
        if xmin > keypoints[3 * check_idx] or xmax < keypoints[3 * check_idx]:
            return False
        if ymin > keypoints[3 * check_idx + 1] or ymax < keypoints[3 * check_idx + 1]:
            return False
    return True


def get_trajectories(posetrack_annotation, coco_keypoint):
    """Match COCO detections to PoseTrack track IDs, frame by frame.

    For every annotated pose the concurrent COCO detection with the largest
    bounding-box IoU is accepted, provided its neck/shoulders fall inside
    the annotation box. Each output row is
    [track_id, 1, frame_number, <COCO keypoints...>, 0].
    """
    assert (posetrack_annotation['setname'] == coco_keypoint['setname'])

    # for allocation
    max_track_id = 0
    for cur_anno in posetrack_annotation['annotations']:
        if max_track_id < int(cur_anno['track_id']):
            max_track_id = int(cur_anno['track_id'])

    # clustering with track ID and set bounding box
    anno_with_ID = [[] for _ in range(max_track_id + 1)]
    for cur_anno in posetrack_annotation['annotations']:
        x0_idx = list(cur_anno.keys()).index("x0")
        # list of tuples like [('x0', '213'), ...]
        keypoints = list(cur_anno.items())[x0_idx:x0_idx+15*3]
        # Zero coordinates mark missing joints; exclude them from the box.
        xs = [float(point[1]) for point in keypoints[0::3] if float(point[1]) != 0]
        ys = [float(point[1]) for point in keypoints[1::3] if float(point[1]) != 0]
        cur_anno['bbox'] = [min(xs), min(ys), max(xs), max(ys)]
        anno_with_ID[int(cur_anno['track_id'])].append(cur_anno)

    # calculate bounding box of coco model's keypoints
    for frame_info in coco_keypoint['detections']:
        frame_info['bbox'] = []
        for keypoints in frame_info['keypoints']:
            xs, ys = [], []
            for p in range(0, len(keypoints), 3):
                if 0 == keypoints[p + 2]:  # skip zero-confidence joints
                    continue
                xs.append(keypoints[p])
                ys.append(keypoints[p + 1])
            frame_info['bbox'].append([min(xs), min(ys), max(xs), max(ys)])

    result_trajectories = []
    for person in anno_with_ID:
        # coco_idx walks the (frame-sorted) detections in lockstep with
        # the (frame-sorted) poses of this person.
        coco_idx = 0
        cur_trajectory = []
        for pose in person:
            # {bbox, frameNumber, head_x1, head_y1, head_x2, head_y2,
            #  track_id, x0, y0, is_visible_0 ... x14, y14, is_visible_14}
            # find concurrent coco keypoints
            # NOTE(review): if the annotation extends past the last
            # detection frame, coco_idx can reach len(detections) and the
            # access below would raise IndexError -- confirm inputs always
            # cover the annotated range.
            while coco_idx < len(coco_keypoint['detections']):
                if int(coco_keypoint['detections'][coco_idx]['frameNumber']) < int(pose['frameNumber']):
                    coco_idx += 1
                else:
                    break
            if int(coco_keypoint['detections'][coco_idx]['frameNumber']) > int(pose['frameNumber']):
                # there is no concurrent keypoint
                continue

            # current_coco_detections = []
            # while coco_idx < len(coco_keypoint['detections']):
            #     if int(coco_keypoint['detections'][coco_idx]['frameNumber']) == int(pose['frameNumber']):
            #         current_coco_detections.append(coco_keypoint['detections'][coco_idx])
            #         coco_idx += 1
            #     else:
            #         break

            # find matching keypoint among concurrent keypoints
            # criterion: largest I.O.U.(intersection over union)
            # but, neck and shoulders of max I.O.U. must be included by annotation box
            detection = coco_keypoint['detections'][coco_idx]
            if 0 == len(detection['keypoints']):
                continue
            bbox_iou = [intersection_over_union(pose['bbox'], detection['bbox'][i])
                        for i, keypoints in enumerate(detection['keypoints'])]
            max_iou_pos = bbox_iou.index(max(bbox_iou))
            if is_keypoints_in_bbox(detection['keypoints'][max_iou_pos], pose['bbox']):
                cur_trajectory.append(
                    [int(pose['track_id']), 1, int(pose['frameNumber'])]
                    + detection['keypoints'][max_iou_pos] + [0])
        result_trajectories.append(cur_trajectory)
    return result_trajectories


def save_trajectories(save_path, trajectories):
    """Write one pose per line, values separated by single spaces."""
    with open(save_path, 'w') as txtfile:
        for trajectory in trajectories:
            for pose in trajectory:
                txtfile.write(' '.join(map(lambda x: str(x), pose)) + '\n')


def save_trajectories_from_all(save_base_path,
                               posetrack_anno_base_path=kPosetrackCSVAnnotationBasePath,
                               coco_keypoints_base_path=kCOCOKeypointsBasePath):
    """Pair each annotation set with its keypoint set and save '<set>.txt'."""
    posetrack_annos = load_posetrack_cvs_annotation_all(posetrack_anno_base_path)
    coco_keypoints = load_coco_keypoints_all(coco_keypoints_base_path)
    for posetrack_annotation in posetrack_annos:
        # Keep only the keypoint sets not yet consumed so later scans shrink.
        left_coco_keypoints = []
        for coco_keypoint in coco_keypoints:
            if posetrack_annotation['setname'] == coco_keypoint['setname']:
                save_trajectories(os.path.join(save_base_path, posetrack_annotation['setname'] + '.txt'),
                                  get_trajectories(posetrack_annotation, coco_keypoint))
            else:
                left_coco_keypoints.append(coco_keypoint)
        coco_keypoints = left_coco_keypoints


if "__main__" == __name__:
    # NOTE(review): output files are written into the keypoints base path --
    # confirm this is the intended save directory.
    save_trajectories_from_all(kCOCOKeypointsBasePath)

# ()()
# ('') HAANJU.YOO
Beautifying the neighborhoods we inhabit extends beyond lawn care – it means taking an active role in maintaining healthy, flourishing public venues. That's why we've teamed up with the New Hampshire Fisher Cats as their official landscape and snow management provider at Northeast Delta Dental Stadium. The goal of this partnership is to create a welcoming experience for visitors both inside and outside of the stadium. This starts with the custom granite sign that greets you as you approach the stadium, and continues with the landscaping that accentuates the rustic charm of the surrounding mill buildings. And don’t forget the accommodating walking paths we designed to welcome visitors approaching the park from all directions. And, of course, we'll also be making our way onto the field to maintain the grass and keep it in perfect playing condition. Look out for our hand-crafted flower arrangements in the stadium's luxury suites, too. When players hang up their spikes for the year, we'll also keep the stadium looking its best with snow removal that allows public and organized events to be held there throughout the year. It takes a lot of players to make a team truly extraordinary. While we might not be hitting any home runs ourselves, we’ll make sure the look of the stadium is always a smash hit. We're proud to be NH Fisher Cats fans ourselves, so being involved in revitalizing this ballpark is a pleasure we approach with all-business professionalism.
#  Copyright 2008-2015 Nokia Networks
#  Copyright 2016-     Robot Framework Foundation
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

from codecs import BOM_UTF8

from .robottypes import is_string


class Utf8Reader(object):
    """Read UTF-8 encoded text from a path or an already-open binary file.

    When constructed with a path, the file is opened by the reader and
    closed again on context-manager exit; a file object passed in by the
    caller is left open.
    """

    def __init__(self, path_or_file):
        self._close = is_string(path_or_file)
        self._file = open(path_or_file, 'rb') if self._close else path_or_file
        # IronPython handles BOM incorrectly if file not opened in binary mode:
        # https://ironpython.codeplex.com/workitem/34655
        if getattr(self._file, 'mode', 'rb') != 'rb':
            raise ValueError('Only files in binary mode accepted.')

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        # Only close files this reader opened itself.
        if self._close:
            self._file.close()

    def read(self):
        """Return the whole file as a decoded string, BOM stripped."""
        return self._decode(self._file.read())

    def readlines(self):
        """Yield decoded lines; the BOM is stripped from the first line only."""
        first = True
        for line in self._file.readlines():
            yield self._decode(line, remove_bom=first)
            first = False

    def _decode(self, raw, remove_bom=True):
        # The BOM is a byte sequence, so remove it before decoding.
        if remove_bom and raw.startswith(BOM_UTF8):
            raw = raw[len(BOM_UTF8):]
        return raw.decode('UTF-8')
English is a funny language. Take, for example, the alphabet itself. How many of you really know that & was once part of the English alphabet? Though it sounds a bit bizarre to a pair of modern ears, it’s very true. Just like the symbol itself, its name and origin are equally intriguing. So, if you love to learn about the mystery, read on. The symbol of the ampersand has been in use for more than 1,500 years. The earliest known use of & can be traced back to the Roman period. The Roman scribes practiced a cursive style of handwriting. Whenever they had to write et — the Latin word that still means and in today’s parlance — they intertwined the two letters e and t. This is what is believed to have given the ampersand its current shape. With repeated use, the style became a fashion and thereby took a dominant position in English writing. By the early 19th century, & had come to be treated as the 27th letter in the English alphabet system. It was during that period that schoolchildren literally concluded reciting ABCD with &. However, as it was very confusing to say “X, Y, Z, and”, the students took refuge in “X, Y, Z, and per se and”. The term per se literally means by itself. So, the students actually meant to say X, Y, Z, and by itself and. With time, “and per se and” became corrupted and subsequently gave rise to the term ampersand. Once a dominant character in the English alphabet, & is left today to appear mainly in the names of business corporations. This entry was posted in Knowledge Transfer and tagged &, & in english alphabet, ampersand, English alphabet, history of ampersand, origin of ampersand. Bookmark the permalink.
# Copyright (c) 2014, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses

from mock import patch, sentinel

from imapclient.imapclient import IMAPClient
from imapclient.test.util import unittest


class TestInit(unittest.TestCase):
    """Check that IMAPClient's constructor wires up imaplib correctly."""

    def setUp(self):
        self._patcher = patch('imapclient.imapclient.imaplib')
        self.imaplib = self._patcher.start()

    def tearDown(self):
        self._patcher.stop()

    def test_plain(self):
        # A plain connection uses IMAP4 on the default port 143.
        self.imaplib.IMAP4.return_value = sentinel.IMAP4

        client = IMAPClient('1.2.3.4')

        self.imaplib.IMAP4.assert_called_with('1.2.3.4', 143)
        self.assertEqual(client._imap, sentinel.IMAP4)
        self.assertEqual(client.host, '1.2.3.4')
        self.assertEqual(client.port, 143)
        self.assertEqual(client.ssl, False)
        self.assertEqual(client.stream, False)

    def test_SSL(self):
        # ssl=True switches to IMAP4_SSL on the default port 993.
        self.imaplib.IMAP4_SSL.return_value = sentinel.IMAP4_SSL

        client = IMAPClient('1.2.3.4', ssl=True)

        self.imaplib.IMAP4_SSL.assert_called_with('1.2.3.4', 993)
        self.assertEqual(client._imap, sentinel.IMAP4_SSL)
        self.assertEqual(client.host, '1.2.3.4')
        self.assertEqual(client.port, 993)
        self.assertEqual(client.ssl, True)
        self.assertEqual(client.stream, False)

    def test_SSL_kwargs(self):
        # Extra keyword arguments must be forwarded untouched to IMAP4_SSL.
        self.imaplib.IMAP4_SSL.return_value = sentinel.IMAP4_SSL

        client = IMAPClient('1.2.3.4', ssl=True,
                            keyfile='key.pem', certfile='cert.pem')
        self.imaplib.IMAP4_SSL.assert_called_with('1.2.3.4', 993,
                                                  keyfile='key.pem',
                                                  certfile='cert.pem')
        self.assertEqual(client._imap, sentinel.IMAP4_SSL)
        self.assertEqual(client.ssl, True)
        self.assertEqual(client.stream, False)

        client = IMAPClient('1.2.3.4', ssl=True, ssl_context=sentinel.context)
        self.imaplib.IMAP4_SSL.assert_called_with('1.2.3.4', 993,
                                                  ssl_context=sentinel.context)
        self.assertEqual(client.ssl, True)
        self.assertEqual(client.stream, False)

    def test_stream(self):
        # stream=True treats the "host" as a command run via IMAP4_stream.
        self.imaplib.IMAP4_stream.return_value = sentinel.IMAP4_stream

        client = IMAPClient('command', stream=True)

        self.imaplib.IMAP4_stream.assert_called_with('command')
        self.assertEqual(client._imap, sentinel.IMAP4_stream)
        self.assertEqual(client.host, 'command')
        self.assertEqual(client.port, None)
        self.assertEqual(client.ssl, False)
        self.assertEqual(client.stream, True)

    def test_ssl_and_stream_is_error(self):
        # ssl and stream are mutually exclusive.
        self.assertRaises(ValueError, IMAPClient, 'command',
                          ssl=True, stream=True)

    def test_stream_and_port_is_error(self):
        # An explicit port makes no sense for a stream connection.
        self.assertRaises(ValueError, IMAPClient, 'command',
                          stream=True, port=123)
As for studying and my classes, I think the difficult part is trying to find an efficient way of studying and getting things solidly lodged in my brain. College differs from highschool in that it isn't all laid out for you - you have to work your own method to get stuff done. Hopefully this week (week two) will make me feel a little bit more comfortable in a studying routine. And in the meantime, I'll probably be messing around till all hours on photobooth with Kristie and not reading psychology. I really love reading all your posts. I have been debating on going to school in Seattle for quite some time now. I have been super nervous about the whole situation because I am really shy, and have a hard time making friends. I love seeing how much fun you have been having, and I really think now is the time to take a risk and just go for it! I know it's crazy, but reading your blog posts have really helped me to not be so scared anymore. I think I could do it! Seattle isn't all that bad at all, as much as any other city. People here can be a bit cold seeming if you are from anywhere in the south, which I am, but I am used to spending enough time around Swedes to be used to it. There is a lot going on though and I would say it is quite fun but the price of living is pretty extraordinary as opposed what I'm used to. A lot of people live in outlying areas and commute, the bus system is great. I live in Capitol Hill neighborhood, and if you're looking for somewhat cheap apartments as opposed to dorms you can normally grab one for around 850$ a month (that's what I pay). If you're looking into going to UW though, the University District is way cheaper, you'd just have to be in the whole 'college town' type vibe. Really depends on what kind of lifestyle you want to live. Good luck! So glad you're having a whale of a time! Looking absolutely lovely m'dear! i love the cute sunnies! Now I'm also craving pasta for lunch. The Italian festival sounds like so much fun!
import datetime import sys begin = datetime.datetime.now() import pyodbc import numpy as np import matplotlib.pyplot as plt plt.style.use('ggplot') dataini = "'"+str(sys.argv[1]) horaini = str(sys.argv[2])+"'" datafim = "'"+str(sys.argv[3]) horafim = str(sys.argv[4])+"'" cnxn = pyodbc.connect('DRIVER={SQL Server};SERVER=.\SQLExpress;DATABASE=DTS_Teste;UID=sa;PWD=Elipse21') cursor = cnxn.cursor() string = ' ' for i in np.arange(501)[1:501]: if i == 500: string = string + 'T' + str(i) else: string = string + 'T' + str(i) + ',' querystring = 'SELECT'+string+' FROM Hist_SupportTest1 WHERE E3TimeStamp >= CAST('+str(dataini)+' '+str(horaini)+' AS datetime) AND E3TimeStamp <= CAST('+str(datafim)+' '+str(horafim)+' AS datetime) ' print querystring cursor.execute(querystring) rows = cursor.fetchall() intensity = [] for item in rows: intensity.append(item) #convert intensity (list of lists) to a numpy array for plotting intensity = np.array(intensity) x = np.arange(500) y = np.arange(len(rows)) #setup the 2D grid with Numpy x, y = np.meshgrid(x, y) print np.shape(intensity) #now just plug the data into pcolormesh, it's that easy! plt.pcolormesh(x, y, intensity) plt.colorbar() #need a colorbar to show the intensity scale plt.savefig('heatmap.png') end = datetime.datetime.now() print 'tempo:', end - begin plt.show()
This arts project used the brain as a metaphor for networking, developing pathways and making connections between people. It focussed on recent research, exploding some of the myths around disorders of the brain and creativity. During 2014 APEX ARTS devised and toured an artistic, musical performance, themed around neurodiversity, throughout Cornwall and then further afield, culminating in an appearance, seminar and workshop programme at the University of East London hosted by the Multimedia and Learning Disability Research and Development Group, The Rix Centre. The show had an emphasis on inspiring and informing and featured original music, writings, poems and reflections. These outcomes are shared on this website alongside other artistic submissions by participants. This provides a record of the project, but it can also be used as a learning tool.
# Python wrapper to the Maemo 4.0 "Chinook" liblocation.
# Wrapper version 0.1.
#
# Copyright 2008 by Robert W. Brewer < rwb123 at gmail dot com >
# Licensed under GNU LGPL v3.
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# Please see <http://www.gnu.org/licenses/> for a copy of the
# GNU Lesser General Public License.

########################################
# For a documentation overview of liblocation please see:
# http://maemo.org/development/documentation/how-tos/4-x/maemo_connectivity_guide.html#Location
#########################################

import gobject
import ctypes as C
from types import MethodType

########################################
# constants
########################################

# GPS fix status values (CLocationGPSDevice.status).
(STATUS_NO_FIX, STATUS_FIX, STATUS_DGPS_FIX) = range(3)

# Fix mode values (LocationGPSDeviceFix.mode).
(MODE_NOT_SEEN, MODE_NO_FIX, MODE_2D, MODE_3D) = range(4)

# Bit flags for LocationGPSDeviceFix.fields, marking which optional fix
# members actually hold valid data.
NONE_SET = 0
ALTITUDE_SET = 1<<0
SPEED_SET = 1<<1
TRACK_SET = 1<<2
CLIMB_SET = 1<<3
LATLONG_SET = 1<<4
TIME_SET = 1<<5

########################################
# ctypes structure definitions
########################################

class GTypeInstance(C.Structure):
    # Minimal stand-in for GLib's GTypeInstance: just the class pointer.
    _fields_ = [('g_class', C.c_ulong)]


class GObject(C.Structure):
    # Binary layout of a GObject header; must match libgobject's ABI.
    _fields_ = [('g_type_instance', GTypeInstance),
                ('ref_count', C.c_uint),
                ('qdata', C.c_void_p)]


class GPtrArray(C.Structure):
    # GLib pointer array: raw data pointer plus element count.
    _fields_ = [('pdata', C.c_void_p),
                ('len', C.c_uint)]


class LocationGPSDeviceSatellite(C.Structure):
    # Per-satellite information as reported by liblocation.
    _fields_ = [('prn', C.c_int),
                ('elevation', C.c_int),
                ('azimuth', C.c_int),
                ('signal_strength', C.c_int),
                ('in_use', C.c_int)]


class LocationGPSDeviceFix(C.Structure):
    # One GPS fix.  `mode` is one of the MODE_* constants; `fields` is a
    # bitmask of the *_SET flags above, telling which optional members are
    # valid.  The ep* members are the error estimates for their
    # corresponding values (ept -> time, eph -> position, etc.).
    _fields_ = [('mode', C.c_int),
                ('fields', C.c_uint),
                ('time', C.c_double),
                ('ept', C.c_double),
                ('latitude', C.c_double),
                ('longitude', C.c_double),
                ('eph', C.c_double),
                ('altitude', C.c_double),
                ('epv', C.c_double),
                ('track', C.c_double),
                ('epd', C.c_double),
                ('speed', C.c_double),
                ('eps', C.c_double),
                ('climb', C.c_double),
                ('epc', C.c_double),

                # private, not used yet
                ('pitch', C.c_double),
                ('roll', C.c_double),
                ('dip', C.c_double)]


class CLocationGPSDevice(C.Structure):
    # C-side view of a LocationGPSDevice GObject.
    _fields_ = [('parent', GObject),
                ('online', C.c_int),
                ('status', C.c_int),
                ('Cfix', C.POINTER(LocationGPSDeviceFix)),
                ('satellites_in_view', C.c_int),
                ('satellites_in_use', C.c_int),
                ('Csatellites', C.POINTER(GPtrArray))]  # of LocationGPSDeviceSatellite

    def sv_iter(self):
        """Yield each LocationGPSDeviceSatellite in the satellite array."""
        if not self.Csatellites:
            # NULL pointer: no satellite data available yet.
            return
        gar = self.Csatellites.contents
        sv_ptr_ptr = C.cast(gar.pdata,
                            C.POINTER(C.POINTER(LocationGPSDeviceSatellite)))
        for i in range(gar.len):
            yield sv_ptr_ptr[i].contents

    def __getattr__(self, name):
        # Pythonic accessors layered over the raw ctypes fields: expose
        # `fix` (dereferenced Cfix, or None) and `satellites` (a generator).
        try:
            # NOTE(review): ctypes.Structure defines no __getattr__ and
            # `name` is not passed here, so this call always raises
            # AttributeError and control falls through to the handlers
            # below -- confirm whether __getattribute__ was intended.
            return C.Structure.__getattr__(self)
        except AttributeError:
            if name == 'fix':
                if self.Cfix:
                    return self.Cfix.contents
                else:
                    return None
            if name == 'satellites':
                return self.sv_iter()
            raise AttributeError


class CLocationGPSDControl(C.Structure):
    # C-side view of the LocationGPSDControl GObject.
    _fields_ = [('parent', GObject),
                ('can_control', C.c_int)]

################################################
# gobject C->Python boilerplate from pygtk FAQ
################################################

# this boilerplate can convert a memory address
# into a proper python gobject.
class _PyGObject_Functions(C.Structure):
    # Function table exported by pygobject via gobject._PyGObject_API; the
    # field order must match pygobject's C API struct.
    _fields_ = [
        ('register_class',
         C.PYFUNCTYPE(C.c_void_p, C.c_char_p, C.c_int,
                      C.py_object, C.py_object)),
        ('register_wrapper',
         C.PYFUNCTYPE(C.c_void_p, C.py_object)),
        ('register_sinkfunc',
         C.PYFUNCTYPE(C.py_object, C.c_void_p)),
        ('lookupclass',
         C.PYFUNCTYPE(C.py_object, C.c_int)),
        ('newgobj',
         C.PYFUNCTYPE(C.py_object, C.c_void_p)),
        ]


class PyGObjectCPAI(object):
    """Access to the pygobject C API: wraps raw GObject addresses as proper
    Python gobject instances (boilerplate from the pygtk FAQ)."""

    def __init__(self):
        # NOTE(review): PyCObject exists only on Python 2; a Python 3 port
        # would need the PyCapsule API instead.
        addr = C.pythonapi.PyCObject_AsVoidPtr(
            C.py_object(gobject._PyGObject_API))
        self._api = _PyGObject_Functions.from_address(addr)

    def pygobject_new(self, addr):
        """Return a Python gobject wrapping the GObject at address *addr*."""
        return self._api.newgobj(addr)

# call like this:
# Cgobject = PyGObjectCPAI()
# Cgobject.pygobject_new(memory_address)

# to get memory address from a gobject:
# address = hash(obj)

###################################
# pythonized functions
###################################

def gps_device_get_type():
    # GType of LocationGPSDevice, needed to instantiate one via g_object_new.
    return loc_gps_type()


def gps_device_get_new():
    """Create a new LocationGPSDevice wrapped as a Python gobject.

    The returned object is augmented with a struct() method that returns
    the CLocationGPSDevice ctypes view of the underlying C object.
    """
    def struct(self):
        # hash(obj) yields the C address of the wrapped GObject (see the
        # pygtk FAQ note above).
        ptr = C.cast(C.c_void_p(hash(self)),
                     C.POINTER(CLocationGPSDevice))
        return ptr.contents

    # create C gobject for gps device
    cgps_dev = gobj_new(gps_device_get_type(), None)
    # wrap in python gobject
    pyobj = Cgobject.pygobject_new(cgps_dev)
    # add a struct() method to hide the ctypes stuff.
    setattr(pyobj, 'struct', MethodType(struct, pyobj, pyobj.__class__))
    return pyobj


def gps_device_reset_last_known(gpsdevice):
    """Clear the device's last known fix."""
    libloc.location_gps_device_reset_last_known(C.c_void_p(hash(gpsdevice)))


def gps_device_start(gpsdevice):
    """Start the GPS device (begin producing fixes)."""
    libloc.location_gps_device_start(C.c_void_p(hash(gpsdevice)))


def gps_device_stop(gpsdevice):
    """Stop the GPS device."""
    libloc.location_gps_device_stop(C.c_void_p(hash(gpsdevice)))


def gpsd_control_get_default():
    """Return the default gpsd control object as a Python gobject,
    augmented with a struct() method exposing its ctypes view."""
    def struct(self):
        ptr = C.cast(C.c_void_p(hash(self)),
                     C.POINTER(CLocationGPSDControl))
        return ptr.contents

    gpsd_control_ptr = loc_gpsd_control()
    # wrap in python object
    pyobj = Cgobject.pygobject_new(gpsd_control_ptr)
    # add a struct() method to hide the ctypes stuff.
    setattr(pyobj, 'struct', MethodType(struct, pyobj, pyobj.__class__))
    return pyobj


def gpsd_control_start(gpsdcontrol):
    """Ask gpsd to start up the positioning hardware."""
    libloc.location_gpsd_control_start(C.c_void_p(hash(gpsdcontrol)))


def gpsd_control_stop(gpsdcontrol):
    """Ask gpsd to shut down the positioning hardware."""
    libloc.location_gpsd_control_stop(C.c_void_p(hash(gpsdcontrol)))


def gpsd_control_request_status(gpsdcontrol):
    """Request a status update from gpsd."""
    libloc.location_gpsd_control_request_status(C.c_void_p(hash(gpsdcontrol)))

########################################
# initialize library
########################################

# load C libraries
libloc = C.CDLL('liblocation.so.0')
libgobject = C.CDLL('libgobject-2.0.so.0')
Cgobject = PyGObjectCPAI()

# inform ctypes of necessary function prototype information
loc_gps_type = libloc.location_gps_device_get_type
loc_gps_type.restype = C.c_ulong

gobj_new = libgobject.g_object_new
gobj_new.restype = C.c_void_p

loc_gpsd_control = libloc.location_gpsd_control_get_default
loc_gpsd_control.restype = C.POINTER(CLocationGPSDControl)

libloc.location_distance_between.argtypes = [C.c_double, C.c_double,
                                             C.c_double, C.c_double]
libloc.location_distance_between.restype = C.c_double
If you have been accused of operating a vehicle under the influence (OVI) or driving under the influence (DUI) in Ohio, you need to know that the consequences could result in legal penalties that are more serious than the embarrassment and humiliation that many experience after an arrest. DUI convictions are serious business. Being convicted of an OVI offense can have a serious impact on your life. Penalties required by law include mandatory jail time or a 72-hour alcohol intervention program for first-time offenders. Repeat offenders could face costly fines, and high-test offenders could be subject to special, restricted license plates. In extreme cases, drivers may be sentenced to forfeit their vehicles. At The Law Offices of Saia & Piatt, Inc., we are serious about representing our clients after an OVI arrest. Do not risk lengthy jail time sentences, expensive fines, or license suspension; we prepare extensively in order to achieve the best possible resolution for you. If you have been arrested for drunk driving, call a Columbus OVI attorney from The Law Offices of Saia & Piatt, Inc. immediately. Felony offenses for multiple OVI charges could lead to serious penalties. Any subsequent offenses after a fourth offense could result in penalties, including up to 5 years in prison, up to $10,500 in fines, vehicle forfeiture, and license suspension for life. In addition, there are non-legal penalties such as professional licensure problems, higher insurance rates, lost income due to court appearances and sentencing obligations, and towing and storage fees. Convicted persons typically must rely on public transportation and face strained relationships or possible loss of employment. No matter what charges you may be facing, it will be crucial to have a competent, qualified Columbus OVI attorney by your side. 
At The Law Offices of Saia & Piatt, Inc., Attorney Jon Saia is an active member of the National College of DUI Defense and has been included in the Best Lawyers® listing of Best Lawyers in America. Attorney Richard Piatt is a nationally recognized OVI defense lawyer. We will not be afraid to take your case before a jury! No matter how complex you think your case is, we can utilize our knowledge and experience to your benefit. You can rest assured that from the moment you retain our firm, your rights will be protected every step of the way. Living with a DUI conviction could have long-lasting effects on your life. You need an aggressive attorney who concentrates on defending clients accused of drunk driving cases. Contact The Law Offices of Saia & Piatt, Inc. to schedule a free case evaluation with one of our knowledgeable trial attorneys today. Our phone lines are open 24 hours a day, 7 days a week. There's nothing to lose but time, so call us now! We proudly serve areas throughout Ohio, including Akron, Canton, Cincinnati and Dayton.
# Copyright (C) 2012 Alexander Jones
#
# This file is part of Manitae.
#
# Manitae is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Manitae is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Manitae. If not, see <http://www.gnu.org/licenses/>.

from PyQt4 import QtCore


class ManitaeLogger(QtCore.QObject):
    """Formats log entries as Qt rich-text paragraphs and emits them on a signal."""

    # Emitted with one HTML-formatted log entry per append_* call.
    send_entry = QtCore.pyqtSignal(str)

    # Shared paragraph style; warnings add a red text colour on top of it.
    _BASE_STYLE = ("margin-top:0px; margin-bottom:0px; margin-left:0px; "
                   "margin-right:0px; -qt-block-indent:0; text-indent:0px; "
                   "white-space:pre-wrap")

    def __init__(self):
        super(ManitaeLogger, self).__init__()

    def append_notice(self, notice):
        """Emit *notice* as a normally-coloured log paragraph."""
        self.send_entry.emit(self._wrap(notice, self._BASE_STYLE))

    def append_warning(self, warning):
        """Emit *warning* as a red log paragraph."""
        self.send_entry.emit(
            self._wrap(warning, self._BASE_STYLE + "; color:#c00000"))

    @staticmethod
    def _wrap(text, style):
        # Wrap one entry in a <p> with the given inline style; the trailing
        # <br/> and newline match the markup the log view expects.
        return "<p style=\"" + style + "\">" + text + "</p><br/>\n"
This is clearly very true – make no mistake that your customers can and will walk for any number of reasons. Whether you’re an established business or whether you’re just starting up, what is one of the most important assets you will ever have? Your customers of course! They are the bread and butter of any business so it’s only right you treat them properly – this could be with special offers or simply good customer service. If you don’t, they’re gone and someone else will reap the rewards. Unfortunately this isn’t always the case and it’s not unusual for any business to lose existing customers because their focus isn’t on them. My advice is always treat your customers well, listen to them and never ever take them for granted. And remember, your customers not only bring in the money, they’ll also bring in more customers, so listen up and be smart because retaining them is just as difficult as getting them in the first place.
# Generic UPX unpacker built on the miasm x86-32 Windows sandbox: runs the
# packed binary until the unpacking stub's final jump, then dumps the
# in-memory image and rebuilds its import table.
from __future__ import print_function
import os
import logging
from pdb import pm
from miasm.loader import pe
from miasm.analysis.sandbox import Sandbox_Win_x86_32
from miasm.os_dep.common import get_win_str_a


# User defined methods

def kernel32_GetProcAddress(jitter):
    """Hook on GetProcAddress to note where UPX stores import pointers"""
    ret_ad, args = jitter.func_args_stdcall(["libbase", "fname"])
    # When the function is called, EBX is a pointer to the destination buffer
    dst_ad = jitter.cpu.EBX
    logging.error('EBX ' + hex(dst_ad))
    # Handle ordinal imports: values below 0x10000 are ordinals, anything
    # else is a pointer to an ASCII function name.
    fname = (args.fname if args.fname < 0x10000
             else get_win_str_a(jitter, args.fname))
    logging.error(fname)
    # Get the generated address of the library, and store it in memory to
    # dst_ad
    ad = sb.libs.lib_get_add_func(args.libbase, fname, dst_ad)
    # Add a breakpoint in case of a call on the resolved function
    # NOTE: never happens in UPX, just for skeleton
    jitter.handle_function(ad)
    jitter.func_ret_stdcall(ret_ad, ad)


parser = Sandbox_Win_x86_32.parser(description="Generic UPX unpacker")
parser.add_argument("filename", help="PE Filename")
parser.add_argument('-v', "--verbose",
                    help="verbose mode", action="store_true")
parser.add_argument("--graph",
                    help="Export the CFG graph in graph.dot",
                    action="store_true")
options = parser.parse_args()
# Load the PE headers into the sandbox address space as well.
options.load_hdr = True
sb = Sandbox_Win_x86_32(options.filename, options, globals(),
                        parse_reloc=False)

if options.verbose is True:
    logging.basicConfig(level=logging.INFO)
else:
    logging.basicConfig(level=logging.WARNING)

if options.verbose is True:
    print(sb.jitter.vm)

# Ensure there is one and only one leave (for OEP discovering): the single
# "bad block" of the stub's CFG is where the unpacked code is entered.
mdis = sb.machine.dis_engine(sb.jitter.bs)
mdis.dont_dis_nulstart_bloc = True
asmcfg = mdis.dis_multiblock(sb.entry_point)
leaves = list(asmcfg.get_bad_blocks())
assert(len(leaves) == 1)
l = leaves.pop()
logging.info(l)
end_offset = mdis.loc_db.get_location_offset(l.loc_key)
logging.info('final offset')
logging.info(hex(end_offset))

# Export CFG graph (dot format)
if options.graph is True:
    open("graph.dot", "w").write(asmcfg.dot())

if options.verbose is True:
    print(sb.jitter.vm)


def update_binary(jitter):
    """Breakpoint callback at the end of the UPX stub: record the real OEP
    and copy each unpacked section back from sandbox memory into the PE."""
    sb.pe.Opthdr.AddressOfEntryPoint = sb.pe.virt2rva(jitter.pc)
    logging.info('updating binary')
    for s in sb.pe.SHList:
        sdata = sb.jitter.vm.get_mem(sb.pe.rva2virt(s.addr), s.rawsize)
        sb.pe.rva.set(s.addr, sdata)

    # Stop execution
    jitter.run = False
    return False


# Set callbacks
sb.jitter.add_breakpoint(end_offset, update_binary)

# Run
sb.run()

# Rebuild PE
# Alternative solution: miasm.jitter.loader.pe.vm2pe(sb.jitter, out_fname,
#                                                    libs=sb.libs, e_orig=sb.pe)
new_dll = []

sb.pe.SHList.align_sections(0x1000, 0x1000)
logging.info(repr(sb.pe.SHList))

# Drop the packed import descriptors and regenerate them from the imports
# resolved during execution (collected by the GetProcAddress hook).
sb.pe.DirRes = pe.DirRes(sb.pe)
sb.pe.DirImport.impdesc = None
logging.info(repr(sb.pe.DirImport.impdesc))
new_dll = sb.libs.gen_new_lib(sb.pe)
logging.info(new_dll)
sb.pe.DirImport.impdesc = []
sb.pe.DirImport.add_dlldesc(new_dll)
# Place the rebuilt import table in a fresh section of its own.
s_myimp = sb.pe.SHList.add_section(name="myimp",
                                   rawsize=len(sb.pe.DirImport))
logging.info(repr(sb.pe.SHList))
sb.pe.DirImport.set_rva(s_myimp.addr)

# XXXX TODO
sb.pe.NThdr.optentries[pe.DIRECTORY_ENTRY_DELAY_IMPORT].rva = 0

# Write the unpacked binary next to the input, e.g. foo.exe -> foo_exe_unupx.bin
bname, fname = os.path.split(options.filename)
fname = os.path.join(bname, fname.replace('.', '_'))
open(fname + '_unupx.bin', 'wb').write(bytes(sb.pe))
While everyone knows that content marketing is a critical component of inbound marketing strategy, many fail when it comes to execution. By exploring the contents of this ebook, you’ll discover key strategies that, when implemented, will help your brand receive the online attention it deserves. Follow OneIMS as we journey through effective content marketing tactics.
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com

import json

from flask import request, current_app

from ggrc.fulltext import get_indexer
from .common import DateTimeEncoder
from .util import url_for


def search():
    """Dispatch a fulltext search request.

    Requires a ``q`` query parameter holding the search terms; responds 400
    without it.  With ``group_by_type=true`` the results are grouped into a
    dict keyed by model type; otherwise a flat list is returned.
    """
    terms = request.args.get('q')
    if not terms:
        # Covers both a missing parameter (None) and an empty string; the
        # old `not terms or terms is None` second test was redundant.
        return current_app.make_response((
            'Query parameter "q" specifying search terms must be provided.',
            400,
            [('Content-Type', 'text/plain')],
        ))

    should_group_by_type = request.args.get('group_by_type')
    if should_group_by_type is not None and \
            should_group_by_type.lower() == 'true':
        return group_by_type_search(terms)
    return basic_search(terms)


def do_search(terms, list_for_type):
    """Run the fulltext search for *terms* and append one entry per hit.

    *list_for_type* maps a model-type name to the list that should receive
    entries of that type, which lets callers accumulate results either
    flat or grouped.
    """
    indexer = get_indexer()
    results = indexer.search(terms)
    for result in results:
        result_id = result.key  # renamed from `id` to avoid shadowing the builtin
        model_type = result.type
        entries_list = list_for_type(model_type)
        entries_list.append({
            'id': result_id,
            'type': model_type,
            'href': url_for(model_type, id=result_id),
        })


def make_search_result(entries):
    """Wrap *entries* in the standard JSON search-result envelope (HTTP 200)."""
    return current_app.make_response((
        json.dumps({
            'results': {
                'selfLink': request.url,
                'entries': entries,
            }
        }, cls=DateTimeEncoder),
        200,
        [('Content-Type', 'application/json')],
    ))


def basic_search(terms):
    """Search returning a single flat list of entries."""
    entries = []

    def list_for_type(_model_type):
        # Every type accumulates into the same flat list.
        return entries

    do_search(terms, list_for_type)
    return make_search_result(entries)


def group_by_type_search(terms):
    """Search returning entries grouped into a dict keyed by model type."""
    entries = {}

    def list_for_type(model_type):
        # setdefault both creates the per-type list on first use and
        # returns the existing one afterwards.
        return entries.setdefault(model_type, [])

    do_search(terms, list_for_type)
    return make_search_result(entries)
Here are the new Monthly GK Capsules. This time we are going to share the official PDF file of AAA-Bright Academy, which contains the Monthly GK updates of March 2019. So if you are preparing for any Government, Bank, Railways, or SSC examination, this PDF file will be very helpful to you. All the best to all participants for the upcoming examinations.
from django.apps import AppConfig
from django.contrib import admin
from django.db import models


class AutoDisplayAdmin(admin.ModelAdmin):
    """ModelAdmin that auto-populates list_display / list_display_links
    from the model's concrete fields."""

    # Internal field types that may serve as changelist link columns.
    # BUGFIX: was 'UrlField' -- Field.get_internal_type() returns the class
    # name, i.e. 'URLField', so URL fields were silently never matched.
    list_links_fields = ('CharField', 'IntegerField', 'AutoField',
                         'DateField', 'DateTimeField', 'SlugField',
                         'BigIntegerField', 'EmailField', 'BooleanField',
                         'DecimalField', 'FloatField', 'IPAddressField',
                         'GenericIPAddressField', 'NullBooleanField',
                         'PositiveIntegerField', 'PositiveSmallIntegerField',
                         'URLField', 'TimeField',)
    # Field types shown as columns: everything linkable plus foreign keys.
    list_display_fields = list_links_fields + ('ForeignKey',)

    def __init__(self, *args, **kwargs):
        super(AutoDisplayAdmin, self).__init__(*args, **kwargs)
        # args[0] is the model class being registered.
        self.list_display = []
        self.list_display_links = []
        for field in args[0]._meta.fields:
            internal_type = field.get_internal_type()
            if internal_type in self.list_display_fields:
                self.list_display.append(field.name)
            # At most the first two linkable columns become row links.
            if (len(self.list_display_links) < 2 and
                    internal_type in self.list_links_fields):
                self.list_display_links.append(field.name)


class DjangoAutoRegisterConfig(AppConfig):
    """App config that registers every unregistered model with the admin."""

    name = 'django_autoregister'
    verbose_name = 'Django model auto registration plugin'

    def ready(self):
        # NOTE(review): models.get_models() is deprecated in Django >= 1.7
        # (removed in 1.9); django.apps.apps.get_models() is the modern
        # equivalent -- confirm the targeted Django version before changing.
        all_models = models.get_models()
        registered_models = admin.site._registry
        for model in all_models:
            if model in registered_models:
                continue
            admin.site.register(model, AutoDisplayAdmin)
Second, what I am seeing in these arguments is a very dangerous practice called "cherry picking"; selectively picking out data that support your argument and ignoring contrary evidence. It certainly looks interesting that Earth, Mars, Jupiter, Triton, and Pluto are warming, and if that’s all you heard then it seems logical to think maybe the Sun is the cause. But they aren’t the only objects in the solar system. What about Mercury, Venus, Saturn, Uranus… and if you include Triton to support your case, you’d better also take a good look at the nearly 100 other sizable moons in the solar system. Are they warming too? I have heard nothing about them in these arguments, and I suspect it’s because there’s not much to say. If they are not warming, then deniers won’t mention them, and scientists won’t report it because there is nothing to report ("News flash: Phobos still the same temperature!" is unlikely to get into Planetary Science journals). However, I can’t say that with conviction, because the absence of evidence is not evidence of absence. Any planetary scientists reading this blog entry, please contact me. I’m interested in hearing more. Mars: To start, is Mars even warming globally at all? Perhaps not — it might be a local effect. And if it is global, there already is an idea of why that might be happening: it would be due to periodic changes in its orbit, called Milankovitch cycles. The Earth has them too, and they do affect our climate. And the guy who is proposing that the Sun is warming Mars doesn’t think CO2 is a greenhouse gas. I think his science is a little suspect. His reasoning is certainly specious– he says if Mars and Earth are both warming, it must be due to the Sun. As I point out above, that is clearly not necessarily the case. Even if this martian warming turns out to be true, it may just be a natural effect of the shape of the orbit of Mars. Of course it’s possible. 
There are links to the Sun’s behavior and Earth’s climate (look up the Maunder minimum for some interesting reading), and it would be foolish to simply deny this. However, this is a vastly complex and difficult system to understand, and simply claiming "Yes it’s due to the Sun" or "No it’s not due to the Sun" is certainly naive. 5) There are political and ideological ramifications of global warming, and a lot of people — politicians, in fact — have a lot at stake and are known to twist science to meet their needs. With all of these facts lined up, it’s clear that the one thing we need to do is be very, very careful when someone comes in and makes a broad, sweeping statement about global warming’s cause, especially when they have ulterior motives for saying what they do. This may sound like an ad hominem, but we have seen, over and over, how science gets abused these past few years by those in power. A jaundiced eye is critical in science, and a little skepticism — or in this case, a lot — is a good thing.
# ~*~ coding: utf-8 ~*~
#

import os

import paramiko
from paramiko.ssh_exception import SSHException

from common.utils import get_object_or_none
from .models import Asset, SystemUser, Label


def get_assets_by_id_list(id_list):
    """Return the active assets whose ids appear in *id_list*."""
    return Asset.objects.filter(id__in=id_list, is_active=True)


def get_system_users_by_id_list(id_list):
    """Return the system users whose ids appear in *id_list*."""
    return SystemUser.objects.filter(id__in=id_list)


def get_assets_by_fullname_list(hostname_list):
    """Return assets matched by full name (delegates to the Asset model)."""
    return Asset.get_queryset_by_fullname_list(hostname_list)


def get_system_user_by_name(name):
    """Return the SystemUser named *name*, or None if it does not exist."""
    return get_object_or_none(SystemUser, name=name)


def get_system_user_by_id(id):
    """Return the SystemUser with primary key *id*, or None if not found."""
    return get_object_or_none(SystemUser, id=id)


class LabelFilter:
    """Filter-backend mixin: narrows the queryset by label name=value pairs
    taken from the request's query parameters."""

    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        params = self.request.query_params
        # Only query-parameter keys that are actual label names count.
        known_labels = set(Label.objects.values_list('name', flat=True))
        for name in known_labels & set(params.keys()):
            # One filter() call per pair, so both lookups must match the
            # same related label row.
            queryset = queryset.filter(labels__name=name,
                                       labels__value=params.get(name))
        return queryset
9. 1 cup berries, chopped small. 10. 1/2 cup rolled oats, ground to powder. 1. Combine dry ingredients in a bowl. Combine wet ingredients in a separate bowl. 2. Mix the two together gently. 3. Stir in chopped berries. Batter will be lumpy; don’t overmix. 1. Keep your muffin cups well greased; mine were not, so they were sticking in places. Very nice and yummy cupcakes…. You can send this recipe to my ongoing event Berries-Strawberry-Dessert, if you want to!! Wow, those look so tempting.
""" CBMPy: CBTools module ===================== PySCeS Constraint Based Modelling (http://cbmpy.sourceforge.net) Copyright (C) 2009-2018 Brett G. Olivier, VU University Amsterdam, Amsterdam, The Netherlands This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/> Author: Brett G. Olivier Contact email: bgoli@users.sourceforge.net Last edit: $Author: bgoli $ ($Id: CBTools.py 710 2020-04-27 14:22:34Z bgoli $) """ # gets rid of "invalid variable name" info # pylint: disable=C0103 # gets rid of "line to long" info # pylint: disable=C0301 # use with caution: gets rid of module xxx has no member errors (run once enabled) # pylint: disable=E1101 # preparing for Python 3 port from __future__ import division, print_function from __future__ import absolute_import # from __future__ import unicode_literals import os import time import re import pprint import gzip import zipfile try: import pickle except ImportError: import cPickle as pickle cDir = os.path.dirname(os.path.abspath(os.sys.argv[0])) import numpy from . 
import CBModel
from .CBCommon import (
    HAVE_PYPARSING,
    checkChemFormula,
    pp_chemicalFormula,
    extractGeneIdsFromString,
)
from .CBCommon import processSpeciesChargeChemFormulaAnnot, pyparsing

_PPR_ = pprint.PrettyPrinter()

from .CBConfig import __CBCONFIG__ as __CBCONFIG__

__DEBUG__ = __CBCONFIG__['DEBUG']
__version__ = __CBCONFIG__['VERSION']


def createTempFileName():
    """
    Return a temporary filename: integer seconds since the epoch, as a string.
    """
    return str(time.time()).split('.')[0]


# TODO: compress
def storeObj(obj, filename, compress=False):
    """
    Stores a Python *obj* as a serialised binary object in *filename*.dat

     - *obj* a python object
     - *filename* the base filename
     - *compress* [False] use gzip compression not *implemented*

    """
    if filename[-4:] != '.dat':
        filename = filename + '.dat'
    # fix: the Python 2 file() builtin does not exist in Python 3; use open()
    # in a context manager so the handle is closed even if pickling fails
    with open(filename, 'wb') as F:
        pickle.dump(obj, F, protocol=2)
    print('Object serialised as {}'.format(filename))


def loadObj(filename):
    """
    Loads a serialised Python pickle from *filename*.dat returns the Python object(s)
    """
    if filename[-4:] != '.dat':
        filename = filename + '.dat'
    assert os.path.exists(filename), '\nFile \"{}\" does not exist'.format(filename)
    # fix: file() -> open() (removed in Python 3)
    with open(filename, 'rb') as F:
        obj = pickle.load(F)
    return obj


def deSerialize(s):
    """
    Deserializes a serialised object contained in a string
    """
    return pickle.loads(s)


def deSerializeFromDisk(filename):
    """
    Loads a serialised Python pickle from *filename* returns the Python object(s)
    """
    assert os.path.exists(filename), '\nFile \"{}\" does not exist'.format(filename)
    # fix: file() -> open() (removed in Python 3)
    with open(filename, 'rb') as F:
        obj = pickle.load(F)
    return obj


def addStoichToFBAModel(fm):
    """
    Build stoichiometry: this method has been refactored into the model class - cmod.buildStoichMatrix()
    """
    fm.buildStoichMatrix()


def addSinkReaction(fbam, species, lb=0.0, ub=1000.0):
    """
    Adds a sink reaction that consumes a model *species* so that X -->

     - *fbam* an fba model object
     - *species* a valid species name
     - *lb* lower flux bound [default = 0.0]
     - *ub* upper flux bound [default = 1000.0]

    """
    assert species in fbam.getSpeciesIds(), '\n%s is not a valid species' % species
    # a negative lower bound allows flux back into the model, i.e. reversible
    if lb < 0.0:
        reversible = True
    else:
        reversible = False
    Rname = species + '_sink'
    R = CBModel.Reaction(
        Rname, name='%s sink reaction' % species, reversible=reversible
    )
    Su = CBModel.Reagent(Rname + species, species, -1.0)
    R.addReagent(Su)
    R.is_exchange = True
    clb = CBModel.FluxBound(Rname + '_lb', Rname, 'greaterEqual', lb)
    cub = CBModel.FluxBound(Rname + '_ub', Rname, 'lessEqual', ub)
    fbam.addReaction(R, create_default_bounds=False)
    fbam.addFluxBound(clb)
    fbam.addFluxBound(cub)
    print(
        '\n***\nCreated new reaction {} with bounds ({} : {})\n***\n'.format(
            Rname, lb, ub
        )
    )
""" assert species in fbam.getSpeciesIds(), '\n%s is not a valid species' % species if lb < 0.0: reversible = True else: reversible = False Rname = species + '_sink' R = CBModel.Reaction( Rname, name='%s sink reaction' % species, reversible=reversible ) Su = CBModel.Reagent(Rname + species, species, -1.0) R.addReagent(Su) R.is_exchange = True clb = CBModel.FluxBound(Rname + '_lb', Rname, 'greaterEqual', lb) cub = CBModel.FluxBound(Rname + '_ub', Rname, 'lessEqual', ub) fbam.addReaction(R, create_default_bounds=False) fbam.addFluxBound(clb) fbam.addFluxBound(cub) print( '\n***\nCreated new reaction {} with bounds ({} : {})\n***\n'.format( Rname, lb, ub ) ) # TODO: check this def addSourceReaction(fbam, species, lb=0.0, ub=1000.0): """ Adds a source reactions that produces a model *species* so that --> X - *fbam* an fba model object - *species* a valid species name - *lb* lower flux bound [default = 0.0] - *ub* upper flux bound [default = 1000.0] Note reversiblity is determined by the lower bound, default 0 = irreversible. If negative then reversible. 
""" assert species in fbam.getSpeciesIds(), '\n%s is not a valid species' % species if lb < 0.0: reversible = True else: reversible = False Rname = species + '_src' R = CBModel.Reaction( Rname, name='%s source reaction' % species, reversible=reversible ) Su = CBModel.Reagent(Rname + species, species, 1.0) R.addReagent(Su) R.is_exchange = True clb = CBModel.FluxBound(Rname + '_lb', Rname, 'greaterEqual', lb) cub = CBModel.FluxBound(Rname + '_ub', Rname, 'lessEqual', ub) fbam.addReaction(R, create_default_bounds=False) fbam.addFluxBound(clb) fbam.addFluxBound(cub) print( '\n***\nCreated new reaction {} with bounds ({} : {})\n***\n'.format( Rname, lb, ub ) ) def findDeadEndMetabolites(fbam): """ Finds dead-end (single reaction) metabolites rows in N with a single entry), returns a list of (metabolite, reaction) ids """ fbam.buildStoichMatrix() orphaned_list = [] for rr in range(fbam.N.array.shape[0]): if (fbam.N.array[rr, :] != 0.0).sum() == 1: if __DEBUG__: print(fbam.N.array[rr, :]) if __DEBUG__: print(fbam.N.row[rr]) for c in range(fbam.N.array.shape[1]): if fbam.N.array[rr, c] != 0.0: orphaned_list.append((fbam.N.row[rr], fbam.N.col[c])) return orphaned_list def findDeadEndReactions(fbam): """ Finds dead-end (single substrate/product) reactions (cols in N with a single entry), returns a list of (metabolite, reaction) ids """ fbam.buildStoichMatrix() orphaned_list = [] for cc in range(fbam.N.array.shape[1]): if (fbam.N.array[:, cc] != 0.0).sum() == 1: if __DEBUG__: print(fbam.N.array[:, cc]) if __DEBUG__: print(fbam.N.col[cc]) for r in range(fbam.N.array.shape[0]): if fbam.N.array[r, cc] != 0.0: orphaned_list.append((fbam.N.row[r], fbam.N.col[cc])) return orphaned_list def setSpeciesPropertiesFromAnnotations( fbam, overwriteCharge=False, overwriteChemFormula=False ): """ This will attempt to set the model Species properties from the annotation. With the default options it will only replace missing data. 
With ChemicalFormula this is easy to detect however charge may have an "unknown value" of 0. Setting the optional values to true will replace any existing value with any valid annotation. - *overwriteChemFormula* [default=False] - *overwriteCharge* [default=False] """ for s_ in fbam.species: try: processSpeciesChargeChemFormulaAnnot( s_, getFromName=False, overwriteCharge=overwriteCharge, overwriteChemFormula=overwriteChemFormula, ) except Exception: print( 'processSpeciesChargeChemFormulaAnnot failed for species with id: {}'.format( s_.getId() ) ) def fixReversibility(fbam, auto_correct=False): """ Set fluxbound lower bound from reactions reversibility information. - *fbam* and FBAModel instance - *auto_correct* (default=False) if True automatically sets lower bound to zero if required, otherwise prints a warning if false. """ for c in fbam.flux_bounds: R = c.reaction # print R O = c.operation # print O V = c.value # print V R_obj = fbam.reactions[fbam.getReactionIds().index(c.reaction)] RE = R_obj.reversible # print RE if O in ['greater', 'greaterEqual']: if not RE and float(V) < 0.0: print( 'Warning {} is not reversible and lower bound is {}.'.format(R, V) ) if auto_correct: print('Resetting {} lower bound ({}) to zero'.format(R, V)) c.value = 0.0 else: print( 'Reaction ({}) reversible={} inconsistent with fluxbound lower bound ({}) run with auto_correct=True to reset lower bound.'.format( R, RE, V ) ) time.sleep(1) def splitReversibleReactions(fba, selected_reactions=None): """ Split a (set of) reactions into reversible reactions returns a copy of the original model R1: A = B R1f: A -> B R1r: B -> A - *fba* an instantiated CBMPy model object - *selected_reactions* if a reversible reaction id is in here split it """ if selected_reactions is None: selected_reactions = [] M = fba.clone() if len(selected_reactions) == 0: selected_reactions = M.getReversibleReactionIds() for r_ in M.getReversibleReactionIds(): if r_ in selected_reactions: 
splitSingleReversibleReaction(M, r_) else: pass return M def splitSingleReversibleReaction(fba, rid, fwd_id=None, rev_id=None): """ Split a single reversible reaction into two irreversible reactions, returns the original reversible reaction and bounds while deleting them from model. R1: A = B R1_fwd: A -> B R1_rev: B -> A - *fba* an instantiated CBMPy model object - *rid* a valid reaction id - *fwd_id* [default=None] the new forward reaction id, defaults to rid_fwd - *rev_id* [default=None] the new forward reaction id, defaults to rid_rev """ R = fba.getReaction(rid) assert R != None print('Reversible reaction splitter is processing: {}'.format(rid)) Rf = R.clone() Rb = R.clone() Rf.setAnnotation('cbmpy_split_fwd', rid) Rb.setAnnotation('cbmpy_split_rev', rid) RB = fba.getReactionBounds(rid) LB = UB = EB = None if RB[1] != None and RB[2] != None: assert ( RB[1] <= RB[2] ), 'ERROR: For reaction splitting ({}): LowerBound ({}) must be LessEqual to UpperBound ({})'.format( rid, round(RB[1], 6), round(RB[2], 6) ) if RB[1] != None: LB = fba.getFluxBoundByReactionID(rid, 'lower') if RB[2] != None: UB = fba.getFluxBoundByReactionID(rid, 'upper') if RB[3] != None: EB = fba.getFluxBoundByReactionID(rid, 'equality') fba.deleteReactionAndBounds(rid) if fwd_id is None: fwd_id = Rf.getId() + '_fwd' Rf.setPid(fwd_id) if rev_id is None: rev_id = Rb.getId() + '_rev' Rb.setPid(rev_id) Rf.reversible = False Rb.reversible = False for rr_ in Rf.reagents: rr_.setPid(rr_.getId() + '_fwd') for rr_ in Rb.reagents: rr_.setCoefficient(-1.0 * rr_.getCoefficient()) rr_.setPid(rr_.getId() + '_rev') fba.addReaction(Rf, create_default_bounds=False) fba.addReaction(Rb, create_default_bounds=False) if EB != None: fba.createReactionLowerBound(Rf.getId(), EB.getValue()) fba.createReactionUpperBound(Rf.getId(), EB.getValue()) fba.createReactionLowerBound(Rb.getId(), EB.getValue()) fba.createReactionUpperBound(Rb.getId(), EB.getValue()) elif LB != None and UB != None: if LB.getValue() <= 0.0 and 
UB.getValue() >= 0.0: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), UB.getValue()) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), abs(LB.getValue())) elif LB.getValue() > 0.0 and UB.getValue() > 0.0: fba.createReactionLowerBound(Rf.getId(), LB.getValue()) fba.createReactionUpperBound(Rf.getId(), UB.getValue()) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), 0.0) if LB.getValue() < 0.0 and UB.getValue() < 0.0: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), 0.0) fba.createReactionLowerBound(Rb.getId(), abs(UB.getValue())) fba.createReactionUpperBound(Rb.getId(), abs(LB.getValue())) elif LB != None and UB is None: if LB.getValue() > 0: fba.createReactionLowerBound(Rf.getId(), LB.getValue()) fba.createReactionUpperBound(Rf.getId(), float('inf')) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), 0.0) else: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), float('inf')) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), abs(LB.getValue())) elif LB is None and UB != None: if UB.getValue() >= 0: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), UB.getValue()) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), float('inf')) else: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), 0.0) fba.createReactionLowerBound(Rb.getId(), abs(UB.getValue())) fba.createReactionUpperBound(Rb.getId(), float('inf')) else: fba.createReactionLowerBound(Rf.getId(), 0.0) fba.createReactionUpperBound(Rf.getId(), float('inf')) fba.createReactionLowerBound(Rb.getId(), 0.0) fba.createReactionUpperBound(Rb.getId(), float('inf')) return (R, LB, UB, EB) def exportLabelledArray(arr, fname, names=None, sep=',', fmt='%f'): """ Write 
def exportLabelledArray(arr, fname, names=None, sep=',', fmt='%f'):
    """
    Write a 2D array type object to file

     - *arr* the an array like object
     - *names* [default=None] the list of row names
     - *fname* the output filename
     - *sep* [default=','] the column separator
     - *fmt* [default='%f'] the output number format

    """
    if names != None:
        assert arr.shape[0] == len(names), '\n ... rows must equal number of names!'
    # fix: file() -> open() (removed in Python 3); context manager closes handle
    with open(fname, 'w') as F:
        cntr = 0
        for r in range(arr.shape[0]):
            if names != None:
                F.write(('%s' + sep) % names[r])
            for c in range(arr.shape[1]):
                if c < arr.shape[1] - 1:
                    F.write((fmt + sep) % arr[r, c])
                else:
                    F.write((fmt + '\n') % arr[r, c])
            cntr += 1
            # flush periodically so very large exports reach disk early
            if cntr >= 250:
                F.flush()
                cntr = 1
        F.write('\n')
        F.flush()
    print('exported to {}'.format(fname))


def exportLabelledArrayWithHeader(
    arr, fname, names=None, header=None, sep=',', fmt='%f'
):
    """
    Export an array with row names and header

     - *arr* the an array like object
     - *fname* the output filename
     - *names* [default=None] the list of row names
     - *header* [default=None] the list of column names
     - *sep* [default=','] the column separator
     - *fmt* [default='%f'] the output number format

    """
    if names != None:
        assert arr.shape[0] == len(names), '\n ... rows must equal number of names!'
    if header != None:
        assert arr.shape[1] == len(
            header
        ), '\n ... cols must equal number of header names!'
    # fix: file() -> open() (removed in Python 3)
    with open(fname, 'w') as F:
        cntr = 0
        if header != None:
            # a leading blank cell aligns the header with the row-name column
            if names != None:
                hstr = ' ' + sep
            else:
                hstr = ''
            for h in header:
                hstr += str(h) + sep
            hstr = hstr[:-1] + '\n'
            F.write(hstr)
            del hstr
        for r in range(arr.shape[0]):
            if names != None:
                F.write(('%s' + sep) % names[r])
            for c in range(arr.shape[1]):
                if c < arr.shape[1] - 1:
                    F.write((fmt + sep) % arr[r, c])
                else:
                    F.write((fmt + '\n') % arr[r, c])
            cntr += 1
            if cntr >= 250:
                F.flush()
                cntr = 1
        F.write('\n')
        F.flush()
    print('exported to {}'.format(fname))


def exportLabelledLinkedList(
    arr, fname, names=None, sep=',', fmt='%s', appendlist=False
):
    """
    Write a 2D linked list [[...],[...],[...],[...]] and optionally a list of
    row labels to file:

     - *arr* the linked list
     - *fname* the output filename
     - *names* [default=None] the list of row names
     - *sep* [default=','] the column separator
     - *fmt* [default='%s'] the output number format
     - *appendlist* [default=False] if True append the array to *fname* otherwise create a new file

    """
    if names != None:
        assert len(arr) == len(names), '\n ... rows must equal number of names!'
    # fix: file() -> open() (removed in Python 3)
    with open(fname, 'a' if appendlist else 'w') as F:
        cntr = 0
        for r in range(len(arr)):
            if names != None:
                F.write(('%s' + sep) % names[r])
            # NOTE(review): column count is taken from the FIRST row -- assumes a
            # rectangular list-of-lists; ragged input would misbehave. TODO confirm
            col_l = len(arr[0])
            for c in range(col_l):
                if c < col_l - 1:
                    if arr[r][c] == 0.0:
                        F.write('0.0' + sep)
                    else:
                        try:
                            F.write((fmt + sep) % arr[r][c])
                        except UnicodeEncodeError:
                            F.write((fmt + sep) % 'uError')
                else:
                    if arr[r][c] == 0.0:
                        F.write('0.0\n')
                    else:
                        try:
                            F.write((fmt + '\n') % arr[r][c])
                        except UnicodeEncodeError:
                            F.write((fmt + '\n') % 'uError')
            cntr += 1
            if cntr >= 250:
                F.flush()
                cntr = 1
        F.flush()
    if not appendlist:
        print('exported to {}'.format(fname))


def exportLabelledArrayWithHeader2CSV(arr, fname, names=None, header=None):
    """
    Export an array with row names and header to fname.csv

     - *arr* the an array like object
     - *fname* the output filename
     - *names* [default=None] the list of row names
     - *header* [default=None] the list of column names

    """
    fname += '.csv'
    exportLabelledArrayWithHeader(arr, fname, names, header, sep=',', fmt='%f')


def exportLabelledArray2CSV(arr, fname, names=None):
    """
    Export an array with row names to fname.csv

     - *arr* the an array like object
     - *fname* the output filename
     - *names* [default=None] the list of row names

    """
    fname += '.csv'
    exportLabelledArray(arr, fname, names, sep=',', fmt='%f')


def exportArray2CSV(arr, fname):
    """
    Export an array to fname.csv

     - *arr* the an array like object
     - *fname* the output filename

    """
    fname += '.csv'
    exportLabelledArray(arr, fname, None, sep=',', fmt='%f')


def exportLabelledArrayWithHeader2TXT(arr, fname, names=None, header=None):
    """
    Export an array with row names and header to fname.txt (tab separated)

     - *arr* the an array like object
     - *names* the list of row names
     - *header* the list of column names
     - *fname* the output filename

    """
    fname += '.txt'
    exportLabelledArrayWithHeader(arr, fname, names, header, sep='\t', fmt='%f')


def exportLabelledArray2TXT(arr, fname, names=None):
    """
    Export an array with row names to fname.txt (tab separated)

     - *arr* the an array like object
     - *names* [default=None] the list of row names
     - *fname* the output filename

    """
    fname += '.txt'
    exportLabelledArray(arr, fname, names, sep='\t', fmt='%f')
""" Export an array with row names to fname.txt - *arr* the an array like object - *names* [default=None] the list of row names - *fname* the output filename """ fname += '.txt' exportLabelledArray(arr, fname, names, sep='\t', fmt='%f') def exportArray2TXT(arr, fname): """ Export an array to fname.txt - *arr* the an array like object - *fname* the output filename - *sep* [default=','] the column separator """ fname += '.txt' exportLabelledArray(arr, fname, None, sep='\t', fmt='%f') def stringReplace(fbamod, old, new, target): """ This is alpha stuff, target can be: - 'species' - 'reactions' - 'constraints' - 'objectives' - 'all' """ print('stringReplace is relatively new and UNTESTED') fbamod.id = fbamod.id.replace(old, new) if target == 'species' or target == 'all': for s in fbamod.species: s.id = s.id.replace(old, new) if target == 'reactions' or target == 'all': for s in fbamod.reactions: s.id = s.id.replace(old, new) for r in s.reagents: r.id = r.id.replace(old, new) if target == 'constraints' or target == 'all': for s in fbamod.flux_bounds: s.id = s.id.replace(old, new) s.reaction = s.reaction.replace(old, new) if target == 'objectives' or target == 'all': for s in fbamod.objectives: s.id = s.id.replace(old, new) for f in s.fluxObjectives: f.id = f.id.replace(old, new) f.reaction = f.reaction.replace(old, new) return fbamod def getBoundsDict(fbamod, substring=None): """ Return a dictionary of reactions&bounds """ rBdic = {} for r in fbamod.getReactionIds(substring=substring): name, lb, ub, eq = fbamod.getReactionBounds(r) rBdic.update({name: {'lb': lb, 'ub': ub, 'eq': eq}}) return rBdic def getExchBoundsDict(fbamod): """ Return a dictionary of all exchange reactions (as determined by the is_exchange attribute of Reaction) - *fbamod* a CBMPy model """ rBdic = {} for r in fbamod.getReactionIds(substring=None): name, lb, ub, eq = fbamod.getReactionBounds(r) rBdic.update({name: {'lb': lb, 'ub': ub, 'eq': eq}}) for r in fbamod.reactions: if not r.is_exchange: 
rBdic.pop(r.getId()) return rBdic def processBiGGchemFormula(fba): """ Disambiguates the overloaded BiGG name NAME_CHEMFORMULA into - *species.name* NAME - *species.chemFormula* CHEMFORMULA """ for s in fba.species: # print s.name tmp = s.name tmp2 = tmp.split('_') if len(tmp2) >= 2: CF = tmp2.pop(-1) NM = '' for se in tmp2: NM += '%s_' % se NM = NM[:-1] # NM = tmp.replace('_%s' % CF, '') else: NM = s.name CF = '' if __DEBUG__: print(NM, CF) del tmp, tmp2 if s.chemFormula in ['', None, ' '] and CF != '': s.chemFormula = CF.strip() s.name = NM.strip() def processBiGGannotationNote(fba, annotation_key='note'): """ Parse the HTML formatted reaction information stored in the BiGG notes field. This function is being deprecated and replaced by `CBTools.processSBMLAnnotationNotes()` - requires an *annotation_key* which contains a BiGG HTML fragment """ print( '\nDeprecation warning:\nCBTools.processBiGGannotationNote() is being replaced with CBTools.processSBMLAnnotationNotes' ) html_p = re.compile("<html:p>.*?</html:p>") for r in fba.reactions: new_ann = {} if annotation_key in r.annotation: hPs = re.findall(html_p, r.annotation.pop(annotation_key)) if __DEBUG__: print(hPs) for p in hPs: ps = ( p.replace('<html:p>', '') .replace('</html:p>', '') .replace('&lt;', '<') .replace('&gt;', '>') .split(':', 1) ) if len(ps) == 2: new_ann.update({ps[0].strip(): ps[1].strip()}) r.annotation.update(new_ann) if __DEBUG__: print(r.annotation) def processSBMLAnnotationNotes(fba, annotation_key='note', level=3): """ Parse the HTML formatted reaction information stored in the SBML notes field currently processes BiGG and PySCeSCBM style annotations it looks for the the annotation indexed with the *annotation_key* - *annotation_key* [default='note'] which contains a HTML/XHTML fragment in BiGG/PySCeSCBM format (ignored in L3) """ # if hasattr(fba, '_SBML_LEVEL_') and fba._SBML_LEVEL_ != None: # print('\n==================================\nINFO 
\"CBTools.processSBMLAnnotationNotes()\":\n') # print('This function is now called automatically\nduring model load and can be ignored.') # print('==================================\n') # return html_p = re.compile("<p>.*?</p>") html_span = re.compile("<span>.*?</span>") html_bigg_p = re.compile("<html:p>.*?</html:p>") for r in fba.reactions: if level >= 3 or annotation_key in r.annotation: new_ann = {} notes = '' if level >= 3: notes = r.getNotes() else: notes = r.annotation.pop(annotation_key) if '<span xmlns="http://www.w3.org/1999/xhtml">' in notes: hPs = re.findall(html_p, notes.replace('\n', '')) if __DEBUG__: print(hPs) for p in hPs: ps = re.findall(html_span, p) ps = [ p.replace('<span>', '') .replace('</span>', '') .replace('&lt;', '<') .replace('&gt;', '>') .strip() for p in ps ] if len(ps) == 2 and ps[0] not in r.annotation: new_ann.update({ps[0]: ps[1]}) else: hPs = re.findall(html_bigg_p, notes) if len(hPs) > 0: if __DEBUG__: print(hPs) for p in hPs: ps = ( p.replace('<html:p>', '') .replace('</html:p>', '') .replace('&lt;', '<') .replace('&gt;', '>') .split(':', 1) ) if len(ps) == 2 and ps[0].strip() not in r.annotation: new_ann.update({ps[0].strip(): ps[1].strip()}) else: hPs = re.findall(html_p, notes) if __DEBUG__: print(hPs) for p in hPs: ps = ( p.replace('<p>', '') .replace('</p>', '') .replace('&lt;', '<') .replace('&gt;', '>') .split(':', 1) ) if len(ps) == 2 and ps[0].strip() not in r.annotation: new_ann.update({ps[0].strip(): ps[1].strip()}) r.annotation.update(new_ann) if __DEBUG__: print(r.annotation) for s in fba.species: if level >= 3 or annotation_key in s.annotation: notes = '' if level >= 3: notes = s.getNotes() else: notes = s.annotation.pop(annotation_key) new_ann = {} if '<span xmlns="http://www.w3.org/1999/xhtml">' in notes: hPs = re.findall(html_p, notes.replace('\n', '')) if __DEBUG__: print(hPs) for p in hPs: ps = re.findall(html_span, p) ps = [ p.replace('<span>', '') .replace('</span>', '') .replace('&lt;', '<') 
.replace('&gt;', '>') .strip() for p in ps ] if len(ps) == 2 and ps[0].strip() not in s.annotation: new_ann.update({ps[0]: ps[1]}) else: hPs = re.findall(html_bigg_p, notes) if len(hPs) > 0: if __DEBUG__: print(hPs) for p in hPs: ps = ( p.replace('<html:p>', '') .replace('</html:p>', '') .replace('&lt;', '<') .replace('&gt;', '>') .split(':', 1) ) if len(ps) == 2 and ps[0].strip() not in s.annotation: new_ann.update({ps[0].strip(): ps[1].strip()}) else: hPs = re.findall(html_p, notes) if __DEBUG__: print(hPs) for p in hPs: ps = ( p.replace('<p>', '') .replace('</p>', '') .replace('&lt;', '<') .replace('&gt;', '>') .split(':', 1) ) if len(ps) == 2 and ps[0].strip() not in s.annotation: new_ann.update({ps[0].strip(): ps[1].strip()}) s.annotation.update(new_ann) if 'chemFormula' in s.annotation and ( s.chemFormula is None or s.chemFormula == '' ): s.chemFormula = s.annotation.pop('chemFormula') if __DEBUG__: print(s.annotation) elif 'FORMULA' in s.annotation and ( s.chemFormula is None or s.chemFormula == '' ): s.chemFormula = s.annotation.pop('FORMULA') if s.chemFormula != '' and not checkChemFormula(s.chemFormula): s.chemFormula = '' if ( (s.charge is None or s.charge == '' or s.charge == 0) and 'charge' in s.annotation and s.annotation['charge'] != '' ): chrg = s.annotation.pop('charge') try: s.charge = int(chrg) except ValueError: s.charge = None print( 'Invalid charge: {} defined for species {}'.format(chrg, s.getId()) ) if __DEBUG__: print(s.annotation) elif ( (s.charge is None or s.charge == '' or s.charge == 0) and 'CHARGE' in s.annotation and s.annotation['CHARGE'] != '' ): chrg = s.annotation.pop('CHARGE') try: s.charge = int(chrg) except ValueError: print( 'Invalid charge: {} defined for species {}'.format(chrg, s.getId()) ) s.charge = None if __DEBUG__: print(s.annotation) def processExchangeReactions(fba, key): """ Extract exchange reactions from model using *key* and return: - a dictionary of all exchange reactions without *medium* reactions - a 
def processExchangeReactions(fba, key):
    """
    Extract exchange reactions from model using *key* and return:

     - a dictionary of all exchange reactions without *medium* reactions
     - a dictionary of *medium* exchange reactions (negative lower bound)

    """
    # extract all exchange bounds
    if key is None:
        fexDic = getExchBoundsDict(fba)
    else:
        fexDic = getBoundsDict(fba, substring=key)
    # extract the medium (exchange fluxes that allow uptake)
    MediumAll = []
    Medium = []
    for r in fexDic:
        if fexDic[r]['lb'] < 0.0:
            MediumAll.append((r, fexDic[r]['lb'], fexDic[r]['ub']))
            Medium.append(r)
        if __DEBUG__:
            print(r, fexDic[r])
    # remove medium from bounds dictionary and place in medium dict
    mediumDic = {}
    for m in Medium:
        mediumDic.update({m: fexDic.pop(m)})
    if __DEBUG__:
        print('\nMedium')
        for m in MediumAll:
            print(m)
        print('mediumDic')
        print(mediumDic)
        print('\nr in fexDic')
        for r in mediumDic:
            print(r, r in fexDic)
    return fexDic, mediumDic


def generateInputScanReports(
    fba, exDict, mediumDict, optimal_growth_rates, wDir, tag=''
):
    """
    Write three CSV reports (<model>.medium.csv, <model>.exchange.csv and
    <model>.optima.csv) for an input scan into directory *wDir*.

     - *fba* a CBMPy model (read for sourcefile, reaction names and ids)
     - *exDict* exchange-reaction bounds dictionary ({rid: {'lb':, 'ub':}})
     - *mediumDict* medium-reaction bounds dictionary (same shape as *exDict*)
     - *optimal_growth_rates* {reaction_id: optimum} map
     - *wDir* the output directory
     - *tag* [default=''] optional suffix appended to the report base name

    """
    modName = fba.sourcefile
    modName += tag
    rnames = fba.getReactionNames()
    rid = fba.getReactionIds()
    # fix: file() -> open() (removed in Python 3); context managers close handles
    with open(os.path.join(wDir, '%s.medium.csv' % modName), 'w') as F:
        F.write('J, lb, ub\n')
        for r in mediumDict:
            RN = rnames[rid.index(r)]
            F.write(
                '%s, %s, %s, %s\n' % (r, mediumDict[r]['lb'], mediumDict[r]['ub'], RN)
            )
        F.write('\n')
    with open(os.path.join(wDir, '%s.exchange.csv' % modName), 'w') as F:
        F.write('J, lb, ub\n')
        for r in exDict:
            RN = rnames[rid.index(r)]
            F.write('%s, %s, %s, %s\n' % (r, exDict[r]['lb'], exDict[r]['ub'], RN))
        F.write('\n')
    with open(os.path.join(wDir, '%s.optima.csv' % modName), 'w') as F:
        F.write('J, lb, ub, optimum, "name"\n')
        if __DEBUG__:
            print(rnames)
            print(rid)
        for r in optimal_growth_rates:
            RN = rnames[rid.index(r)]
            F.write(
                '%s, %s, %s, %s, "%s"\n'
                % (r, exDict[r]['lb'], exDict[r]['ub'], optimal_growth_rates[r], RN)
            )
        F.write('\n')


def getAllReactionsAssociatedWithGene(
    fba, gene, gene_annotation_key='GENE ASSOCIATION'
):
    """
    Return the ids of all reactions whose gene-association annotation contains *gene*.

     - *fba* a CBMPy model
     - *gene* the gene id, matched as a plain substring of the annotation string
     - *gene_annotation_key* [default='GENE ASSOCIATION'] annotation key tried
       first; 'GENE ASSOCIATION' and 'GENE_ASSOCIATION' are used as fallbacks

    """
    out = []
    for r in fba.reactions:
        GA = None
        if gene_annotation_key in r.annotation:
            GA = gene_annotation_key
        elif 'GENE ASSOCIATION' in r.annotation:
            GA = 'GENE ASSOCIATION'
        elif 'GENE_ASSOCIATION' in r.annotation:
            GA = 'GENE_ASSOCIATION'
        if GA != None:
            if gene in r.annotation[GA]:
                out.append(r.getId())
    return out
'GENE_ASSOCIATION' in r.annotation: GA = 'GENE_ASSOCIATION' if GA != None: if gene in r.annotation[GA]: out.append(r.getId()) return out def scanForReactionDuplicates(f, ignore_coefficients=False): """ This method uses uses a brute force apprach to finding reactions with matching stoichiometry """ duplicates = [] for r in f.reactions: Rref = r.getSpeciesIds() Rref.sort() refspecies = '' for s in Rref: refspecies += '%s:' % s refspecies = refspecies[:-1] for r2 in f.reactions: Rtest = r2.getSpeciesIds() Rtest.sort() if Rref == Rtest and r.id != r2.id: if not ignore_coefficients: go = True for rgid in Rtest: if float(r.getReagentWithSpeciesRef(rgid).coefficient) != float( r2.getReagentWithSpeciesRef(rgid).coefficient ): go = False break if go: dup = [ r.id, r2.id, ] dup.sort() dup = dup + [ refspecies, f.getReaction(dup[0]).getName(), f.getReaction(dup[1]).getName(), ] if dup not in duplicates: duplicates.append(dup) else: dup = [ r.id, r2.id, ] dup.sort() dup = dup + [ refspecies, f.getReaction(dup[0]).getName(), f.getReaction(dup[1]).getName(), ] if dup not in duplicates: duplicates.append(dup) for d in duplicates: print(d) print('\nFound %s pairs of duplicate reactions' % len(duplicates)) return duplicates def countedPause(Tsec): print('\nPausing ... 
',) for t in range(Tsec, -1, -1): print('\b\b\b{}'.format(t), end=" ") time.sleep(1) print('\b\b{}'.format('done.')) def addGenesFromAnnotations(fba, annotation_key='GENE ASSOCIATION', gene_pattern=None): """ THIS METHOD IS DERPRECATED PLEASE USE cmod.createGeneAssociationsFromAnnotations() Add genes to the model using the definitions stored in the annotation key - *fba* and fba object - *annotation_key* the annotation dictionary key that holds the gene association for the protein/enzyme - *gene_pattern* deprecated, not needed anymore """ print( '\nWARNING: CBTools.addGenesFromAnnotations IS DEPRECATED PLEASE USE cmod.createGeneAssociationsFromAnnotations()\n' ) fba.createGeneAssociationsFromAnnotations( annotation_key=annotation_key, replace_existing=True ) def getModelGenesPerReaction( fba, gene_pattern=None, gene_annotation_key='GENE ASSOCIATION' ): ''' Parse a BiGG style gene annotation string using default gene_pattern='(\(\W*\w*\W*\))' or (<any non-alphanum><any alphanum><any non-alphanum>) Old eColi specific pattern '(b\w*\W)' It is advisable to use the model methods directly rather than this function ''' react_gene = {} # gene_re = re.compile(gene_pattern) for r in fba.reactions: GA = None # print r.annotation if gene_annotation_key in r.annotation: GA = gene_annotation_key elif 'GENE ASSOCIATION' in r.annotation: GA = 'GENE ASSOCIATION' elif 'GENE_ASSOCIATION' in r.annotation: GA = 'GENE_ASSOCIATION' elif 'gene_association' in r.annotation: GA = 'gene_association' elif 'gene association' in r.annotation: GA = 'gene association' if GA != None: genes = extractGeneIdsFromString(r.annotation[GA]) # genes = re.findall(gene_re, r.annotation[GA]) # genes = [g.replace('(','').replace(')','').strip() for g in genes] # print r.annotation['GENE ASSOCIATION'] if len(genes) == 0: # print '\n' # print 'GA:', r.annotation['GENE ASSOCIATION'] # print r.getId(), genes # raw_input('x') genes = None # print r.getId(), genes # raw_input() react_gene.update({r.getId(): genes}) 
else: react_gene.update({r.getId(): None}) return react_gene def getReactionsPerGene(react_gene): gene_react = {} no_gene = [] for R in react_gene: if react_gene[R] is None: print('Reaction {} has no gene associated with it'.format(R)) no_gene.append(R) else: for G in react_gene[R]: ## GK = G.replace('(','').replace(')','').strip() if G in gene_react: print('Updating gene {} with reaction {}'.format(G, R)) gene_react[G].append(R) else: print('Adding gene {} to gene_react'.format(G)) gene_react.update({G: [R]}) genes = list(gene_react) return gene_react, genes, no_gene def removeFixedSpeciesReactions(f): """ This function is a hack that removes reactions which only have boundary species as reactants and products. These are typically gene associations encoded in the Manchester style and there is probably a better way of working around this problem ... - *f* an instantiated fba model object """ c_react = [] for rea in f.reactions: lsa = numpy.array( [f.getSpecies(r.species_ref).is_boundary for r in rea.reagents] ) if lsa.all(): c_react.append(rea.getId()) for r in c_react: f.deleteReactionAndBounds(r) def addFluxAsActiveObjective(f, reaction_id, osense, coefficient=1): """ Adds a flux as an active objective function - *reaction_id* a string containing a reaction id - *osense* objective sense must be **maximize** or **minimize** - *coefficient* the objective funtion coefficient [default=1] """ osense = osense.lower() if osense == 'max': osense = 'maximize' if osense == 'min': osense = 'minimize' if osense in ['maximise', 'minimise']: osense = osense.replace('se', 'ze') assert osense in ['maximize', 'minimize'], ( "\nosense must be ['maximize', 'minimize'] not %s" % osense ) assert reaction_id in [r.getId() for r in f.reactions], ( '\n%s is not avalid reaction' % reaction_id ) n_obj = CBModel.Objective(reaction_id + '_objf', osense) f.addObjective(n_obj, active=True) n_flux_obj = CBModel.FluxObjective( reaction_id + '_fluxobj', reaction_id, coefficient ) 
n_obj.addFluxObjective(n_flux_obj) def checkReactionBalanceElemental(f, Rid=None, zero_tol=1.0e-12): """ Check if the reaction is balanced using the chemical formula - *f* the FBA object - *Rid* [default = None] the reaction to check, defaults to all - *zero_tol* [default=1.0e-12] the floating point zero used for elemental balancing This function is derived from the code found here: http://pyparsing.wikispaces.com/file/view/chemicalFormulas.py """ assert HAVE_PYPARSING, '\nPyParsing needs to be installed for this method' if Rid is None: Rid = f.getReactionIds() elif isinstance(Rid, list): pass else: Rid = [Rid] ROUT = {} RCHARGE = {} for rid in Rid: R = f.getReaction(rid) reagents = [] netcharge = None for rr in R.reagents: CF = f.getSpecies(rr.species_ref).chemFormula chrg = f.getSpecies(rr.species_ref).charge if CF not in [None, '']: # print rid, rr.getId(), CF try: CFP = pp_chemicalFormula.parseString(CF) R2 = [(r[0], int(r[1])) for r in CFP] # print R2 # note this uses a net stoichiometry approach with signed coefficients reagents.append([rr.species_ref, rr.coefficient, CF, R2]) except pyparsing.ParseException: print('Invalid Chemical Formula ({}): {}'.format(rid, CF)) reagents.append([rr.species_ref, rr.coefficient, CF, None]) else: # note this uses a net stoichiometry approach with signed coefficients reagents.append([rr.species_ref, rr.coefficient, CF, None]) if chrg not in [None, '']: if netcharge is None: netcharge = float(chrg) * rr.coefficient else: netcharge += float(chrg) * rr.coefficient # if after all this we still do not have a charge make it all zero RCHARGE[rid] = netcharge ROUT[rid] = reagents Rres = {} for R in ROUT: Ed = {} for rr in ROUT[R]: if rr[3] != None: for s in rr[3]: if s[0] in Ed: Ed.update({s[0]: Ed[s[0]] + rr[1] * s[1]}) else: Ed.update({s[0]: rr[1] * s[1]}) else: pass # print('Invalid or no chemical formula defined for reagent: {}'.format(rr[0])) if len(Ed) > 0: CBAL = True EBAL = True else: CBAL = False EBAL = False for e in Ed: 
if abs(Ed[e]) >= zero_tol: EBAL = False if RCHARGE[R] is None or abs(RCHARGE[R]) >= zero_tol: CBAL = False Rres.update( { R: { 'id': R, 'charge_balanced': CBAL, 'element_balanced': EBAL, 'elements': Ed.copy(), 'charge': RCHARGE[R], 'stuff': ROUT[R], } } ) if CBAL and EBAL: f.getReaction(R).is_balanced = True else: f.getReaction(R).is_balanced = False return Rres def scanForUnbalancedReactions(f, output='all'): """ Scan a model for unbalanced reactions, returns a tuple of dictionaries balanced and unbalanced: - *f* an FBA model instance - *output* [default='all'] can be one of ['all','charge','element'] - *charge* return all charge **un** balanced reactions - *element* return all element **un** balanced reactions """ bcheck = checkReactionBalanceElemental(f) badD = bcheck.copy() out = {} all_balanced = {} charge_balanced = {} element_balanced = {} for b in bcheck: if bcheck[b]['charge_balanced'] and bcheck[b]['element_balanced']: all_balanced.update({b: badD.pop(b)}) elif bcheck[b]['charge_balanced']: charge_balanced.update({b: badD.pop(b)}) elif bcheck[b]['element_balanced']: element_balanced.update({b: badD.pop(b)}) if output == 'charge': out.update(element_balanced) elif output == 'element': out.update(charge_balanced) else: out.update(element_balanced) out.update(charge_balanced) print(len(bcheck), len(badD)) return out def createZipArchive(zipname, files, move=False, compression='normal'): """ Create a zip archive which contains one or more files - *zipname* the name of the zip archive to create (fully qualified) - *files* either a valid filename or a list of filenames (fully qualified) - *move* [default=False] attempt to delete input files after zip-archive creation - *compression* [default='normal'] normal zip compression, set as None for no compression only store files (zlib not required) """ if compression is None: compression = zipfile.ZIP_STORED else: compression = zipfile.ZIP_DEFLATED zf = zipfile.ZipFile(zipname, mode='w', compression=compression) if 
isinstance(files, list) or isinstance(files, tuple): files = [files] for f_ in files: assert os.path.exists(f_), 'ERROR: file \"{}\" does not exist'.format(f_) for f_ in files: zf.write(f_, arcname=os.path.split(f_)[-1]) zf.close() if move: for f_ in files: try: os.remove(f_) except Exception as ex: print(ex) print( '\nINFO: {} input file(s) moved to archive \"{}\".'.format( len(files), zipname ) ) else: print('\nINFO: zip-archive \"{}\" created.'.format(zipname)) def checkExchangeReactions(fba, autocorrect=True): """ Scan all reactions for exchange reactions (reactions containing a boundary species), return a list of inconsistent reactions or correct automatically. - *fba* a CBMPy model - *autocorrect* [default=True] correctly set the "is_exchange" attribute on a reaction """ badR = [] for r_ in fba.reactions: has_fixed = False if True in [fba.getSpecies(rr_.species_ref).is_boundary for rr_ in r_.reagents]: has_fixed = True if r_.is_exchange and not has_fixed: print( 'WARNING: reaction {} is labelled as an exchange reaction but has no fixed reagents.'.format( r_.getId() ) ) if autocorrect: print('INFO: is_exchange reaction attribute corrected') r_.is_exchange = has_fixed badR.append(r_.getId()) elif not r_.is_exchange and has_fixed: print( 'WARNING: reaction {} is not labelled as an exchange reaction but contains a fixed reagent.'.format( r_.getId() ) ) if autocorrect: print('INFO: is_exchange reaction attribute corrected') r_.is_exchange = has_fixed badR.append(r_.getId()) return badR def checkIds(fba, items='all'): """ Checks the id's of the specified model attributes to see if the name is legal and if there are duplicates. Returns a list of items with errors. 
- *fba* a CBMPy model instance - *items* [default='all'] 'all' means 'species,reactions,flux_bounds,objectives' of which one or more can be specified """ if items == 'all': items = [ a.strip() for a in 'species,reactions,flux_bounds,objectives'.split(',') ] else: items = [a.strip() for a in items.split(',')] for i_ in range(len(items) - 1, -1, -1): if not hasattr(fba, items[i_]): print( 'ERROR: bad descriptor \"{}\" removing from input list'.format( items.pop(i_) ) ) output = {} iddump = [] for i_ in items: output[i_] = [] ITEMS = fba.__getattribute__(i_) for I_ in ITEMS: Id = I_.getId() if Id in iddump: print('INFO: duplicate \"{}\" id: {}'.format(i_, Id)) output[i_].append(I_) else: iddump.append(Id) if i_ == 'reactions': if 'reagents' not in output: output['reagents'] = [] for rr_ in I_.reagents: rrid = rr_.getId() if rrid in iddump: print('INFO: duplicate \"reagent\" id: {}'.format(rrid)) if rr_ not in output['reagents']: output['reagents'].append(rr_) else: iddump.append(rrid) if i_ == 'objectives': if 'fluxObjectives' not in output: output['fluxObjectives'] = [] for fo_ in I_.fluxObjectives: foid = fo_.getId() if foid in iddump: print('INFO: duplicate \"fluxObjective\" id: {}'.format(foid)) if fo_ not in output['fluxObjectives']: output['fluxObjectives'].append(fo_) else: iddump.append(foid) if len(output) == 0: print( '\nWARNING: no valid object descriptors found, please check your function call!' ) return output def checkFluxBoundConsistency(fba): """ Check flux bound consistency checks for multiply defined bounds, bounds without a reaction, inconsistent bounds with respect to each other and reaction reversbility. Returns a dictionary of bounds/reactions where errors occur. 
""" dupIDs = checkIds(fba, items='flux_bounds')['flux_bounds'] if len(dupIDs) > 0: print('\nERROR: {} duplicate flux_bound Id\'s detected!'.format(len(dupIDs))) LB = {} UB = {} EB = {} eMB = {'lower': {}, 'upper': {}, 'equality': {}} noreaction = [] for fb in fba.flux_bounds: raw_type = fb.is_bound get_type = fb.getType() RID = fb.getReactionId() if raw_type != get_type: print( 'WARNING: incorrect bound type for operation: \"{}\" old \"{}\" --> \"{}\"'.format( fb.operation, raw_type, get_type ) ) if get_type == 'lower': if RID in LB: print( 'ERROR multiple LOWER bounds defined for reaction: \"{}\"'.format( RID ) ) if RID in eMB['lower']: eMB['lower'][RID].append(fb) else: eMB['lower'][RID] = [fb] LB[RID] = fb if get_type == 'upper': if RID in UB: print( 'ERROR multiple UPPER bounds defined for reaction: \"{}\"'.format( RID ) ) if RID in eMB['upper']: eMB['upper'][RID].append(fb) else: eMB['upper'][RID] = [fb] UB[RID] = fb if get_type == 'equality': if RID in EB: print( 'ERROR multiple EQUAL bounds defined for reaction: \"{}\"'.format( RID ) ) if RID in eMB['equality']: eMB['equality'][RID].append(fb) else: eMB['equality'][RID] = [fb] EB[RID] = fb if fba.getReaction(RID) is None: noreaction.append(fb) for mb_ in list(eMB['lower']): if len(eMB['lower'][mb_]) == 1: eMB['lower'].pop(mb_) for mb_ in list(eMB['upper']): if len(eMB['upper'][mb_]) == 1: eMB['upper'].pop(mb_) for mb_ in list(eMB['equality']): if len(eMB['equality'][mb_]) == 1: eMB['equality'].pop(mb_) undefined = {'no_upper': [], 'no_lower': [], 'no_upper_lower': []} for r_ in fba.getReactionIds(): LBdef = True UBdef = True if r_ not in EB: if r_ not in LB: LBdef = False if r_ not in UB: UBdef = False if not LBdef and not UBdef: print('WARNING: No bounds defined for reaction: \"{}\"'.format(r_)) undefined['no_upper_lower'].append(r_) else: if not LBdef: print('WARNING: No LOWER BOUND defined for reaction: \"{}\"'.format(r_)) undefined['no_lower'].append(r_) if not UBdef: print('WARNING: No UPPER BOUND 
defined for reaction: \"{}\"'.format(r_)) undefined['no_upper'].append(r_) errors = { 'eq+lb': [], 'eq+ub': [], 'duplicate_ids': dupIDs, 'multiple_defines': eMB, 'lb>ub': [], 'undefined': undefined, 'rev_contradict': [], 'no_reaction': noreaction, } for k_ in EB: if k_ in LB: errors['eq+lb'].append((EB[k_], LB[k_])) if k_ in UB: errors['eq+ub'].append((EB[k_], UB[k_])) checked = [] for k_ in LB: if k_ in UB and k_ not in checked: if not LB[k_].getValue() <= UB[k_].getValue(): print( 'ERROR: Reaction {} has lower bound ({}) larger than upper bound ({})'.format( k_, LB[k_].getValue(), UB[k_].getValue() ) ) errors['lb>ub'].append((LB[k_], UB[k_])) checked.append(k_) assR = fba.getReaction(LB[k_].getReactionId()) if assR != None: if not assR.reversible: if LB[k_].getValue() < 0.0: print( 'ERROR: Reaction {} is marked as irreversible but has a negative lower bound ({})'.format( assR.getId(), LB[k_].getValue() ) ) errors['rev_contradict'].append(assR) del assR for k_ in UB: if k_ in LB and k_ not in checked: if not LB[k_].getValue() <= UB[k_].getValue(): print( 'ERROR: Reaction {} has lower bound ({}) larger than upper bound ({})'.format( k_, LB[k_].getValue(), UB[k_].getValue() ) ) errors['lb>ub'].append((LB[k_], UB[k_])) checked.append(k_) return errors def roundOffWithSense(val, osense='max', tol=1e-8): """ Round of a value in a way that takes into consideration the sense of the operation that generated it - *val* the value - *osense* [default='max'] the sense - *tol* [default=1e-8] the tolerance of the roundoff factor """ if osense.lower() in ['min', 'minimize', 'minimise']: val = numpy.ceil(val / tol) * tol else: val = numpy.floor(val / tol) * tol return val def mergeGroups(m, groups, new_id, new_name='', auto_delete=False): """ Merge a list of groups into a new group. Note, annotations are not merged! 
- *m* the model containing the source groups - *groups* a list of groups - *new_id* the new, merged, group id - *new_name* [default=''] the new group name, the default behaviour is to merge the old names - *auto_delete* [default=False] delete the source groups """ if type(groups) == list and len(groups) > 1: badgid = [] m_gids = m.getGroupIds() for gnew in groups: if gnew not in m_gids: badgid.append(gnew) if len(badgid) > 0: print('ERROR: groups contains invalid group ids: {}'.format(str(badgid))) return False else: print('ERROR: groups must be a list with more than one element.') return False if m.getGroup(new_id) is not None: print('ERROR: new_id {} already exists'.format(new_id)) return False m.createGroup(new_id) gnew = m.getGroup(new_id) make_name = False if new_name == '': make_name = True for gid in groups: gobj = m.getGroup(gid) if make_name: new_name = '{}+{}'.format(new_name, gobj.getName()) for gm in gobj.members: if gm not in gnew.members: gnew.addMember(gm) if auto_delete: m.deleteGroup(gid) if make_name: new_name = new_name[1:] gnew.setName(new_name) return True def merge2Models(m1, m2, ignore=None, ignore_duplicate_ids=False): """ Merge 2 models, this method does a raw merge of model 2 into model 1 without any model checking. Component id's in ignore are ignored in both models and the first objective of model 1 is arbitrarily set as active. Compartments are also merged and a new "OuterMerge" compartment is also created. In all cases duplicate id's are tracked and ignored, essentially using the object id encountered first - usually that of model 1. Duplicate checking can be disabled by setting the *ignore_duplicate_ids* flag. - *m1* model 1 - *m2* model 2 - *ignore* [[]] do not merge these id's - *ignore_duplicate_ids* [False] default behaviour that can be enabled In development: merging genes and gpr's. 
""" if ignore is None: ignore = [] out = CBModel.Model(m1.getId() + m2.getId()) out.setName(m1.getName() + m2.getName()) out.createCompartment('OuterMerge', size=1.0, dimensions=3) idstore = [] for x_ in m1.compartments + m2.compartments: sid = x_.getId() if sid not in ignore: if ignore_duplicate_ids or sid not in idstore: idstore.append(sid) out.addCompartment(x_.clone()) for s_ in m1.species + m2.species: sid = s_.getId() if sid not in ignore: if ignore_duplicate_ids or sid not in idstore: idstore.append(sid) out.addSpecies(s_.clone()) else: print('Skipping duplicate id: \"{}\"'.format(sid)) else: print('Skipping ignored id: \"{}\"'.format(sid)) for r_ in m1.reactions + m2.reactions: sid = r_.getId() if r_.getId() not in ignore: if ignore_duplicate_ids or sid not in idstore: idstore.append(sid) out.addReaction(r_.clone(), create_default_bounds=False) else: print('Skipping duplicate id: \"{}\"'.format(sid)) else: print('Skipping ignored id: \"{}\"'.format(sid)) for f_ in m1.flux_bounds + m2.flux_bounds: sid = f_.getId() if f_.getId() not in ignore: if ignore_duplicate_ids or sid not in idstore: idstore.append(sid) out.addFluxBound(f_.clone()) else: print('Skipping duplicate id: \"{}\"'.format(sid)) else: print('Skipping ignored id: \"{}\"'.format(sid)) GO = True for o_ in m1.objectives + m2.objectives: sid = o_.getId() if o_.getId() not in ignore: if ignore_duplicate_ids or sid not in idstore: idstore.append(sid) if GO: out.addObjective(o_.clone(), active=True) GO = False else: out.addObjective(o_.clone(), active=False) else: print('Skipping duplicate id: \"{}\"'.format(sid)) else: print('Skipping ignored id: \"{}\"'.format(sid)) print('\nAdded {} components to merged model'.format(len(idstore))) idstore = [] return out
I have always been fascinated by history. I like to know how things were—before we messed with them, originally, in the past. So Deirdre Portnoy had my immediate attention when I found out she was ripping up the front lawn at the Wellfleet Historical Society to put in a 19th century cottage garden in place of the weeds and crabgrass. Cottage gardens are the old English kind—that distinct style of dense plantings hedged with boxwoods and lined with picket fences and brick pathways in a way that is at once homely, graceful, and charming. Portnoy can't be sure that's what was on the lawn of the old 1860s home that houses the historical society—there are very few pictures of Wellfleet front yards from that period, and even in the ones we do have, it's difficult to make out the varieties and organization of the plants—but she knows that stylized cottage gardens were incredibly popular at the time. In a town like Wellfleet—coastal, busy, prosperous—it's as likely a guess as any. And so Portnoy ripped up the lawn—pulled out the crabgrass and started turning earth. Locals donated a granite step, bricks, and help to put in a walkway that leads down from the sidewalk to the old millstone that steps up to the front door. Portnoy lined the front with a picket fence, the sides with English boxwoods, and started filling in the middle with flowers and fruits and culinary and medicinal herbs. She planted all sorts of things—too many to list—but I'll give you the highlights. There's a Seckel pear tree—short, semi-dwarfed, with small, sweet fruit that's good for canning. For medicinals there's bee balm and rue and echinacea, and a particular kind of yarrow known as Achillea millefolium. For cooking herbs there are rosemary and thyme and chives and sage, for fruits a whole bank of strawberries, and plans for heirloom tomatoes come spring. There are calendula flowers and nasturtiums and a beautiful Cape Cod climbing rose twining around the fence.
The biggest surprise, Portnoy says, was how many varieties of herbs and flowers she found from the cottage period are still in use today. Some plants she had to dig around for—it's hard to find specific historic varieties these days, now that many tags don't carry the second Latin name—but between the garden stores and donations, she managed to get the yard pretty well filled in. Next spring, she's hoping to make an educational brochure with the historic uses and plant names, and she left room for walking in curved, amphitheater-style pathways. If you have a chance, go check it out. It's still a work in progress, and the weather isn't doing much for its looks, but the layout of it, the look, is very inspiring. And just in case you get really inspired, below I've made a list of the books Portnoy used in the project, to help you with your own come spring. This post IS inspiring, as is your blog. We move to Wellfleet in a few weeks, and your blog is the best preview of all I am looking forward to. The local food movement has really taken off since we last lived there. thank you jhope. i find your work inspiring as well...it's so important to put our kids in clothes that are both safe and adorable. very excited to hear we'll have you as a neighbor soon! i think you'll find it isn't hard to get inspired in a place like this.
# -*- coding: utf-8 -*-
"""Aspect-term polarity detection using the Hindi SentiWordNet (HSWN).

Loads the rule-based aspect-term output (JSON), scores the quality words
of each aspect term against the HSWN lexicon (falling back to Hindi
WordNet synonyms), and writes the detected polarity per sentence.
"""
import sys
import pickle
import json
import ast
import io

from ate_rule_learn import write_json

# Keys used for the per-word polarity scores stored in the lexicon.
CONST_POS_SCORE = "POS_SCORE"
CONST_NEG_SCORE = "NEG_SCORE"

# The Hindi WordNet pickles are loaded once at import time; the directory
# that contains them is supplied as the third command-line argument.
hindi_word_net = sys.argv[3]
word2Synset = pickle.load(open(hindi_word_net + "/WordSynsetDict.pk"))
synonyms = pickle.load(open(hindi_word_net + "/SynsetWords.pk"))


def get_synonyms(word):
    """Return all synonyms of *word* from the Hindi WordNet.

    Looks up every synset of the word (across all POS tags) and collects
    the member words of each synset. Returns an empty list for words not
    present in the WordNet.
    """
    output = []
    syn_map_list = []
    # `in` replaces dict.has_key(), which was removed in Python 3 and is
    # non-idiomatic in Python 2 as well; behavior is identical.
    if word in word2Synset:
        synsets = word2Synset[word]
        for pos in synsets.keys():
            for synset in synsets[pos]:
                if synset in synonyms:
                    syn_map_list.append(synonyms[synset])
    for syn_map in syn_map_list:
        for word_synonyms_list in syn_map.values():
            output.extend(word_synonyms_list)
    return output


def load_terms_output(file_path):
    """Load the rule-based aspect-term output (JSON) from *file_path*."""
    with open(file_path) as terms_output_file:
        terms_output = json.load(terms_output_file)
        # Round-trip through dumps/literal_eval to coerce the parsed JSON
        # into plain Python objects; the `encoding` keyword of json.dumps
        # is Python-2 specific.
        terms_output = ast.literal_eval(
            json.dumps(terms_output, ensure_ascii=False, encoding='utf8'))
    # The context manager closes the file; the original redundant
    # explicit close() inside the with-block has been removed.
    return terms_output


def generate_lexicon(hindi_swn_dir):
    """Build a word -> {POS_SCORE, NEG_SCORE} lexicon from HSWN_WN.txt.

    Each non-empty line of the HSWN file carries the positive score in
    column 3, the negative score in column 4, and a comma-separated list
    of words in column 5.
    """
    lexicon = {}
    swn_file = hindi_swn_dir + "/HSWN_WN.txt"
    with io.open(swn_file, 'r', encoding='utf8') as f:
        for line in f:
            line = line.rstrip()
            if not line:
                continue
            data = line.split()
            pos_score = float(data[2])
            neg_score = float(data[3])
            for word in data[4].split(','):
                lexicon[word] = {CONST_POS_SCORE: pos_score,
                                 CONST_NEG_SCORE: neg_score}
    return lexicon
def get_score(word, swn_lexicon):
    """Return the effective polarity score of *word* from the lexicon.

    The effective score is the positive polarity minus the negative
    polarity. If the word itself is not in the lexicon, its Hindi
    WordNet synonyms are tried and the first synonym found in the
    lexicon supplies the score. Returns 0 when neither the word nor any
    synonym is known.
    """
    score = 0
    # Only byte strings need decoding; under Python 2 the incoming str
    # values are bytes (same behavior as the original unconditional
    # decode), and already-decoded text passes through unchanged.
    if isinstance(word, bytes):
        word = word.decode("utf-8")
    if word not in swn_lexicon:
        candidates = get_synonyms(word)
    else:
        candidates = [word]
    for candidate in candidates:
        if candidate in swn_lexicon:
            pos_score = swn_lexicon[candidate][CONST_POS_SCORE]
            neg_score = swn_lexicon[candidate][CONST_NEG_SCORE]
            score = pos_score - neg_score
            break
    return score


def detect_polarity(terms_output, swn_lexicon):
    """Detect the polarity of each aspect term via the HSWN lexicon.

    *terms_output* maps sentence id -> {aspect term -> quality words}.
    The summed effective scores of the quality words decide the label:
    'pos' if positive, 'neg' if negative, 'neu' otherwise.
    """
    terms_polarity_output = {}
    # .items() works on both Python 2 and 3 (iteritems() is 2-only).
    for sent_id, sent_map in terms_output.items():
        sent_polarity = {}
        for aspect_term, quality_words in sent_map.items():
            polarity = "neu"
            score = 0
            for quality_word in quality_words:
                score += get_score(quality_word, swn_lexicon)
            if score > 0:
                polarity = "pos"
            elif score < 0:
                polarity = "neg"
            sent_polarity[aspect_term] = polarity
        terms_polarity_output[sent_id] = sent_polarity
    return terms_polarity_output


# Main function
if __name__ == '__main__':
    # loading aspect terms rule based output
    terms_output = load_terms_output(sys.argv[1])
    # generating Hindi Sentiwordnet lexicon
    hindi_swn_dir = sys.argv[2]
    swn_lexicon = generate_lexicon(hindi_swn_dir)
    # detecting polarity
    terms_polarity_output = detect_polarity(terms_output, swn_lexicon)
    # writing output to file
    write_json(terms_polarity_output, sys.argv[4])
Back with the third instalment of its cult Ozweego sneakers, powerhouse team adidas x Raf Simons shows no signs of slowing down. With the infamously futuristic style of previous editions, the next generation Ozweego III showcases a distinct runner's profile. Crafted from leather and mesh, it features trademark silicone windows and bubbles, contrasted with oversized eyelets. Seated on a cushioned Adiprene sole, it’s a pair to keep you on your toes.
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings

from gallery.models import Image, Gallery, Tag
from family_tree.models import Family, Person
from message_queue.models import Queue, Message
from facial_recognition.resize_tags import resize_tags

import os
import shutil
import threading


# All media/S3/face-recognition settings are pointed at their *_TEST
# counterparts so the test never touches production storage.
@override_settings(SSLIFY_DISABLE=True,
                   MEDIA_ROOT=settings.MEDIA_ROOT_TEST,
                   MEDIA_URL=settings.MEDIA_URL_TEST,
                   AWS_STORAGE_BUCKET_NAME=settings.AWS_STORAGE_BUCKET_NAME_TEST,
                   FACE_RECOG_TRAIN_TEMP_DIR=settings.FACE_RECOG_TRAIN_TEST_DIR)
class ResizeTagsTestCase(TestCase):  # pragma: no cover
    """Integration test for the `resize_tags` message-queue worker."""

    def setUp(self):
        '''
        Need to create a family and a gallery, copy a test image into the
        media area (and S3), and attach a person plus an initial face tag
        for the worker to resize.
        '''
        self.family = Family()
        self.family.save()
        self.gallery = Gallery.objects.create(title="test_gallery", family_id=self.family.id)
        # Bundled portrait used as the known face-detection input.
        self.test_image = os.path.join(settings.BASE_DIR, 'facial_recognition/tests/test_image_woman.jpg')
        # Destination path mirrors the gallery storage layout:
        # <MEDIA_ROOT>/galleries/<family_id>/<gallery_id>/test_image.jpg
        self.test_image_destination = ''.join([settings.MEDIA_ROOT, 'galleries/', str(self.family.id), '/', str(self.gallery.id), '/test_image.jpg'])
        self.test_image_s3_key = ''.join(['galleries/', str(self.family.id), '/', str(self.gallery.id), '/test_image.jpg'])
        directory = ''.join([settings.MEDIA_ROOT, 'galleries/', str(self.family.id), '/', str(self.gallery.id)])
        if not os.path.exists(directory):
            os.makedirs(directory)
        # Copy test image to media area
        shutil.copy2(self.test_image, self.test_image_destination)
        self.image = Image(gallery=self.gallery, family=self.family, original_image=''.join(['galleries/', str(self.family.id), '/', str(self.gallery.id), '/test_image.jpg']))
        self.image.save()
        # NOTE(review): this performs a real upload against the *_TEST
        # bucket configured above.
        self.image.upload_files_to_s3()
        self.person = Person(name='Wallace', gender='M', email='wallace@creaturecomforts.com', family_id=self.family.id, language='en')
        self.person.save()
        # Initial (pre-resize) tag coordinates, expressed as fractions.
        self.tag = Tag.objects.create(image_id=self.image.id, x1=0.3, y1=0.2, x2=0.5, y2=0.4, person_id=self.person.id)

    def tearDown(self):
        '''
        Remove the local and remote copies of the test image; the remote
        delete runs on a background thread so teardown does not block.
        '''
        self.image.delete_local_image_files()
        threading.Thread(target=self.image.delete_remote_image_files).start()
        try:
            os.remove(self.test_image_destination)
        except:
            # Best-effort cleanup; the file may already be gone.
            # NOTE(review): bare except also hides unrelated errors.
            pass

    def test_tag_resizes(self):
        '''
        Processing a resize_tag queue message should snap the tag
        coordinates to the detected face in the image.
        '''
        # Create a message to resize tag
        resize_tag_queue_id = Queue.objects.get(name='resize_tag').id
        message = Message.objects.create(queue_id=resize_tag_queue_id, integer_data = self.tag.id)
        resize_tags([message])
        resized_tag = Tag.objects.get(pk=self.tag.id)
        # Expected coordinates assume the detector's output for
        # test_image_woman.jpg — re-derive if the fixture image changes.
        self.assertTrue(abs(0.2875 - resized_tag.x1) < 0.001)
        self.assertTrue(abs(0.1951 - resized_tag.y1) < 0.001)
        self.assertTrue(abs(0.5575 - resized_tag.x2) < 0.001)
        self.assertTrue(abs(0.3959 - resized_tag.y2) < 0.001)
Why don't pineapples yield well on land where there are perennial grasses like spear grass, wandering Jew, etc.? How can I improve land with these grasses so that it is also suitable for pineapple growing? (Odong Michael, 35 yrs, Gulu District, Northern Uganda).
import logging
from StringIO import StringIO  # Python 2; io.BytesIO is the Python 3 equivalent

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf


class TensorboardLogger(object):
    """Log scalars, images and histograms to TensorBoard without TF ops.

    Summaries are built directly as protobufs, so no TensorFlow graph or
    session is needed. Adapted from
    https://gist.github.com/gyglim/1f8dfb1b5c82627ae3efcfbbadb9f514
    """

    def __init__(self, writer=None, log_dir=None):
        """Creates a summary writer logging to log_dir.

        Parameters
        ----------
        writer : tf.summary.FileWriter, optional
            Existing writer to reuse; takes precedence over *log_dir*.
        log_dir : basestring, optional
            Directory for a new FileWriter, used when *writer* is None.
            If both are None an error is logged and ``self.writer`` is
            left as None (subsequent log_* calls will then fail).
        """
        self.logger = logging.getLogger('mview3d.' + __name__)
        if writer is not None:
            self.writer = writer
        else:
            if log_dir is not None:
                # flush_secs=30: events are written out every 30 seconds.
                self.writer = tf.summary.FileWriter(log_dir, flush_secs=30)
            else:
                self.logger.error(
                    'At least one of writer or log_dir has to be not None')
                self.writer = None

    def log_scalar(self, tag, value, step):
        """Log a scalar variable.

        Parameters
        ----------
        tag : basestring
            Name of the scalar
        value : number
            Value recorded as the scalar summary
        step : int
            training iteration
        """
        summary = tf.Summary(
            value=[tf.Summary.Value(tag=tag, simple_value=value)])
        self.writer.add_summary(summary, step)
        self.writer.flush()

    def log_images(self, tag, images, step):
        """Logs a list of images, each under the tag ``<tag>/<index>``."""
        im_summaries = []
        for nr, img in enumerate(images):
            # PNG-encode the image into an in-memory buffer
            s = StringIO()
            plt.imsave(s, img, format='png')
            # Create an Image proto from the encoded bytes
            img_sum = tf.Summary.Image(
                encoded_image_string=s.getvalue(),
                height=img.shape[0],
                width=img.shape[1])
            # Create a Summary value per image
            im_summaries.append(
                tf.Summary.Value(tag='%s/%d' % (tag, nr), image=img_sum))
        # Create and write Summary
        summary = tf.Summary(value=im_summaries)
        self.writer.add_summary(summary, step)
        self.writer.flush()

    def log_histogram(self, tag, values, step, bins=1000):
        """Logs the histogram of a list/vector of values.

        *values* must expose ``.shape`` (i.e. be a numpy array or
        array-like accepted by numpy).
        """
        # Create histogram using numpy
        counts, bin_edges = np.histogram(values, bins=bins)
        # Fill fields of histogram proto
        hist = tf.HistogramProto()
        hist.min = float(np.min(values))
        hist.max = float(np.max(values))
        hist.num = int(np.prod(values.shape))
        hist.sum = float(np.sum(values))
        hist.sum_squares = float(np.sum(values**2))
        # The proto requires len(bucket_limit) == len(bucket): the first
        # bucket implicitly spans from -DBL_MAX to bin_edges[1].
        # See https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/summary.proto#L30
        # Thus, we drop the start of the first bin
        bin_edges = bin_edges[1:]
        # Add bin edges and counts
        for edge in bin_edges:
            hist.bucket_limit.append(edge)
        for c in counts:
            hist.bucket.append(c)
        # Create and write Summary
        summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=hist)])
        self.writer.add_summary(summary, step)
        self.writer.flush()
Arne Norell was a multifaceted designer who experimented with many combinations of different materials. He gained an interest in classic furniture, and in 1954 he founded the company Möbel AB Arne Norell, where he designed and manufactured furniture. His most famous and popular piece is the chair "Ari", created in 1966. In 1973, the British furniture manufacturers rewarded him with "Showpiece of the Year" (the gem of the year). His company still manufactures furniture to his designs, under the name "AB Norell Möbler."
import os import tempfile import shutil from hyperspyui.version import __version__ from hyperspyui.__main__ import get_splash import pytest from qtpy import QtCore # QtWebEngineWidgets must be imported before a QCoreApplication instance # is created (used in eelsdb plugin) # Avoid a bug in Qt: https://bugreports.qt.io/browse/QTBUG-46720 from qtpy import QtWebEngineWidgets QCoreApplication = QtCore.QCoreApplication QSettings = QtCore.QSettings QCoreApplication.setApplicationName("HyperSpyUI-tests") QCoreApplication.setOrganizationName("Hyperspy") QCoreApplication.setApplicationVersion(__version__) QSettings.setDefaultFormat(QSettings.IniFormat) _tmpdirpath = '' def pytest_configure(config): global _tmpdirpath _tmpdirpath = tempfile.mkdtemp() userpath = os.path.join(_tmpdirpath, 'user') syspath = os.path.join(_tmpdirpath, 'sys') os.mkdir(userpath) os.mkdir(syspath) QSettings.setPath(QSettings.IniFormat, QSettings.UserScope, userpath) QSettings.setPath(QSettings.IniFormat, QSettings.SystemScope, syspath) settings = QSettings() settings.setValue( 'plugins/Version selector/check_for_updates_on_start', False) def pytest_unconfigure(config): shutil.rmtree(_tmpdirpath) @pytest.fixture(scope='session') def mainwindow(qapp): from hyperspyui.mainwindow import MainWindow window = MainWindow(get_splash(), argv=[]) yield window qapp.processEvents() window.close() window.deleteLater() del window qapp.processEvents()
Þorbjörg Finnbogadóttir og Auður Höskuldsdóttir færðu Bráðadeild stofnunarinnar þ. 6. júní afrakstur styrktar og minningartónleika um Magnús Frey Sveinbjörnsson, son Þorbjargar og Sveinbjörns Magnússonar. Tók deildarstjóri Bráðadeildar, Auður H. Ólafsdóttir, við gjöfinni, 100 þúsund krónum, sem koma sér afar vel fyrir deildina. Er aðstandendum tónleikanna og flytjendum færðar hugheilar þakkir fyrir framlag þeirra. Have you thought about what you want people to say about you after you're gone? Can you hear the voice saying, "He was a great man." Or "She really will be missed." What michael kors outlet else do they say? One of the strangest wholesales michael kors bags online phenomena of life is to louis vuitton bags engage in a work that http://www.michaelkorsoutlet-mk.us.com will last long after death. Louis vuitton outlet Isn't that a lot like Louis Vuitton Store investing all your money so LV Purses that future generations can bare michael kors outlet online sale interest on it? Perhaps, yet prada outlet if you look deep in gucci bags your own heart, you'll find prada online outlet something drives you to make Cheap Louis Vuitton Bags this kind of contribution -- Louis Vuitton Outlet stores something drives every human being Louis Vuitton Purses to find a purpose that Michael Kors Outlet lives on after death. Do you hope to louis vuitton outlet memorialize your name? Have a oakley outlet store name that is whispered with oakley reverent�����ϵģ� awe? Do you hope louis vuitton stores to have your face carved http://www.louisvuitton-lv.us.com upon 50 ft of granite�������ң� http://www.louisvuittonoutletsyear.com rock? Is the answer really LV that simple? Is the purpose http://www.louisvuittonoutlet-2015.com of lifetime contribution an ego-driven Louis Vuitton desire for a mortal being new balance outlet online to have an immortal name oakley outlet online or is it something more? A child alive chanel outlet today will die tomorrow. 
A chanel bags baby that had the potential Michael Kors satchel bag to be the next Einstein Cheap Louis Vuitton will die from complication is Louis Vuitton Bags 2015 at birth. The circumstances of http://www.prada-outlet-onlines.net life are not set in gucci outlet online stone. We are not all Louis Vuitton Outlet meant to live life through http://www.wholesale-michaelkorsbagsonline.com to old age. We've grown oakley outlet discount to perceive life3 as a http://www.oakleysunglasses-wholesales.com full cycle with a certain http://www.louisvuittonchristmas.cc number of years in between. Louis Vuitton Outlet Handbags If all of those years Louis Vuitton Sale aren't lived out, it's a Louis Vuitton Outlet Online tragedy. A tragedy because a Louis Vuitton Handbags outlet human's potential was never realized. oakley outlet A tragedy because a spark Michael Kors Purses was snuffed out before it Michael Kors handbags sale ever became a flame. By virtue of inhabiting Michael Kors handbags wholesale a body we accept these louis vuitton wallets risks. We expose our mortal michael kors handbags flesh to the laws of Louis Vuitton outlet online the physical environment around us. http://www.cheaplouisvuittonbagso.com The trade off isn't so michael kors bags bad when you think about http://www.louisvuitton-bags-2015.com it. The problem comes when gucci outlet 2014 we construct mortal fantasies of oakley store what life should be like. http://www.louisvuitton-sunglassess.com When life doesn't conform to Michael Kors outlet our fantasy we grow upset, Michael Kors bags wholesale frustrated, or depressed. We are alive; let us louis vuitton handbags live. We have the ability http://www.newbalance-factory-outlet.com to experience; let us experience. Louis Vuitton Outlet online We have the ability to LV handbags learn; let us learn. The louis vuitton luggage meaning of life can be michael kors grasped in a moment. 
A Michael Kors outlet store moment so brief it often chanel outlet online evades our perception. What meaning stands behind the michael kors outlet online store sale us dramatic unfolding of life? What Louis Vuitton Sunglasses single truth can we grasp http://www.louisvuittonnew-lv.com and hang onto for dear Louis Vuitton outlet life when all other truths Louis Vuitton Outlet Store around us seem to fade http://www.louisvuitton-lvpurses.com with time? These http://www.louisvuittonstore2015.com moments are strung together in Louis Vuitton Bags a series we call events. louis vuitton outlet 2015 These events are strung together in a series we call life. When we seize the moment and bend it according to our will, a will driven by the spirit deep inside us, then we have discovered the meaning of life, a meaning for us that shall go on long after we michael kors satchel bags outlets depart this Earth. E arly on the morning of August 19, 1946, I was born under a clear sky after a violent summer storm Nike Air Max 90 Shoes to a widowed mother in the Julia Chester Hospital in Hope, a town of about six Ray Ban 3025 Sunglasses 62Mm thousand in southwest Arkansas, thirty-three miles east of the Texas border at Texarkana. My mother named ray ban sunglasses rb3025 me William Jefferson Blythe III after my father, William Jefferson Blythe Jr., one of nine children cheap ray ban 2140 of a poor farmer in Sherman, Texas, who died when my father was seventeen. According to coach outlet his sisters, my father always tried to take care of them, and he grew up to fake ray bans uk be a handsome, hardworking, fun-loving man. He met my mother at Tri-State Hospital in Shreveport, Louisiana, http://www.qualitycheapcoachpurse.com in 1943, when she was training to be a nurse. Many times when I was growing cheap ray ban 2140 up, I asked Mother to tell me the story of their meeting, courting, and marriage. 
He nike air max 1 premium brought a date with some kind of medical emergency into the ward where she was working, coach outlet online and they talked and flirted while the other woman was being treated. On his way out oakley glasses of the hospital, he touched the finger on which she was wearing her boyfriends ring and ray ban sunglasses for sale asked her if she was married. She stammered noshe was single. The next day he sent michael kors outlet purses the other woman flowers and her heart sank. Then he called Mother for a date, explaining shop ray ban sunglasses that he always sent flowers when he ended a relationship. Two months later, they were married and Louis Vuitton neverfull bags he was off to war. He served in a motor pool in the invasion of Italy, Oakley Sunglasses repairing jeeps and tanks. After the war, he returned to Hope for Mother and they moved air max 2015 to Chicago, where he got back his old job as a salesman for the Manbee Equipment oakley sunglasses sale shop Company. They bought a little house in the suburb of Forest Park but couldnt move in Ray Ban UK Shop for a couple of months, and since Mother was pregnant with me, they decided she should nike Air Max go home to Hope until they could get into the new house. On May 17, 1946, ray ban for sale after moving their furniture into their new home, my father was driving from Chicago to Hope oakley sunglasses to fetch his wife. Late at night on Highway 60 outside of Sikeston, Missouri, he lost Ray Ban control of his car, a 1942 Buick, when the right front tire blew out on a cheap ralph lauren online store uk wet road. He was thrown clear of the car but landed in, or crawled into, a http://www.nikeairmaxtheapremium.com drainage ditch dug to reclaim swampland. The ditch held three feet of water. When he was http://www.michaelkorsoutletpurses.com found, after a two-hour search, his hand was grasping a branch above the waterline. He had niek Air Jordan 11 tried but failed to pull himself out. 
He drowned, only twenty-eight years old, married two years niek Air Jordan 11 and eight months, only seven months of which he had spent with Mother. That brief sketch is nike air jordan shoes store about all I ever really knew about my father. All my life I have been hungry Sunglasses Ray Ban On Sale to fill in the blanks, clinging eagerly to every photo or story or scrap of paper Oakeyley Sunglasses Wholesale that would tell me more of the man who gave me life. When I was about twelve, lauren by ralph lauren bedding sitting on my uncle Buddys porch in Hope, a man walked up the steps, looked at Oakley Outlet Store me, and said, Youre Bill Blythes son. You look just like him. I beamed for days. In nike air max 1974, I was running for Congress. It was my first race and the local paper did b&l ray ban usa a feature story on my mother. She was at her regular coffee shop early in the ray ban 4147 review morning discussing the article with a lawyer friend when one of the breakfast regulars she knew nike air max 1 only casually came up to her and said, I was there, I was the first one coach handbags on sale at the wreck that night. He then told Mother what he had seen, including the fact Michael Kors bags outlet that my father had retained enough consciousness or survival instinct to try to claw himself up http://www.airjordan5forsale.com and out of the water before he died. Mother thanked him, went out to her car ray ban 4147 sunglasses and cried, then dried her tears and went to work. In 1993, on Fathers Day, my first oakley sunglasses cheap as President, the Washington Post ran a long investigative story on my father, which was followed air jordan shoes over the next two months by other investigative pieces by the Associated Press and many smaller Air Jordan 4 Retro papers. The stories confirmed the things my mother and I knew. 
They also turned up a ray ban lot we didnt know, including the fact that my father had probably been married three times Ray Ban Sale before he met Mother, and apparently had at least two more children. My fathers other son was Nike Air Max 90 identified as Leon Ritzenthaler, a retired owner of a janitorial service, from northern California. In the http://www.fakeraybansforsale-uk.com article, he said he had written me during the 92 campaign but had received no reply. Cheap Oakley Sunglasses Sale I dont remember hearing about his letter, and considering all the other bullets we were dodging fake ray bans for sale then, its possible that my staff kept it from me. Or maybe the letter was just Ray Ban Sunglasses UK misplaced in the mountains of mail we were receiving. Anyway, when I read about Leon, I Air Max 90 got in touch with him and later met him and his wife, Judy, during one of coach.com my stops in northern California. We had a happy visit and since then weve corresponded in buy oakley sunglasses 2015 holiday seasons. He and I look alike, his birth certificate says his father was mine, and cheap oakley sunglasses wholesale I wish Id known about him a long time ago. Somewhere around this time, I also received Michael Kors sale information confirming news stories about a daughter, Sharon Pettijohn, born Sharon Lee Blythe in Kansas City coach handbags in 1941, to a woman my father later divorced. She sent copies of her birth certificate, oakley outlet her parents marriage license, a photo of my father, and a letter to her mother from Air Jordan my father asking about our baby to Betsey Wright, my former chief of staff in the ray-ban sunglasses governors office. Im sorry to say that, for whatever reason, Ive never met her. This news breaking ralph lauren bedding in 1993 came as a shock to Mother, who by then had been battling cancer for nike air max shoes store some time, but she took it all in stride. 
She said young people did a lot Louboutin Online of things during the Depression and the war that people in another time might disapprove of. Michael Kors online What mattered was that my father was the love of her life and she had no air jordan 5 doubt of his love for her. Whatever the facts, thats all she needed to know as ray ban 4147 review sunglasses her own life moved toward its end. As for me, I wasnt quite sure what to nike air max make of it all, but given the life Ive led, I could hardly be surprised that oakley sunglsses my father was more complicated than the idealized pictures I had lived with for nearly half raybans.com a century. In 1994, as we headed for the celebration of the fiftieth anniversary of D-day, several ray ban usa online store newspapers published a story on my fathers war record, with a snapshot of him in uniform. http://www.2014cheapraybanoutletstores.com Shortly afterward, I received a letter from Umberto Baron of Netcong, New Jersey, recounting his own discount oakley sunglasses experiences during the war and after. He said that he was a young boy in Italy oakley sunglasses outlet store when the Americans arrived, and that he loved to go to their camp, where one soldier coach purses outlet online 2014 in particular befriended him, giving him candy and showing him how engines worked and how to Michael Kors bags factory outlet repair them. He knew him only as Bill. After the war, Baron came to the United States, and, inspired by what he had learned from the soldier who called him Little GI Joe, he opened his own garage and started a family. He told me he had lived the American dream, with a thriving business and three children. He said he owed so much of his success in life to that young soldier, but hadnt had the oakley sunglasses opportunity to say good-bye then, and had often wondered what had happened to him. 
Then, he said, On Memorial Day of this year, I was thumbing through a copy of the New York Daily News oakley sunglasses sale with my morning coffee when suddenly I felt as if I was struck by lightning. There cheap nike air max ltd in the lower left-hand corner of the paper was a photo of Bill. I felt chills nike.com to learn that Bill was none other than the father of the President of the United http://www.cheapralphlaurenonlinestoreuk.com States. In 1996, the children of one of my fathers sisters came for the first time to http://www.oakleysunglasses-outletstore.com our annual family Christmas party at the White House and brought me a gift: the condolence sale ray ban sunglasses online letter my aunt had received from her congressman, the great Sam Rayburn, after my father died. Air Jordan 4 Cement Its just a short form letter and appears to have been signed with the autopen of 2014 cheap ray ban outlet stores the day, but I hugged that letter with all the glee of a six-year-old boy getting ray ban sunglusses his first train set from Santa Claus. I hung it in my private office on the christian louboutin second floor of the White House, and looked at it every night. Shortly after I left the nike air max 1 essential White House, I was boarding the USAir shuttle in Washington for New York when an airline Michael Kors wallets on sale employee stopped me to say that his stepfather had just told him he had served in air jordan the war with my father and had liked him very much. I asked for the old Air Max vets phone number and address, and the man said he didnt have it but would get air max ltd it to me. Im still waiting, hoping there will be one more human connection to my nike air max 1 essential father. At the end of my presidency, I picked a few special places to say goodbye and ray ban sunglasses sale thanks to the American people. One of them was Chicago, where Hillary was born; where I air jordan 5 retro all but clinched the Democratic nomination on St. 
Patricks Day 1992; where many of my most new coach handbags ardent supporters live and many of my most important domestic initiatives in crime, welfare, and education Air Jordan 11 Retro were proved effective; and, of course, where my parents went to live after the war. I ralph lauren outlet used to joke with Hillary that if my father hadnt lost his life on that rainy air jordan sale Missouri highway, I would have grown up a few miles from her and we probably never cheap coach purse would have met. My last event was in the Palmer House Hotel, scene of the only Michael Kors wallets cheap photo I have of my parents together, taken just before Mother came back to Hope in nike air max 1946. After the speech and the good-byes, I went into a small room where I met Cheap Ralph Lauren UK a woman, Mary Etta Rees, and her two daughters. She told me she had grown up Ray Ban UK and gone to high school with my mother, then had gone north to Indiana to work http://www.michaelkorsbagsfactoryoutlet.com in a war industry, married, stayed, and raised her children. Then she gave me another precious nike air max 1 2014 gift: the letter my twenty-three-year-old mother had written on her birthday to her friend, three weeks cheap louis vuitton neverfull bags after my fathers death, more than fifty-four years earlier. It was vintage Mother. In her beautiful http://www.cheaplouisvuittonneverfullbags.com hand, she wrote of her heartbreak and her determination to carry on: It seemed almost unbelievable nike air max thea premium at the time but you see I am six months pregnant and the thought of our cheap ray bans sunglasses baby keeps me going and really gives me the whole world before me. My mother left me Ray Ban Rb3025 the wedding ring she gave my father, a few moving stories, and the sure knowledge that air max thea she was loving me for him too. 
My father left me with the feeling that I had authentic nike air jordan shoes to live for two people, and that if I did it well enough, somehow I could Ray Bans make up for the life he should have had. And his memory infused me, at a Cheap Christian Louboutin younger age than most, with a sense of my own mortality. The knowledge that I, too, oakley eyeglasses could die young drove me both to try to drain the most out of every moment cheap air jordan shoes for sale of life and to get on with the next big challenge. Even when I wasnt sure ray ban usa where I was going, I was always in a hurry.
# -*- coding: utf-8 -*- """ This file is part of checkmate, a meta code checker written in Python. Copyright (C) 2015 Andreas Dewes, QuantifiedCode UG This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from __future__ import unicode_literals from base import BaseCommand from collections import defaultdict import sys import os import random import os.path import copy import json import time import pprint import hashlib import logging logger = logging.getLogger(__name__) from checkmate.management.helpers import filter_filenames_by_checkignore from checkmate.lib.code import CodeEnvironment def diff_objects(objects_a,objects_b,key,comparator,with_unchanged = False): """ Returns a "diff" between two lists of objects. :param key: The key that identifies objects with identical location in each set, such as files with the same path or code objects with the same URL. :param comparator: Comparison functions that decides if two objects are identical. 
""" objects_a_by_key = dict([(key(obj),obj) for obj in objects_a if key(obj)]) objects_b_by_key = dict([(key(obj),obj) for obj in objects_b if key(obj)]) added_objects = [obj for key,obj in objects_b_by_key.items() if key not in objects_a_by_key] deleted_objects = [obj for key,obj in objects_a_by_key.items() if key not in objects_b_by_key] joint_keys = [key for key in objects_a_by_key if key in objects_b_by_key] modified_objects = [objects_b_by_key[key] for key in joint_keys if comparator(objects_a_by_key[key],objects_b_by_key[key]) != 0 ] result = { 'added' : added_objects, 'deleted' : deleted_objects, 'modified' : modified_objects, } if with_unchanged: unchanged_objects = [objects_b_by_key[key] for key in joint_keys if not objects_b_by_key[key] in modified_objects] result['unchanged'] = unchanged_objects return result class Command(BaseCommand): def diff_snapshots(self,code_environment,snapshot_a,snapshot_b): diff = {'snapshot_a' : snapshot_a,'snapshot_b' : snapshot_b,'project' : self.project} def code_object_key(code_object): key = os.path.join(code_object.module_url,code_object.tree_url) return key def code_object_comparator(code_object_a,code_object_b): return code_object_b.hash-code_object_a.hash def file_revision_key(file_revision): return file_revision.path def file_revision_comparator(file_revision_a,file_revision_b): res = 0 if file_revision_a.fr_pk == file_revision_b.fr_pk else -1 return res def issue_key(issue): try: return issue.file_revision.path+":"+issue.analyzer+\ ":"+issue.code+":"+issue.fingerprint except AttributeError: return issue.file_revision.path+":"+issue.analyzer+":"+issue.code def issue_comparator(issue_a,issue_b): if issue_key(issue_a) == issue_key(issue_b): return 0 return -1 file_revisions_a = snapshot_a.get_file_revisions(self.backend) file_revisions_b = snapshot_b.get_file_revisions(self.backend) diff['file_revisions'] = diff_objects(file_revisions_a, file_revisions_b, file_revision_key, file_revision_comparator) #We just generate 
code objects and issues #for the modified file revisions, to save time when diffing. logger.info("Generating list of modified file revisions...") modified_file_revisions_by_path = {} for fr_type in ('modified','added','deleted'): for fr in diff['file_revisions'][fr_type]: if not fr.path in modified_file_revisions_by_path: modified_file_revisions_by_path[fr.path] = fr logger.info("Generating list of modified issues...") modified_file_revisions_a = [fr for fr in file_revisions_a if fr.path in modified_file_revisions_by_path] modified_file_revisions_b = [fr for fr in file_revisions_b if fr.path in modified_file_revisions_by_path] issues_a = self.backend.filter(self.project.Issue, {'project.pk' : self.project.pk, 'file_revision.pk' : {'$in' : [fr.pk for fr in modified_file_revisions_a]} }) issues_b = self.backend.filter(self.project.Issue, {'project.pk' : self.project.pk, 'file_revision.pk' : {'$in' : [fr.pk for fr in modified_file_revisions_b]} }) logger.info("Diffing issues (%d in A, %d in B)" % (len(issues_a),len(issues_b))) diff['issues'] = diff_objects(issues_a,issues_b,issue_key,issue_comparator) logger.info("Diffing summary...") diff['summary'] = code_environment.diff_summaries(snapshot_a,snapshot_b) diff['summary']['issues'] = {} diff['summary']['file_revisions'] = {} logger.info("Summarizing diffed file revisions and issues...") for key in ('added','modified','deleted'): diff['summary']['file_revisions'][key] = len(diff['file_revisions'][key]) diff['summary']['issues'][key] = code_environment.summarize_issues(diff['issues'][key]) #Add summary to snapshot_b, so that it can be displayed without fetching the diff object return diff def run(self): settings = self.project.get_settings(self.backend) if 'ignore' in settings: checkignore = settings['ignore'] else: checkignore = [] checkignore_filter = lambda filenames : filter_filenames_by_checkignore(filenames, checkignore) logger.info("Getting file revisions...") file_revisions = 
self.project.get_disk_file_revisions(file_filters = [checkignore_filter], path_filters = [checkignore_filter]) logger.info("%d file revisions" % len(file_revisions)) snapshot = self.project.DiskSnapshot({'created_at' : time.time()}) try: code_environment = CodeEnvironment(file_revisions, settings = settings) self.analyze_snapshot(snapshot, code_environment, save_if_empty = False) except KeyboardInterrupt: raise def generate_diffs(self,code_environment,snapshot_pairs): diffs = [] logger.info("Generating diffs beween %d snapshot pairs..." % len(snapshot_pairs)) for snapshot_a,snapshot_b in snapshot_pairs: logger.info("Generating a diff between snapshots %s and %s" % (snapshot_a.pk, snapshot_b.pk)) diff = self.diff_snapshots(code_environment,snapshot_a,snapshot_b) diffs.append(diff) return diffs def fingerprint_issues(self,file_revision,issues): content = file_revision.get_file_content() lines = content.split("\n") for issue in issues: lines = "\n".join([line for loc in issue.location for line in lines[loc[0][0]:loc[1][0]]]) sha = hashlib.sha1() sha.update(lines) issue.fingerprint = sha.hexdigest() def annotate_file_revisions(self,snapshot,file_revisions): """ We convert various items in the file revision to documents, so that we can easily search and retrieve them... """ annotations = defaultdict(list) def group_issues_by_code(issues): """ We group the issues by code to avoid generating 100s of issues per file... 
""" issues_for_code = {} for issue in issues: if not issue['code'] in issues_for_code: issues_for_code[issue['code']] = copy.deepcopy(issue) code_issue = issues_for_code[issue['code']] if 'location' in code_issue: del code_issue['location'] if 'data' in code_issue: del code_issue['data'] code_issue['occurences'] = [] code_issue = issues_for_code[issue['code']] issue_data = {} for key in ('location','data'): if key in issue: issue_data[key] = issue[key] code_issue['occurences'].append(issue_data) return issues_for_code.values() for file_revision in file_revisions: for analyzer_name,results in file_revision.results.items(): if 'issues' in results: if len(results['issues']) > 1000: results['issues'] = [ { 'code' : 'TooManyIssues', 'data' : { 'analyzer' : analyzer_name, 'count' : len(results['issues']) }, 'occurences' : [] } ] documents = [] grouped_issues = group_issues_by_code(results['issues']) for issue in grouped_issues: document = self.project.Issue(issue) document.project = self.project document.file_revision = file_revision document.analyzer = analyzer_name documents.append(document) annotations['issues'].extend(documents) del results['issues'] return annotations def analyze_snapshot(self,snapshot,code_environment,save_if_empty = False): logger.info("Analyzing snapshot...") file_revisions = code_environment.file_revisions file_revisions_by_pk = dict([(fr.fr_pk,fr) for fr in file_revisions]) filtered_file_revisions = code_environment.filter_file_revisions(file_revisions) filtered_file_revisions_by_pk = dict([(fr.fr_pk,fr) for fr in filtered_file_revisions]) excluded_file_revisions = [file_revisions_by_pk[pk] for pk in file_revisions_by_pk.keys() if not pk in filtered_file_revisions_by_pk ] logger.info("Excluding %d file revisions" % len(excluded_file_revisions)) file_revisions = filtered_file_revisions file_revisions_by_pk = filtered_file_revisions_by_pk max_file_revisions = 1000 if len(file_revisions) > max_file_revisions: if not 'snapshot_issues' in snapshot: 
snapshot.snapshot_issues = [] snapshot.snapshot_issues.append({ 'code' : 'TooManyFileRevisions', 'data' : { 'count' : len(file_revisions), 'limit' : max_file_revisions } }) logger.warning("Too many file revisions (%d) in snapshot, truncating at %d" % (len(file_revisions),max_file_revisions)) file_revisions_by_pk = dict(sorted(file_revisions_by_pk.items(), key = lambda x:x[0])[:max_file_revisions]) file_revisions = file_revisions_by_pk.values() existing_file_revisions = list(self.backend.filter(snapshot.FileRevision,{ 'project.pk' : self.project.pk, 'fr_pk' : {'$in' : file_revisions_by_pk.keys()} })) existing_file_revisions_by_pk = dict([(fr.fr_pk,fr) for fr in existing_file_revisions]) new_file_revisions = [file_revision for file_revision in file_revisions if not file_revision.fr_pk in existing_file_revisions_by_pk] file_revisions_dict = {} for file_revision in existing_file_revisions+new_file_revisions: file_revisions_dict[file_revision.path] = file_revision logger.info("Analyzing %d new file revisions (%d are already analyzed)" % ( len(new_file_revisions), len(existing_file_revisions) )) i = 0 snapshot_issues = list(self.backend.filter(self.project.Issue, {'file_revision.pk' : {'$in' : [fr.pk for fr in existing_file_revisions] } })) logger.info("Found %d existing issues..." % len(snapshot_issues)) #We set the project information in the snapshot. 
snapshot.project = self.project snapshot.file_revisions = [fr.pk for fr in file_revisions_dict.values()] code_environment.env['snapshot'] = snapshot try: while i < len(new_file_revisions): j = i+10 if i+10 < len(new_file_revisions) else len(new_file_revisions) logger.info("Analyzing and saving: %d - %d (%d remaining)" % (i, j, len(new_file_revisions) - i )) file_revisions_slice = new_file_revisions[i:j] analyzed_file_revisions = code_environment.analyze_file_revisions(file_revisions_slice) logger.info("Annotating and saving file revisions...") annotations = self.annotate_file_revisions(snapshot,analyzed_file_revisions) if 'issues' in annotations: snapshot_issues.extend(annotations['issues']) for file_revision in analyzed_file_revisions: self.backend.save(file_revision) self.backend.commit() for issue in annotations['issues']: self.backend.save(issue) self.backend.commit() i+=10 logger.info("Summarizing file revisions...") snapshot.summary = code_environment.summarize(file_revisions_dict.values()) logger.info("Summarizing issues...") snapshot.issues_summary = code_environment.summarize_issues(snapshot_issues) finally: del code_environment.env['snapshot'] snapshot.analyzed = True logger.info("Saving snapshot...") self.backend.save(snapshot) self.backend.commit() logger.info("Done analyzing snapshot %s" % snapshot.pk) return snapshot
Amines in heteroaromatic systems and pharmaceutical intermediates were functionalized through N-methylation with methanol using a palladium-loaded titanium dioxide (Pd/TiO2) photocatalyst. This method provides access to a series of tertiary N-methylamines bearing N-, O-, and/or S-containing heteroaromatic functionalities from primary/secondary amines and methanol under mild reaction conditions. Facile syntheses of several pharmaceuticals containing N-methyl or N-ethyl groups, as well as related deuterated drugs, were achieved through the late-stage functionalization of amines.