text stringlengths 38 1.54M |
|---|
def my_sum(a, b):
    """Return the sum of *a* and *b*."""
    result = a + b
    return result
def my_max(a, b):
    """Return the larger of *a* and *b* (*b* is returned on a tie)."""
    return a if a > b else b
|
"""
Given an Array, Find the maximum sun sub-array
I/P:
a = [1, 2, 5, -7, 1, 2]
O/P:
[1, 2, 5]
Explanation:
sum of sub array [1, 2, 5] is 8 and it is maximum sum of all sub-array of given array.
"""
def get_max_sum_subarray(arr):
    """Return ``(max_sum, subarray)`` for the maximum-sum contiguous sub-array.

    Kadane's algorithm with start-index tracking.  Matches the original
    contract for degenerate input: an empty or all-negative array yields
    ``(0, [])`` because the running best starts at 0.

    Fixes a defect in the original implementation: the candidate sub-array
    (``temp``) was cleared whenever the running sum dropped below the best
    sum, so the documented example returned ``(8, [])`` instead of
    ``(8, [1, 2, 5])``.
    """
    best_sum = 0
    best_slice = []
    running = 0
    start = 0  # index where the current candidate sub-array begins
    for i, value in enumerate(arr):
        running += value
        if running > best_sum:
            best_sum = running
            best_slice = arr[start:i + 1]
        if running < 0:
            # A negative prefix can never help a later sub-array; restart.
            running = 0
            start = i + 1
    return best_sum, best_slice
if __name__ == "__main__":
    # Smoke test: every element is negative, so the running best never rises
    # above the initial 0 and the function reports (0, []).
    a = [-13, -3, -25, -20, -3, -16, -23, -12, -5, -22, -15, -4, -7]
    print(get_max_sum_subarray(a))
|
def position(liste: list, element: int) -> int:
    """Return the index of the first occurrence of *element* in *liste*.

    Args:
        liste (list): list in which the search is performed.
        element (int): element to look for.

    Returns:
        int: index of the element, or -1 if it is not present.

    Fixes the original implementation, which incremented its counter on
    every iteration and returned it unconditionally, so it answered
    ``len(liste) - 1`` for any non-empty list regardless of whether
    *element* was actually present (and never returned the documented -1).
    """
    for index, value in enumerate(liste):
        if value == element:
            return index
    return -1
def nb_occurence(lst: list, element: int) -> int:
    """Return the number of occurrences of *element* in *lst*."""
    return sum(1 for value in lst if value == element)
def est_triee(liste: list) -> bool:
    """Return True when *liste* is sorted in ascending order.

    Args:
        liste (list): list of integers to check.

    Returns:
        bool: True if sorted ascending, False otherwise.  An empty list is
        reported as not sorted, matching the original behaviour.
    """
    if not liste:
        return False
    return all(a <= b for a, b in zip(liste, liste[1:]))
def position_tri(lst: list, element: int) -> int:
    """Binary-search *element* in the sorted list *lst*.

    Args:
        lst (list): sorted list.
        element (int): number to look for.

    Returns:
        int: index where the element was found, or -1 when the element is
        absent or the list is not sorted ascending (checked via est_triee).
    """
    if not est_triee(lst):
        return -1
    low, high = 0, len(lst) - 1
    while low <= high:
        mid = (low + high) // 2
        if lst[mid] == element:
            return mid
        if element > lst[mid]:
            low = mid + 1
        else:
            high = mid - 1
    return -1
def a_repetitions(liste: list) -> bool:
    """Tell whether *liste* contains any repeated element.

    Args:
        liste (list): list to inspect.

    Returns:
        bool: True as soon as a repetition is found, False otherwise.
    """
    # A list (not a set) is used, as in the original, so unhashable
    # elements keep working; the scan stops at the first duplicate.
    seen = []
    for value in liste:
        if value in seen:
            return True
        seen.append(value)
    return False
"""Functions for interacting with the RSCB PDB web services."""
import requests
import xml.etree.ElementTree as ElementTree
import molecupy
# Base URL shared by every RCSB PDB legacy REST endpoint.
ROOT_URL = "http://www.rcsb.org/pdb/rest/"
# XML skeleton for advanced-search POST bodies: the first %s receives the
# simple query type name, the second the block of criterion elements.
advanced_search_xml = """<orgPdbQuery>
<queryType>org.pdb.query.simple.%s</queryType>
%s
</orgPdbQuery>"""
def query_rcsb(query_type, criteria):
    """Queries the RSCB PDB web services with a simple GET request.

    :param str query_type: The type of query to make
    :param str criteria: The criteria for this query
    :rtype: ``ElementTree`` XML element, or None for non-XML responses"""
    params = "&".join("%s=%s" % (key, criteria[key]) for key in criteria)
    url = "%s%s?%s" % (ROOT_URL, query_type, params)
    response = requests.get(url)
    if "xml" not in response.headers["Content-Type"]:
        return None
    return ElementTree.fromstring(response.text)
def query_rcsb_advanced(query_type, criteria):
    """Queries the RSCB PDB web services as an advanced search with a POST request.

    :param str query_type: The type of query to make
    :param str criteria: The criteria for this query
    :returns: list of ``str`` PDB codes, or None when the service reports a problem"""
    criteria_xml = "\n".join(
        "<%s>%s</%s>" % (key, criteria[key], key) for key in criteria
    )
    query_xml = advanced_search_xml % (query_type, criteria_xml)
    response = requests.post(
        "%ssearch" % ROOT_URL,
        data=query_xml.encode(),
        headers={"Content-Type": "application/x-www-form-urlencoded"}
    )
    lowered = response.text.lower()
    if "problem" in lowered or "null" in lowered:
        return None
    return response.text.split()
def ask_about_molecupy(func):
    """A decorator which, when applied to a function, will add an 'as_molecupy'
    keyword argument - if set to True this will convert any PDB codes the
    function returns to `molecuPy <http://molecupy.readthedocs.io>`_ PDB objects.

    Improvement: uses :func:`functools.wraps`, which copies the wrapped
    function's module, qualname, annotations and ``__dict__`` in addition to
    the ``__name__``/``__doc__`` the original copied by hand.
    """
    import functools

    @functools.wraps(func)
    def new_func(*args, as_molecupy=False, **kwargs):
        pdbs = func(*args, **kwargs)
        if as_molecupy:
            return [molecupy.get_pdb_remotely(pdb) for pdb in pdbs]
        return pdbs

    return new_func
|
# some guidance from https://sourceforge.net/p/raspberry-gpio-python/wiki/Inputs/
# and http://henrysbench.capnfatz.com/henrys-bench/arduino-sensors-and-input/arduino-hc-sr501-motion-sensor-tutorial/
from subprocess import call
def screen_on(value):
    """Switch the attached display on (truthy *value*) or off via vcgencmd."""
    state = "1" if value else "0"
    call(["vcgencmd", "display_power", state])
# Motion-triggered screen blanking: only armed when RPi.GPIO is importable,
# i.e. when actually running on a Raspberry Pi.
try:
    import threading
    import RPi.GPIO as GPIO
    import time

    # BOARD-numbered pin wired to the HC-SR501 PIR sensor output.
    channel = 8
    # Holds the pending screen-off threading.Timer, or 0 when none is armed.
    t = 0

    def pir_activity(channel):
        # Edge callback: on motion (high) cancel any pending blank and wake
        # the screen; on no-motion (low) arm a 60 s timer that blanks it.
        global t
        value = GPIO.input(channel)
        print('PIR callback: {}'.format(value))
        if value == True:
            if t != 0:
                t.cancel()
            screen_on(True)
        else:
            t = threading.Timer(60.0, screen_on, [False])
            t.start()

    GPIO.setmode(GPIO.BOARD)
    # bouncetime is in milliseconds
    # internet sources generally consider this part to be 'open collector' which
    # needs a pullup. Some say the pullup is built-in, however
    GPIO.setup(channel, GPIO.IN, pull_up_down=GPIO.PUD_UP)
    GPIO.add_event_detect(channel, GPIO.BOTH, callback=pir_activity, bouncetime=250)
except ImportError:
    print('not attempting GPIO/screen blank')
|
from .constants import APPLICATION_NAME, APPLICATION_PASSWORD, MERCHANT_CODE, REFENCE_NUMBER_PREFIX
from datetime import datetime
import random
"""
These are static util function which are implemented to create request dicts for each api service with given parameters
These functions are related to api's payment operation
"""
def create_request_header(client_ip):
    """Build the common request header sent with every payment API call."""
    # strftime('%f') yields microseconds; dropping the last three digits
    # leaves a millisecond-precision timestamp.
    transaction_time = datetime.now().strftime('%Y%m%d%H%M%S%f')[:-3]
    # NOTE(review): `random` is not cryptographically secure and the range is
    # 19-20 digits wide — confirm the API's uniqueness/length requirements.
    header = dict(
        applicationName=APPLICATION_NAME,
        applicationPwd=APPLICATION_PASSWORD,
        clientIPAddress=client_ip,
        transactionDateTime=transaction_time,
        transactionId=random.randrange(1000000000000000000, 99999999999999999999),
    )
    return header
def create_provision_request(card_id, card_token, msisdn, amount, installment_count, currency, payment_type, threed_session_id, client_ip):
    """Build the request body for the provision (payment) service."""
    header = create_request_header(client_ip)
    return dict(
        requestHeader=header,
        cardId=card_id,
        cardToken=card_token,
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        # Reference numbers are the configured prefix plus the header's
        # millisecond timestamp.
        referenceNumber=REFENCE_NUMBER_PREFIX + header["transactionDateTime"],
        amount=amount,
        installmentCount=installment_count,
        currency=currency,
        paymentType=payment_type,
        acquirerBankCode="111",
        threeDSessionId=threed_session_id,
    )
def create_inquire_provision_request(msisdn, reference_number, client_ip):
    """Build the request body for inquiring about an earlier provision."""
    header = create_request_header(client_ip)
    return dict(
        requestHeader=header,
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        referenceNumber=REFENCE_NUMBER_PREFIX + header["transactionDateTime"],
        originalReferenceNumber=reference_number,
    )
def create_reverse_provision_request(msisdn, reference_number, client_ip):
    """Build the request body for reversing an earlier provision."""
    header = create_request_header(client_ip)
    return dict(
        requestHeader=header,
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        referenceNumber=REFENCE_NUMBER_PREFIX + header["transactionDateTime"],
        originalReferenceNumber=reference_number,
    )
def create_refund_provision_request(msisdn, reference_number, amount, client_ip):
    """Build the request body for refunding part or all of a provision."""
    header = create_request_header(client_ip)
    return dict(
        requestHeader=header,
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        amount=amount,
        referenceNumber=REFENCE_NUMBER_PREFIX + header["transactionDateTime"],
        originalReferenceNumber=reference_number,
    )
#
# Only one of cardId and cardToken may have a value when requesting a 3-D
# session id.  In the provision service, however, both may carry values
# (case: paying with a stored card + CVC).
#
def create_threed_session_request(msisdn, amount, card_id, card_token, client_ip):
    """Build the request body that asks for a 3-D Secure session id."""
    header = create_request_header(client_ip)
    return dict(
        requestHeader=header,
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        amount=amount,
        target="MERCHANT",
        transactionType="AUTH",
        cardId=card_id,
        cardToken=card_token,
    )
def create_start_threed_session_request(session_id):
    """Build the request body that starts an existing 3-D Secure session."""
    return dict(
        threeDSessionId=session_id,
        callbackurl="localhost:9090/threedlistener/",
    )
def create_threed_session_result_request(msisdn, session_id, client_ip):
    """Build the request body that queries the result of a 3-D session."""
    return dict(
        requestHeader=create_request_header(client_ip),
        merchantCode=MERCHANT_CODE,
        msisdn=msisdn,
        threeDSessionId=session_id,
    )
def create_summary_reconcile_request(reconciliation_date, total_refund_amount, total_refund_count, total_reverse_amount, total_reverse_count, total_sale_amount, total_sale_count,
                                     total_post_auth_amount, total_post_auth_count, total_post_auth_reverse_amount, total_post_auth_reverse_count, total_pre_auth_amount, total_pre_auth_count,
                                     total_pre_auth_reverse_amount, total_pre_auth_reverse_count, client_ip):
    """Build the end-of-day summary reconciliation request body.

    The pre/post-auth totals use ``x or None`` so that falsy values (0, "",
    None) are sent as JSON null rather than zero.
    """
    return dict(
        requestHeader=create_request_header(client_ip),
        merchantCode=MERCHANT_CODE,
        reconciliationDate=reconciliation_date,
        totalRefundAmount=total_refund_amount,
        totalRefundCount=total_refund_count,
        totalReverseAmount=total_reverse_amount,
        totalReverseCount=total_reverse_count,
        totalSaleAmount=total_sale_amount,
        totalSaleCount=total_sale_count,
        totalPostAuthAmount=total_post_auth_amount or None,
        totalPostAuthCount=total_post_auth_count or None,
        totalPostAuthReverseAmount=total_post_auth_reverse_amount or None,
        totalPostAuthReverseCount=total_post_auth_reverse_count or None,
        totalPreAuthAmount=total_pre_auth_amount or None,
        totalPreAuthCount=total_pre_auth_count or None,
        totalPreAuthReverseAmount=total_pre_auth_reverse_amount or None,
        totalPreAuthReverseCount=total_pre_auth_reverse_count or None,
    )
def create_get_history_request(reconciliation_date, partition_no, client_ip):
    """Build the request body that fetches one partition of history."""
    return dict(
        requestHeader=create_request_header(client_ip),
        merchantCode=MERCHANT_CODE,
        partitionNo=partition_no,
        reconciliationDate=reconciliation_date,
    )
def create_get_terms_of_service_request(client_ip):
    """Build the request body for the terms-of-service endpoint (header only)."""
    return dict(requestHeader=create_request_header(client_ip))
|
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
import tensorflow as tf
from tensorflow_federated.python.core.backends.native import mergeable_comp_compiler
from tensorflow_federated.python.core.impl.execution_contexts import async_execution_context
from tensorflow_federated.python.core.impl.execution_contexts import mergeable_comp_execution_context
from tensorflow_federated.python.core.impl.executor_stacks import executor_factory
from tensorflow_federated.python.core.impl.federated_context import federated_computation
from tensorflow_federated.python.core.impl.federated_context import intrinsics
from tensorflow_federated.python.core.impl.tensorflow_context import tensorflow_computation
from tensorflow_federated.python.core.impl.types import computation_types
from tensorflow_federated.python.core.impl.types import placements
from tensorflow_federated.python.core.impl.types import type_test_utils
def build_whimsy_computation_with_aggregation_and_after(
    server_arg_type, clients_arg_type
):
  """Builds a federated computation with aggregation plus after-merge work.

  The returned computation sums the client values (federated_sum) and then
  adds the result to the server argument via a server-side federated_map.
  """

  @tensorflow_computation.tf_computation(
      server_arg_type.member, clients_arg_type.member
  )
  def compute_sum(x, y):
    return x + y

  @federated_computation.federated_computation(
      server_arg_type, clients_arg_type
  )
  def aggregation_comp(server_arg, client_arg):
    summed_client_value = intrinsics.federated_sum(client_arg)
    # Work *after* the aggregation: combine the sum with the server state.
    return intrinsics.federated_map(
        compute_sum, (server_arg, summed_client_value)
    )

  return aggregation_comp
def build_whimsy_computation_with_before_aggregation_work(
    server_arg_type, clients_arg_type
):
  """Builds a federated computation with client-side work before aggregation.

  Clients first reduce their local 2-tuples to a sum, then the sums are
  aggregated and combined with the server argument.
  """

  @tensorflow_computation.tf_computation(clients_arg_type.member)
  def compute_tuple_sum(x):
    return x[0] + x[1]

  @tensorflow_computation.tf_computation(
      server_arg_type.member, clients_arg_type.member[0]
  )
  def compute_sum(x, y):
    return x + y

  @federated_computation.federated_computation(
      server_arg_type, clients_arg_type
  )
  def aggregation_comp(server_arg, client_arg):
    # Per-client work *before* the aggregation.
    client_sums = intrinsics.federated_map(compute_tuple_sum, client_arg)
    summed_client_value = intrinsics.federated_sum(client_sums)
    return intrinsics.federated_map(
        compute_sum, (server_arg, summed_client_value)
    )

  return aggregation_comp
def build_whimsy_computation_with_false_aggregation_dependence(
    server_arg_type, clients_arg_type
):
  """Builds a computation whose second aggregate only *appears* dependent.

  The second federated_sum consumes an element of a tuple that also contains
  a broadcast of the first sum, so a naive syntactic check would flag an
  aggregate-on-aggregate dependence that does not actually exist.
  """

  @tensorflow_computation.tf_computation(clients_arg_type.member)
  def compute_tuple_sum(x):
    return x[0] + x[1]

  @tensorflow_computation.tf_computation(
      server_arg_type.member, clients_arg_type.member[0]
  )
  def compute_sum(x, y):
    return x + y

  @federated_computation.federated_computation
  def package_args_as_tuple(x, y):
    return [x, y]

  @federated_computation.federated_computation(
      server_arg_type, clients_arg_type
  )
  def aggregation_comp(server_arg, client_arg):
    client_sums = intrinsics.federated_map(compute_tuple_sum, client_arg)
    summed_client_value = intrinsics.federated_sum(client_sums)
    broadcast_sum = intrinsics.federated_broadcast(summed_client_value)
    # Adding a function call here requires normalization into CDF before
    # checking the aggregation-dependence condition.
    client_tuple = package_args_as_tuple(client_sums, broadcast_sum)
    summed_client_value = intrinsics.federated_sum(client_tuple[0])
    return intrinsics.federated_map(
        compute_sum, (server_arg, summed_client_value)
    )

  return aggregation_comp
# Standalone TF computation fixture: multiplies two int32s.
@tensorflow_computation.tf_computation(tf.int32, tf.int32)
def tf_multiply_int(x, y):
  return x * y
# Fixture returning its arguments in a Python list, used to check that
# Python container types survive compilation to mergeable form.
@federated_computation.federated_computation(tf.int32, tf.int32)
def return_list(x, y):
  return [x, y]
# Fixture applying tf_multiply_int to a server-placed pair of int32s.
@federated_computation.federated_computation(
    computation_types.at_server([tf.int32, tf.int32])
)
def server_placed_mult(arg):
  return intrinsics.federated_map(tf_multiply_int, arg)
class MergeableCompCompilerTest(absltest.TestCase):
  """Tests compilation to MergeableCompForm and execution semantics.

  Each "compiles_*" test asserts only that compilation produces a
  MergeableCompForm; each "preserves_semantics_*" test additionally runs
  the compiled form in a mergeable execution context and checks results.
  """

  def setUp(self):
    # One local async execution context backs every invocation below.
    ex_factory = executor_factory.local_cpp_executor_factory(
        default_num_clients=0
    )
    self._mergeable_comp_context = (
        mergeable_comp_execution_context.MergeableCompExecutionContext(
            [async_execution_context.AsyncExecutionContext(ex_factory)]
        )
    )
    super().setUp()

  def _invoke_mergeable_form_on_arg(
      self,
      mergeable_form: mergeable_comp_execution_context.MergeableCompForm,
      arg,
  ):
    # Helper: run a compiled mergeable form on `arg` in the test context.
    return self._mergeable_comp_context.invoke(mergeable_form, arg)

  def test_raises_two_dependent_aggregates(self):
    # Two aggregates where the second genuinely consumes the first must be
    # rejected by the compiler.
    @federated_computation.federated_computation(
        computation_types.at_server(tf.int32)
    )
    def dependent_agg_comp(server_arg):
      arg_at_clients = intrinsics.federated_broadcast(server_arg)
      sum_result = intrinsics.federated_sum(arg_at_clients)
      rebroadcast_sum = intrinsics.federated_broadcast(sum_result)
      return intrinsics.federated_sum(rebroadcast_sum)

    with self.assertRaisesRegex(
        ValueError, 'one aggregate dependent on another'
    ):
      mergeable_comp_compiler.compile_to_mergeable_comp_form(dependent_agg_comp)

  def test_preserves_python_containers_in_after_merge(self):
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        return_list
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )
    type_test_utils.assert_types_identical(
        mergeable_form.after_merge.type_signature.result,
        return_list.type_signature.result,
    )

  def test_compiles_standalone_tensorflow_computation(self):
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        tf_multiply_int
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_compilation_preserves_semantics_standalone_tf(self):
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        tf_multiply_int
    )
    expected_zero = self._invoke_mergeable_form_on_arg(mergeable_form, (1, 0))
    expected_two = self._invoke_mergeable_form_on_arg(mergeable_form, (1, 2))
    expected_six = self._invoke_mergeable_form_on_arg(mergeable_form, (2, 3))
    self.assertEqual(expected_zero, 0)
    self.assertEqual(expected_two, 2)
    self.assertEqual(expected_six, 6)

  def test_compiles_simple_noarg_computation(self):
    @federated_computation.federated_computation()
    def return_server_value():
      return intrinsics.federated_value(0, placements.SERVER)

    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        return_server_value
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_preserves_semantics_of_noarg_computation(self):
    @federated_computation.federated_computation()
    def return_server_value():
      return intrinsics.federated_value(0, placements.SERVER)

    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        return_server_value
    )
    result = self._invoke_mergeable_form_on_arg(mergeable_form, None)
    self.assertEqual(result, 0)

  def test_compiles_server_placed_computation(self):
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        server_placed_mult
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_compilation_preserves_semantics_server_placed_computation(self):
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        server_placed_mult
    )
    expected_zero = self._invoke_mergeable_form_on_arg(mergeable_form, (1, 0))
    expected_two = self._invoke_mergeable_form_on_arg(mergeable_form, (1, 2))
    expected_six = self._invoke_mergeable_form_on_arg(mergeable_form, (2, 3))
    self.assertEqual(expected_zero, 0)
    self.assertEqual(expected_two, 2)
    self.assertEqual(expected_six, 6)

  def test_compiles_computation_with_aggregation_and_after(self):
    incoming_comp = build_whimsy_computation_with_aggregation_and_after(
        computation_types.at_server(tf.int32),
        computation_types.at_clients(tf.int32),
    )
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        incoming_comp
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_compilation_preserves_semantics_aggregation_and_after(self):
    incoming_comp = build_whimsy_computation_with_aggregation_and_after(
        computation_types.at_server(tf.int32),
        computation_types.at_clients(tf.int32),
    )
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        incoming_comp
    )
    arg = (100, list(range(100)))
    result = self._invoke_mergeable_form_on_arg(mergeable_form, arg)
    # Expected result is the sum of all the arguments, IE the sum of all
    # integers from 0 to 100, which is 101 * 100 / 2.
    self.assertEqual(result, 101 * 100 / 2)

  def test_compiles_computation_with_before_aggregation_work(self):
    incoming_comp = build_whimsy_computation_with_before_aggregation_work(
        computation_types.at_server(tf.int32),
        computation_types.at_clients([tf.int32, tf.int32]),
    )
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        incoming_comp
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_compiles_computation_with_false_aggregation_dependence(self):
    incoming_comp = build_whimsy_computation_with_false_aggregation_dependence(
        computation_types.at_server(tf.int32),
        computation_types.at_clients([tf.int32, tf.int32]),
    )
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        incoming_comp
    )
    self.assertIsInstance(
        mergeable_form, mergeable_comp_execution_context.MergeableCompForm
    )

  def test_compilation_preserves_semantics_before_agg_work(self):
    incoming_comp = build_whimsy_computation_with_before_aggregation_work(
        computation_types.at_server(tf.int32),
        computation_types.at_clients([tf.int32, tf.int32]),
    )
    mergeable_form = mergeable_comp_compiler.compile_to_mergeable_comp_form(
        incoming_comp
    )
    arg = (100, [(x, x) for x in range(100)])
    result = self._invoke_mergeable_form_on_arg(mergeable_form, arg)
    # Expected result is again the sum of all arguments, which in this case is
    # 2 * 99 * 100 / 2 + 100
    self.assertEqual(result, 99 * 100 + 100)
# Standard absltest entry point.
if __name__ == '__main__':
  absltest.main()
|
class Solution(object):
    def solveNQueens(self, n):
        """
        :type n: int
        :rtype: List[List[str]]
        """
        # Attack bookkeeping: a queen at (r, c) owns column c, anti-diagonal
        # c - r and diagonal c + r.  Each placed queen registers those three
        # keys; a candidate square is safe iff none of its keys are taken.
        # Passing the sets down the recursion (rather than globals) keeps the
        # backtracking state explicit.
        solutions = []
        grid = [["."] * n for _ in range(n)]

        def backtrack(r, cols, left_diag, right_diag):
            if r == n:
                solutions.append(["".join(line) for line in grid])
                return
            for c in range(n):
                if c in cols or c - r in left_diag or c + r in right_diag:
                    continue
                grid[r][c] = 'Q'
                cols.add(c)
                left_diag.add(c - r)
                right_diag.add(c + r)
                backtrack(r + 1, cols, left_diag, right_diag)
                # Undo the placement before trying the next column.
                grid[r][c] = '.'
                cols.remove(c)
                left_diag.remove(c - r)
                right_diag.remove(c + r)

        backtrack(0, set(), set(), set())
        return solutions
# Careful: do not return early from the recursive helper when enumerating all
# solutions — no True/False result is needed.
# Returning True/False is only appropriate when the problem asks for a single
# solution.
# The column/diagonal projection trick keeps safety checks O(1). Beats 96%.
from controls import *
from send_email import activation_email
import json
class PostForm():
    """View state for the posting page: open/filled requests plus counts."""

    def __init__(self):
        self.current = request_info()
        self.filled = filled_reqs()
        self.state = "normal"
        # The backend signals "no rows" with [[]]; report a length of 0 then.
        self.leng = 0 if self.current == [[]] else len(self.current)
        self.leng_2 = 0 if self.filled == [[]] else len(self.filled)
class AdminForm():
    """Admin view of the request queues, with delete and invite actions."""

    def __init__(self):
        # Open and filled request rows as returned by the controls module.
        self.current = request_info()
        self.filled = filled_reqs()
        self.leng = len(self.current)
        self.leng_2 = len(self.filled)
        self.state = "normal"
        # [[]] is the backend's "no rows" marker; normalise those lengths to 0.
        if self.current == [[]]:
            self.leng = 0
        if self.filled == [[]]:
            self.leng_2 = 0

    def delete_req(self, rid):
        # Delete request `rid`, then refresh the view for the current state.
        del_req(rid)
        if self.state == 'filled':
            # NOTE(review): self.filled is assigned a list in __init__, so
            # calling it here would raise TypeError unless it is rebound to a
            # method elsewhere — verify against callers.
            self.filled()
        elif self.state == 'unapproved':
            # NOTE(review): no unapproved()/reset() methods are visible in
            # this class — presumably provided elsewhere; confirm.
            self.unapproved()
        else:
            self.reset()

    def invite_member(self, email, code):
        # Send the activation e-mail, then record the pending invitation.
        activation_email(email, code)
        create_pending(email, code)
class AccountForm():
    """Account page state for one user: identity fields plus request lists.

    Improvements over the original: ``is None`` instead of ``== None``
    (identity comparison is the idiomatic None test), and the duplicated
    if/else branch bodies collapsed into conditional expressions.
    """

    def __init__(self, username):
        data = my_info(username)
        self.id = data["id"]
        self.username = username
        self.email = username + "@reed.edu"
        self.name = data["name"]
        self.admin = data["admin"]
        self.approved = data["approved"]
        current, filled = my_reqs(username)
        # my_reqs returns None for a missing side; mirror that as ([[]], 0).
        self.currentRequests = [[]] if current is None else current
        self.currentLen = 0 if current is None else len(current)
        self.filledRequests = [[]] if filled is None else filled
        self.filledLen = 0 if filled is None else len(filled)

    def delete_req(self, rid):
        # Remove the request with id `rid` via the controls layer.
        del_req(rid)

    def changeName(self, newname):
        # Persist the new display name for this account.
        change_name(self.id, newname)
|
import mimetypes
import os
from multiprocessing.pool import ThreadPool
import boto3
# Directory the static-site generator builds into; everything under it is
# mirrored to S3.
SITE_DIR = '_site'
# Shared S3 client; boto3 reads credentials from the environment/config.
s3 = boto3.client('s3')
def is_woff(path):
    """Return True for web-font files (paths ending in 'woff' or 'woff2')."""
    return path.endswith(('woff', 'woff2'))
def upload_file(args):
    """Upload one ``(filepath, mime)`` pair to the S3 website bucket.

    The object key is the path relative to SITE_DIR.  Fixed: the original
    used the Python 2 ``print`` statement, a syntax error under Python 3;
    the parenthesized call below behaves identically on both versions.
    """
    filepath, mime = args
    key = filepath.replace(SITE_DIR + '/', '')
    print('Uploading {}...'.format(key))
    s3.upload_file(
        filepath,
        'aws-website-christinanavacom-zkmp7',
        key,
        ExtraArgs={
            'ContentType': mime,
            'ACL': 'public-read',
        }
    )
if __name__ == '__main__':
    # Build up a list of files that will be uploaded to Amazon S3.
    to_upload = []
    for dirpath, dirnames, filenames in os.walk(SITE_DIR):
        for filename in filenames:
            if filename == '.DS_Store':
                continue
            path = os.path.join(dirpath, filename)
            mime, _ = mimetypes.guess_type(path)
            if mime is None:
                # mimetypes does not know woff fonts; anything else unknown
                # is fatal so broken uploads are caught before they happen.
                if is_woff(path):
                    mime = 'application/x-font-woff'
                else:
                    raise Exception('Could not guess Content-Type for: ' + path)
            to_upload.append((path, mime))
    # Upload site files to S3 in parallel.
    # Fixed: the original passed the undefined name `all_files` to pool.map,
    # which raised NameError; the list built above is `to_upload`.  The pool
    # is also closed and joined so all uploads finish before exit.
    pool = ThreadPool(processes=10)
    pool.map(upload_file, to_upload)
    pool.close()
    pool.join()
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'c:\Users\lex\py_proj\survillace_generator\main_window.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1153, 801)
MainWindow.setMinimumSize(QtCore.QSize(1153, 801))
MainWindow.setMaximumSize(QtCore.QSize(1153, 825))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName("horizontalLayout")
self.tabPage = QtWidgets.QTabWidget(self.centralwidget)
self.tabPage.setObjectName("tabPage")
self.tab1 = QtWidgets.QWidget()
self.tab1.setObjectName("tab1")
self.cb1 = QtWidgets.QComboBox(self.tab1)
self.cb1.setGeometry(QtCore.QRect(1010, 710, 111, 21))
font = QtGui.QFont()
font.setFamily("宋体")
font.setPointSize(12)
self.cb1.setFont(font)
self.cb1.setObjectName("cb1")
self.cb1.addItem("")
self.cb1.addItem("")
self.cb1.addItem("")
self.horizontalLayoutWidget = QtWidgets.QWidget(self.tab1)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 1131, 701))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.pageLayout1 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
self.pageLayout1.setContentsMargins(0, 0, 0, 0)
self.pageLayout1.setObjectName("pageLayout1")
self.frame = QtWidgets.QFrame(self.horizontalLayoutWidget)
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.frame_13 = QtWidgets.QFrame(self.frame)
self.frame_13.setGeometry(QtCore.QRect(840, 340, 271, 161))
self.frame_13.setFrameShape(QtWidgets.QFrame.Box)
self.frame_13.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_13.setObjectName("frame_13")
self.label1_4 = QtWidgets.QLabel(self.frame_13)
self.label1_4.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_4.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_4.setAlignment(QtCore.Qt.AlignCenter)
self.label1_4.setObjectName("label1_4")
self.pushButton1_4 = QtWidgets.QPushButton(self.frame_13)
self.pushButton1_4.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_4.setObjectName("pushButton1_4")
self.frame_15 = QtWidgets.QFrame(self.frame)
self.frame_15.setGeometry(QtCore.QRect(840, 0, 271, 161))
self.frame_15.setFrameShape(QtWidgets.QFrame.Box)
self.frame_15.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_15.setObjectName("frame_15")
self.label1_2 = QtWidgets.QLabel(self.frame_15)
self.label1_2.setGeometry(QtCore.QRect(13, 4, 251, 151))
self.label1_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_2.setAlignment(QtCore.Qt.AlignCenter)
self.label1_2.setObjectName("label1_2")
self.pushButton1_2 = QtWidgets.QPushButton(self.frame_15)
self.pushButton1_2.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_2.setObjectName("pushButton1_2")
self.frame1_11 = QtWidgets.QFrame(self.frame)
self.frame1_11.setGeometry(QtCore.QRect(560, 510, 271, 161))
self.frame1_11.setFrameShape(QtWidgets.QFrame.Box)
self.frame1_11.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame1_11.setObjectName("frame1_11")
self.label1_7 = QtWidgets.QLabel(self.frame1_11)
self.label1_7.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_7.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_7.setAlignment(QtCore.Qt.AlignCenter)
self.label1_7.setObjectName("label1_7")
self.pushButton1_7 = QtWidgets.QPushButton(self.frame1_11)
self.pushButton1_7.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_7.setObjectName("pushButton1_7")
self.frame_12 = QtWidgets.QFrame(self.frame)
self.frame_12.setGeometry(QtCore.QRect(840, 510, 271, 161))
self.frame_12.setFrameShape(QtWidgets.QFrame.Box)
self.frame_12.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_12.setObjectName("frame_12")
self.label1_8 = QtWidgets.QLabel(self.frame_12)
self.label1_8.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_8.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_8.setAlignment(QtCore.Qt.AlignCenter)
self.label1_8.setObjectName("label1_8")
self.pushButton1_8 = QtWidgets.QPushButton(self.frame_12)
self.pushButton1_8.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_8.setObjectName("pushButton1_8")
self.frame_14 = QtWidgets.QFrame(self.frame)
self.frame_14.setGeometry(QtCore.QRect(840, 170, 271, 161))
self.frame_14.setFrameShape(QtWidgets.QFrame.Box)
self.frame_14.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_14.setObjectName("frame_14")
self.label1_3 = QtWidgets.QLabel(self.frame_14)
self.label1_3.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_3.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_3.setAlignment(QtCore.Qt.AlignCenter)
self.label1_3.setObjectName("label1_3")
self.pushButton1_3 = QtWidgets.QPushButton(self.frame_14)
self.pushButton1_3.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_3.setObjectName("pushButton1_3")
self.frame1_10 = QtWidgets.QFrame(self.frame)
self.frame1_10.setGeometry(QtCore.QRect(280, 510, 271, 161))
self.frame1_10.setFrameShape(QtWidgets.QFrame.Box)
self.frame1_10.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame1_10.setObjectName("frame1_10")
self.label1_6 = QtWidgets.QLabel(self.frame1_10)
self.label1_6.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_6.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_6.setAlignment(QtCore.Qt.AlignCenter)
self.label1_6.setObjectName("label1_6")
self.pushButton1_6 = QtWidgets.QPushButton(self.frame1_10)
self.pushButton1_6.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_6.setObjectName("pushButton1_6")
self.frame_9 = QtWidgets.QFrame(self.frame)
self.frame_9.setGeometry(QtCore.QRect(0, 510, 271, 161))
self.frame_9.setFrameShape(QtWidgets.QFrame.Box)
self.frame_9.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_9.setObjectName("frame_9")
self.label1_5 = QtWidgets.QLabel(self.frame_9)
self.label1_5.setGeometry(QtCore.QRect(10, 10, 251, 141))
self.label1_5.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_5.setAlignment(QtCore.Qt.AlignCenter)
self.label1_5.setObjectName("label1_5")
self.pushButton1_5 = QtWidgets.QPushButton(self.frame_9)
self.pushButton1_5.setGeometry(QtCore.QRect(160, 130, 101, 23))
self.pushButton1_5.setObjectName("pushButton1_5")
self.frame_2 = QtWidgets.QFrame(self.frame)
self.frame_2.setGeometry(QtCore.QRect(0, 0, 831, 501))
self.frame_2.setFrameShape(QtWidgets.QFrame.Box)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.label1_1 = QtWidgets.QLabel(self.frame_2)
self.label1_1.setGeometry(QtCore.QRect(10, 10, 811, 481))
self.label1_1.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label1_1.setAlignment(QtCore.Qt.AlignCenter)
self.label1_1.setObjectName("label1_1")
self.pushButton1_1 = QtWidgets.QPushButton(self.frame_2)
self.pushButton1_1.setGeometry(QtCore.QRect(720, 470, 101, 23))
self.pushButton1_1.setObjectName("pushButton1_1")
self.pageLayout1.addWidget(self.frame)
self.progressBar = QtWidgets.QProgressBar(self.tab1)
self.progressBar.setGeometry(QtCore.QRect(0, 710, 941, 20))
font = QtGui.QFont()
font.setFamily("宋体")
font.setPointSize(12)
self.progressBar.setFont(font)
self.progressBar.setProperty("value", 24)
self.progressBar.setObjectName("progressBar")
self.tabPage.addTab(self.tab1, "")
self.tab2 = QtWidgets.QWidget()
self.tab2.setObjectName("tab2")
self.cb2 = QtWidgets.QComboBox(self.tab2)
self.cb2.setGeometry(QtCore.QRect(1010, 710, 101, 21))
font = QtGui.QFont()
font.setFamily("宋体")
font.setPointSize(12)
self.cb2.setFont(font)
self.cb2.setObjectName("cb2")
self.cb2.addItem("")
self.cb2.addItem("")
self.cb2.addItem("")
self.horizontalLayoutWidget_3 = QtWidgets.QWidget(self.tab2)
self.horizontalLayoutWidget_3.setGeometry(QtCore.QRect(0, 0, 1131, 701))
self.horizontalLayoutWidget_3.setObjectName("horizontalLayoutWidget_3")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_3)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.frame_4 = QtWidgets.QFrame(self.horizontalLayoutWidget_3)
self.frame_4.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_4.setObjectName("frame_4")
self.frame_73 = QtWidgets.QFrame(self.frame_4)
self.frame_73.setGeometry(QtCore.QRect(20, 430, 211, 131))
self.frame_73.setFrameShape(QtWidgets.QFrame.Box)
self.frame_73.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_73.setObjectName("frame_73")
self.label_102 = QtWidgets.QLabel(self.frame_73)
self.label_102.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_102.setAlignment(QtCore.Qt.AlignCenter)
self.label_102.setObjectName("label_102")
self.pushButton_102 = QtWidgets.QPushButton(self.frame_73)
self.pushButton_102.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_102.setObjectName("pushButton_102")
self.frame_18 = QtWidgets.QFrame(self.frame_4)
self.frame_18.setGeometry(QtCore.QRect(20, 10, 211, 131))
self.frame_18.setFrameShape(QtWidgets.QFrame.Box)
self.frame_18.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_18.setObjectName("frame_18")
self.label_11 = QtWidgets.QLabel(self.frame_18)
self.label_11.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_11.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_11.setAlignment(QtCore.Qt.AlignCenter)
self.label_11.setObjectName("label_11")
self.pushButton_11 = QtWidgets.QPushButton(self.frame_18)
self.pushButton_11.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_11.setObjectName("pushButton_11")
self.frame_74 = QtWidgets.QFrame(self.frame_4)
self.frame_74.setGeometry(QtCore.QRect(680, 430, 211, 131))
self.frame_74.setFrameShape(QtWidgets.QFrame.Box)
self.frame_74.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_74.setObjectName("frame_74")
self.label_103 = QtWidgets.QLabel(self.frame_74)
self.label_103.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_103.setAlignment(QtCore.Qt.AlignCenter)
self.label_103.setObjectName("label_103")
self.pushButton_103 = QtWidgets.QPushButton(self.frame_74)
self.pushButton_103.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_103.setObjectName("pushButton_103")
self.frame_75 = QtWidgets.QFrame(self.frame_4)
self.frame_75.setGeometry(QtCore.QRect(900, 430, 211, 131))
self.frame_75.setFrameShape(QtWidgets.QFrame.Box)
self.frame_75.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_75.setObjectName("frame_75")
self.label_104 = QtWidgets.QLabel(self.frame_75)
self.label_104.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_104.setAlignment(QtCore.Qt.AlignCenter)
self.label_104.setObjectName("label_104")
self.pushButton_104 = QtWidgets.QPushButton(self.frame_75)
self.pushButton_104.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_104.setObjectName("pushButton_104")
self.frame_76 = QtWidgets.QFrame(self.frame_4)
self.frame_76.setGeometry(QtCore.QRect(680, 570, 211, 131))
self.frame_76.setFrameShape(QtWidgets.QFrame.Box)
self.frame_76.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_76.setObjectName("frame_76")
self.label_105 = QtWidgets.QLabel(self.frame_76)
self.label_105.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_105.setAlignment(QtCore.Qt.AlignCenter)
self.label_105.setObjectName("label_105")
self.pushButton_105 = QtWidgets.QPushButton(self.frame_76)
self.pushButton_105.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_105.setObjectName("pushButton_105")
self.frame_77 = QtWidgets.QFrame(self.frame_4)
self.frame_77.setGeometry(QtCore.QRect(900, 10, 211, 131))
self.frame_77.setFrameShape(QtWidgets.QFrame.Box)
self.frame_77.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_77.setObjectName("frame_77")
self.label_106 = QtWidgets.QLabel(self.frame_77)
self.label_106.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_106.setAlignment(QtCore.Qt.AlignCenter)
self.label_106.setObjectName("label_106")
self.pushButton_106 = QtWidgets.QPushButton(self.frame_77)
self.pushButton_106.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_106.setObjectName("pushButton_106")
self.frame_78 = QtWidgets.QFrame(self.frame_4)
self.frame_78.setGeometry(QtCore.QRect(460, 150, 211, 131))
self.frame_78.setFrameShape(QtWidgets.QFrame.Box)
self.frame_78.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_78.setObjectName("frame_78")
self.label_107 = QtWidgets.QLabel(self.frame_78)
self.label_107.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_107.setAlignment(QtCore.Qt.AlignCenter)
self.label_107.setObjectName("label_107")
self.pushButton_107 = QtWidgets.QPushButton(self.frame_78)
self.pushButton_107.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_107.setObjectName("pushButton_107")
self.frame_79 = QtWidgets.QFrame(self.frame_4)
self.frame_79.setGeometry(QtCore.QRect(240, 150, 211, 131))
self.frame_79.setFrameShape(QtWidgets.QFrame.Box)
self.frame_79.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_79.setObjectName("frame_79")
self.label_108 = QtWidgets.QLabel(self.frame_79)
self.label_108.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_108.setAlignment(QtCore.Qt.AlignCenter)
self.label_108.setObjectName("label_108")
self.pushButton_108 = QtWidgets.QPushButton(self.frame_79)
self.pushButton_108.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_108.setObjectName("pushButton_108")
self.frame_80 = QtWidgets.QFrame(self.frame_4)
self.frame_80.setGeometry(QtCore.QRect(20, 570, 211, 131))
self.frame_80.setFrameShape(QtWidgets.QFrame.Box)
self.frame_80.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_80.setObjectName("frame_80")
self.label_109 = QtWidgets.QLabel(self.frame_80)
self.label_109.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_109.setAlignment(QtCore.Qt.AlignCenter)
self.label_109.setObjectName("label_109")
self.pushButton_109 = QtWidgets.QPushButton(self.frame_80)
self.pushButton_109.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_109.setObjectName("pushButton_109")
self.frame_81 = QtWidgets.QFrame(self.frame_4)
self.frame_81.setGeometry(QtCore.QRect(900, 150, 211, 131))
self.frame_81.setFrameShape(QtWidgets.QFrame.Box)
self.frame_81.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_81.setObjectName("frame_81")
self.label_110 = QtWidgets.QLabel(self.frame_81)
self.label_110.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_110.setAlignment(QtCore.Qt.AlignCenter)
self.label_110.setObjectName("label_110")
self.pushButton_110 = QtWidgets.QPushButton(self.frame_81)
self.pushButton_110.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_110.setObjectName("pushButton_110")
self.frame_82 = QtWidgets.QFrame(self.frame_4)
self.frame_82.setGeometry(QtCore.QRect(680, 10, 211, 131))
self.frame_82.setFrameShape(QtWidgets.QFrame.Box)
self.frame_82.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_82.setObjectName("frame_82")
self.label_111 = QtWidgets.QLabel(self.frame_82)
self.label_111.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_111.setAlignment(QtCore.Qt.AlignCenter)
self.label_111.setObjectName("label_111")
self.pushButton_111 = QtWidgets.QPushButton(self.frame_82)
self.pushButton_111.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_111.setObjectName("pushButton_111")
self.frame_83 = QtWidgets.QFrame(self.frame_4)
self.frame_83.setGeometry(QtCore.QRect(460, 290, 211, 131))
self.frame_83.setFrameShape(QtWidgets.QFrame.Box)
self.frame_83.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_83.setObjectName("frame_83")
self.label_112 = QtWidgets.QLabel(self.frame_83)
self.label_112.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_112.setAlignment(QtCore.Qt.AlignCenter)
self.label_112.setObjectName("label_112")
self.pushButton_112 = QtWidgets.QPushButton(self.frame_83)
self.pushButton_112.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_112.setObjectName("pushButton_112")
self.frame_84 = QtWidgets.QFrame(self.frame_4)
self.frame_84.setGeometry(QtCore.QRect(460, 10, 211, 131))
self.frame_84.setFrameShape(QtWidgets.QFrame.Box)
self.frame_84.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_84.setObjectName("frame_84")
self.label_113 = QtWidgets.QLabel(self.frame_84)
self.label_113.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_113.setAlignment(QtCore.Qt.AlignCenter)
self.label_113.setObjectName("label_113")
self.pushButton_113 = QtWidgets.QPushButton(self.frame_84)
self.pushButton_113.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_113.setObjectName("pushButton_113")
self.frame_85 = QtWidgets.QFrame(self.frame_4)
self.frame_85.setGeometry(QtCore.QRect(680, 290, 211, 131))
self.frame_85.setFrameShape(QtWidgets.QFrame.Box)
self.frame_85.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_85.setObjectName("frame_85")
self.label_114 = QtWidgets.QLabel(self.frame_85)
self.label_114.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_114.setAlignment(QtCore.Qt.AlignCenter)
self.label_114.setObjectName("label_114")
self.pushButton_114 = QtWidgets.QPushButton(self.frame_85)
self.pushButton_114.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_114.setObjectName("pushButton_114")
self.frame_86 = QtWidgets.QFrame(self.frame_4)
self.frame_86.setGeometry(QtCore.QRect(240, 430, 211, 131))
self.frame_86.setFrameShape(QtWidgets.QFrame.Box)
self.frame_86.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_86.setObjectName("frame_86")
self.label_115 = QtWidgets.QLabel(self.frame_86)
self.label_115.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_115.setAlignment(QtCore.Qt.AlignCenter)
self.label_115.setObjectName("label_115")
self.pushButton_115 = QtWidgets.QPushButton(self.frame_86)
self.pushButton_115.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_115.setObjectName("pushButton_115")
self.frame_87 = QtWidgets.QFrame(self.frame_4)
self.frame_87.setGeometry(QtCore.QRect(900, 570, 211, 131))
self.frame_87.setFrameShape(QtWidgets.QFrame.Box)
self.frame_87.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_87.setObjectName("frame_87")
self.label_116 = QtWidgets.QLabel(self.frame_87)
self.label_116.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_116.setAlignment(QtCore.Qt.AlignCenter)
self.label_116.setObjectName("label_116")
self.pushButton_116 = QtWidgets.QPushButton(self.frame_87)
self.pushButton_116.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_116.setObjectName("pushButton_116")
self.frame_88 = QtWidgets.QFrame(self.frame_4)
self.frame_88.setGeometry(QtCore.QRect(240, 290, 211, 131))
self.frame_88.setFrameShape(QtWidgets.QFrame.Box)
self.frame_88.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_88.setObjectName("frame_88")
self.label_117 = QtWidgets.QLabel(self.frame_88)
self.label_117.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_117.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_117.setAlignment(QtCore.Qt.AlignCenter)
self.label_117.setObjectName("label_117")
self.pushButton_117 = QtWidgets.QPushButton(self.frame_88)
self.pushButton_117.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_117.setObjectName("pushButton_117")
self.frame_89 = QtWidgets.QFrame(self.frame_4)
self.frame_89.setGeometry(QtCore.QRect(460, 570, 211, 131))
self.frame_89.setFrameShape(QtWidgets.QFrame.Box)
self.frame_89.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_89.setObjectName("frame_89")
self.label_118 = QtWidgets.QLabel(self.frame_89)
self.label_118.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_118.setAlignment(QtCore.Qt.AlignCenter)
self.label_118.setObjectName("label_118")
self.pushButton_118 = QtWidgets.QPushButton(self.frame_89)
self.pushButton_118.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_118.setObjectName("pushButton_118")
self.frame_90 = QtWidgets.QFrame(self.frame_4)
self.frame_90.setGeometry(QtCore.QRect(20, 150, 211, 131))
self.frame_90.setFrameShape(QtWidgets.QFrame.Box)
self.frame_90.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_90.setObjectName("frame_90")
self.label_119 = QtWidgets.QLabel(self.frame_90)
self.label_119.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_119.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_119.setAlignment(QtCore.Qt.AlignCenter)
self.label_119.setObjectName("label_119")
self.pushButton_119 = QtWidgets.QPushButton(self.frame_90)
self.pushButton_119.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_119.setObjectName("pushButton_119")
self.frame_91 = QtWidgets.QFrame(self.frame_4)
self.frame_91.setGeometry(QtCore.QRect(460, 430, 211, 131))
self.frame_91.setFrameShape(QtWidgets.QFrame.Box)
self.frame_91.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_91.setObjectName("frame_91")
self.label_120 = QtWidgets.QLabel(self.frame_91)
self.label_120.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_120.setAlignment(QtCore.Qt.AlignCenter)
self.label_120.setObjectName("label_120")
self.pushButton_120 = QtWidgets.QPushButton(self.frame_91)
self.pushButton_120.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_120.setObjectName("pushButton_120")
self.frame_38 = QtWidgets.QFrame(self.frame_4)
self.frame_38.setGeometry(QtCore.QRect(240, 10, 211, 131))
self.frame_38.setFrameShape(QtWidgets.QFrame.Box)
self.frame_38.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_38.setObjectName("frame_38")
self.label_121 = QtWidgets.QLabel(self.frame_38)
self.label_121.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_121.setAlignment(QtCore.Qt.AlignCenter)
self.label_121.setObjectName("label_121")
self.pushButton_121 = QtWidgets.QPushButton(self.frame_38)
self.pushButton_121.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_121.setObjectName("pushButton_121")
self.frame_92 = QtWidgets.QFrame(self.frame_4)
self.frame_92.setGeometry(QtCore.QRect(680, 150, 211, 131))
self.frame_92.setFrameShape(QtWidgets.QFrame.Box)
self.frame_92.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_92.setObjectName("frame_92")
self.label_122 = QtWidgets.QLabel(self.frame_92)
self.label_122.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_122.setAlignment(QtCore.Qt.AlignCenter)
self.label_122.setObjectName("label_122")
self.pushButton_122 = QtWidgets.QPushButton(self.frame_92)
self.pushButton_122.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_122.setObjectName("pushButton_122")
self.frame_30 = QtWidgets.QFrame(self.frame_4)
self.frame_30.setGeometry(QtCore.QRect(20, 290, 211, 131))
self.frame_30.setFrameShape(QtWidgets.QFrame.Box)
self.frame_30.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_30.setObjectName("frame_30")
self.label_123 = QtWidgets.QLabel(self.frame_30)
self.label_123.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_123.setAlignment(QtCore.Qt.AlignCenter)
self.label_123.setObjectName("label_123")
self.pushButton_123 = QtWidgets.QPushButton(self.frame_30)
self.pushButton_123.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_123.setObjectName("pushButton_123")
self.frame_32 = QtWidgets.QFrame(self.frame_4)
self.frame_32.setGeometry(QtCore.QRect(240, 570, 211, 131))
self.frame_32.setFrameShape(QtWidgets.QFrame.Box)
self.frame_32.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_32.setObjectName("frame_32")
self.label_124 = QtWidgets.QLabel(self.frame_32)
self.label_124.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_124.setAlignment(QtCore.Qt.AlignCenter)
self.label_124.setObjectName("label_124")
self.pushButton_124 = QtWidgets.QPushButton(self.frame_32)
self.pushButton_124.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_124.setObjectName("pushButton_124")
self.frame_93 = QtWidgets.QFrame(self.frame_4)
self.frame_93.setGeometry(QtCore.QRect(900, 290, 211, 131))
self.frame_93.setFrameShape(QtWidgets.QFrame.Box)
self.frame_93.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_93.setObjectName("frame_93")
self.label_125 = QtWidgets.QLabel(self.frame_93)
self.label_125.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_125.setAlignment(QtCore.Qt.AlignCenter)
self.label_125.setObjectName("label_125")
self.pushButton_125 = QtWidgets.QPushButton(self.frame_93)
self.pushButton_125.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_125.setObjectName("pushButton_125")
self.horizontalLayout_3.addWidget(self.frame_4)
self.tabPage.addTab(self.tab2, "")
self.tab3 = QtWidgets.QWidget()
self.tab3.setObjectName("tab3")
self.horizontalLayoutWidget_2 = QtWidgets.QWidget(self.tab3)
self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(0, 0, 1131, 741))
self.horizontalLayoutWidget_2.setObjectName("horizontalLayoutWidget_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.frame_3 = QtWidgets.QFrame(self.horizontalLayoutWidget_2)
self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_3.setObjectName("frame_3")
self.frame_71 = QtWidgets.QFrame(self.frame_3)
self.frame_71.setGeometry(QtCore.QRect(20, 430, 211, 131))
self.frame_71.setFrameShape(QtWidgets.QFrame.Box)
self.frame_71.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_71.setObjectName("frame_71")
self.label_100 = QtWidgets.QLabel(self.frame_71)
self.label_100.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_100.setAlignment(QtCore.Qt.AlignCenter)
self.label_100.setObjectName("label_100")
self.pushButton_100 = QtWidgets.QPushButton(self.frame_71)
self.pushButton_100.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_100.setObjectName("pushButton_100")
self.frame_17 = QtWidgets.QFrame(self.frame_3)
self.frame_17.setGeometry(QtCore.QRect(20, 10, 211, 131))
self.frame_17.setFrameShape(QtWidgets.QFrame.Box)
self.frame_17.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_17.setObjectName("frame_17")
self.label_10 = QtWidgets.QLabel(self.frame_17)
self.label_10.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_10.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_10.setAlignment(QtCore.Qt.AlignCenter)
self.label_10.setObjectName("label_10")
self.pushButton_10 = QtWidgets.QPushButton(self.frame_17)
self.pushButton_10.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_10.setObjectName("pushButton_10")
self.frame_69 = QtWidgets.QFrame(self.frame_3)
self.frame_69.setGeometry(QtCore.QRect(680, 430, 211, 131))
self.frame_69.setFrameShape(QtWidgets.QFrame.Box)
self.frame_69.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_69.setObjectName("frame_69")
self.label_98 = QtWidgets.QLabel(self.frame_69)
self.label_98.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_98.setAlignment(QtCore.Qt.AlignCenter)
self.label_98.setObjectName("label_98")
self.pushButton_98 = QtWidgets.QPushButton(self.frame_69)
self.pushButton_98.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_98.setObjectName("pushButton_98")
self.frame_55 = QtWidgets.QFrame(self.frame_3)
self.frame_55.setGeometry(QtCore.QRect(900, 430, 211, 131))
self.frame_55.setFrameShape(QtWidgets.QFrame.Box)
self.frame_55.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_55.setObjectName("frame_55")
self.label_82 = QtWidgets.QLabel(self.frame_55)
self.label_82.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_82.setAlignment(QtCore.Qt.AlignCenter)
self.label_82.setObjectName("label_82")
self.pushButton_82 = QtWidgets.QPushButton(self.frame_55)
self.pushButton_82.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_82.setObjectName("pushButton_82")
self.frame_67 = QtWidgets.QFrame(self.frame_3)
self.frame_67.setGeometry(QtCore.QRect(680, 570, 211, 131))
self.frame_67.setFrameShape(QtWidgets.QFrame.Box)
self.frame_67.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_67.setObjectName("frame_67")
self.label_96 = QtWidgets.QLabel(self.frame_67)
self.label_96.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_96.setAlignment(QtCore.Qt.AlignCenter)
self.label_96.setObjectName("label_96")
self.pushButton_96 = QtWidgets.QPushButton(self.frame_67)
self.pushButton_96.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_96.setObjectName("pushButton_96")
self.frame_60 = QtWidgets.QFrame(self.frame_3)
self.frame_60.setGeometry(QtCore.QRect(900, 10, 211, 131))
self.frame_60.setFrameShape(QtWidgets.QFrame.Box)
self.frame_60.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_60.setObjectName("frame_60")
self.label_88 = QtWidgets.QLabel(self.frame_60)
self.label_88.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_88.setAlignment(QtCore.Qt.AlignCenter)
self.label_88.setObjectName("label_88")
self.pushButton_88 = QtWidgets.QPushButton(self.frame_60)
self.pushButton_88.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_88.setObjectName("pushButton_88")
self.frame_65 = QtWidgets.QFrame(self.frame_3)
self.frame_65.setGeometry(QtCore.QRect(460, 150, 211, 131))
self.frame_65.setFrameShape(QtWidgets.QFrame.Box)
self.frame_65.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_65.setObjectName("frame_65")
self.label_94 = QtWidgets.QLabel(self.frame_65)
self.label_94.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_94.setAlignment(QtCore.Qt.AlignCenter)
self.label_94.setObjectName("label_94")
self.pushButton_94 = QtWidgets.QPushButton(self.frame_65)
self.pushButton_94.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_94.setObjectName("pushButton_94")
self.frame_62 = QtWidgets.QFrame(self.frame_3)
self.frame_62.setGeometry(QtCore.QRect(240, 150, 211, 131))
self.frame_62.setFrameShape(QtWidgets.QFrame.Box)
self.frame_62.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_62.setObjectName("frame_62")
self.label_91 = QtWidgets.QLabel(self.frame_62)
self.label_91.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_91.setAlignment(QtCore.Qt.AlignCenter)
self.label_91.setObjectName("label_91")
self.pushButton_91 = QtWidgets.QPushButton(self.frame_62)
self.pushButton_91.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_91.setObjectName("pushButton_91")
self.frame_70 = QtWidgets.QFrame(self.frame_3)
self.frame_70.setGeometry(QtCore.QRect(20, 570, 211, 131))
self.frame_70.setFrameShape(QtWidgets.QFrame.Box)
self.frame_70.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_70.setObjectName("frame_70")
self.label_99 = QtWidgets.QLabel(self.frame_70)
self.label_99.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_99.setAlignment(QtCore.Qt.AlignCenter)
self.label_99.setObjectName("label_99")
self.pushButton_99 = QtWidgets.QPushButton(self.frame_70)
self.pushButton_99.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_99.setObjectName("pushButton_99")
self.frame_61 = QtWidgets.QFrame(self.frame_3)
self.frame_61.setGeometry(QtCore.QRect(900, 150, 211, 131))
self.frame_61.setFrameShape(QtWidgets.QFrame.Box)
self.frame_61.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_61.setObjectName("frame_61")
self.label_90 = QtWidgets.QLabel(self.frame_61)
self.label_90.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_90.setAlignment(QtCore.Qt.AlignCenter)
self.label_90.setObjectName("label_90")
self.pushButton_90 = QtWidgets.QPushButton(self.frame_61)
self.pushButton_90.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_90.setObjectName("pushButton_90")
self.frame_59 = QtWidgets.QFrame(self.frame_3)
self.frame_59.setGeometry(QtCore.QRect(680, 10, 211, 131))
self.frame_59.setFrameShape(QtWidgets.QFrame.Box)
self.frame_59.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_59.setObjectName("frame_59")
self.label_87 = QtWidgets.QLabel(self.frame_59)
self.label_87.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_87.setAlignment(QtCore.Qt.AlignCenter)
self.label_87.setObjectName("label_87")
self.pushButton_87 = QtWidgets.QPushButton(self.frame_59)
self.pushButton_87.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_87.setObjectName("pushButton_87")
self.frame_64 = QtWidgets.QFrame(self.frame_3)
self.frame_64.setGeometry(QtCore.QRect(460, 290, 211, 131))
self.frame_64.setFrameShape(QtWidgets.QFrame.Box)
self.frame_64.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_64.setObjectName("frame_64")
self.label_93 = QtWidgets.QLabel(self.frame_64)
self.label_93.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_93.setAlignment(QtCore.Qt.AlignCenter)
self.label_93.setObjectName("label_93")
self.pushButton_93 = QtWidgets.QPushButton(self.frame_64)
self.pushButton_93.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_93.setObjectName("pushButton_93")
self.frame_54 = QtWidgets.QFrame(self.frame_3)
self.frame_54.setGeometry(QtCore.QRect(460, 10, 211, 131))
self.frame_54.setFrameShape(QtWidgets.QFrame.Box)
self.frame_54.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_54.setObjectName("frame_54")
self.label_81 = QtWidgets.QLabel(self.frame_54)
self.label_81.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_81.setAlignment(QtCore.Qt.AlignCenter)
self.label_81.setObjectName("label_81")
self.pushButton_81 = QtWidgets.QPushButton(self.frame_54)
self.pushButton_81.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_81.setObjectName("pushButton_81")
self.frame_56 = QtWidgets.QFrame(self.frame_3)
self.frame_56.setGeometry(QtCore.QRect(680, 290, 211, 131))
self.frame_56.setFrameShape(QtWidgets.QFrame.Box)
self.frame_56.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_56.setObjectName("frame_56")
self.label_83 = QtWidgets.QLabel(self.frame_56)
self.label_83.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_83.setAlignment(QtCore.Qt.AlignCenter)
self.label_83.setObjectName("label_83")
self.pushButton_83 = QtWidgets.QPushButton(self.frame_56)
self.pushButton_83.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_83.setObjectName("pushButton_83")
self.frame_57 = QtWidgets.QFrame(self.frame_3)
self.frame_57.setGeometry(QtCore.QRect(240, 430, 211, 131))
self.frame_57.setFrameShape(QtWidgets.QFrame.Box)
self.frame_57.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_57.setObjectName("frame_57")
self.label_85 = QtWidgets.QLabel(self.frame_57)
self.label_85.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_85.setAlignment(QtCore.Qt.AlignCenter)
self.label_85.setObjectName("label_85")
self.pushButton_85 = QtWidgets.QPushButton(self.frame_57)
self.pushButton_85.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_85.setObjectName("pushButton_85")
self.frame_52 = QtWidgets.QFrame(self.frame_3)
self.frame_52.setGeometry(QtCore.QRect(900, 570, 211, 131))
self.frame_52.setFrameShape(QtWidgets.QFrame.Box)
self.frame_52.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_52.setObjectName("frame_52")
self.label_79 = QtWidgets.QLabel(self.frame_52)
self.label_79.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_79.setAlignment(QtCore.Qt.AlignCenter)
self.label_79.setObjectName("label_79")
self.pushButton_79 = QtWidgets.QPushButton(self.frame_52)
self.pushButton_79.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_79.setObjectName("pushButton_79")
self.frame_53 = QtWidgets.QFrame(self.frame_3)
self.frame_53.setGeometry(QtCore.QRect(240, 290, 211, 131))
self.frame_53.setFrameShape(QtWidgets.QFrame.Box)
self.frame_53.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_53.setObjectName("frame_53")
self.label_80 = QtWidgets.QLabel(self.frame_53)
self.label_80.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_80.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_80.setAlignment(QtCore.Qt.AlignCenter)
self.label_80.setObjectName("label_80")
self.pushButton_80 = QtWidgets.QPushButton(self.frame_53)
self.pushButton_80.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_80.setObjectName("pushButton_80")
self.frame_63 = QtWidgets.QFrame(self.frame_3)
self.frame_63.setGeometry(QtCore.QRect(460, 570, 211, 131))
self.frame_63.setFrameShape(QtWidgets.QFrame.Box)
self.frame_63.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_63.setObjectName("frame_63")
self.label_92 = QtWidgets.QLabel(self.frame_63)
self.label_92.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_92.setAlignment(QtCore.Qt.AlignCenter)
self.label_92.setObjectName("label_92")
self.pushButton_92 = QtWidgets.QPushButton(self.frame_63)
self.pushButton_92.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_92.setObjectName("pushButton_92")
self.frame_68 = QtWidgets.QFrame(self.frame_3)
self.frame_68.setGeometry(QtCore.QRect(20, 150, 211, 131))
self.frame_68.setFrameShape(QtWidgets.QFrame.Box)
self.frame_68.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_68.setObjectName("frame_68")
self.label_97 = QtWidgets.QLabel(self.frame_68)
self.label_97.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_97.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_97.setAlignment(QtCore.Qt.AlignCenter)
self.label_97.setObjectName("label_97")
self.pushButton_97 = QtWidgets.QPushButton(self.frame_68)
self.pushButton_97.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_97.setObjectName("pushButton_97")
self.frame_58 = QtWidgets.QFrame(self.frame_3)
self.frame_58.setGeometry(QtCore.QRect(460, 430, 211, 131))
self.frame_58.setFrameShape(QtWidgets.QFrame.Box)
self.frame_58.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_58.setObjectName("frame_58")
self.label_86 = QtWidgets.QLabel(self.frame_58)
self.label_86.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_86.setAlignment(QtCore.Qt.AlignCenter)
self.label_86.setObjectName("label_86")
self.pushButton_86 = QtWidgets.QPushButton(self.frame_58)
self.pushButton_86.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_86.setObjectName("pushButton_86")
self.frame_37 = QtWidgets.QFrame(self.frame_3)
self.frame_37.setGeometry(QtCore.QRect(240, 10, 211, 131))
self.frame_37.setFrameShape(QtWidgets.QFrame.Box)
self.frame_37.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_37.setObjectName("frame_37")
self.label_78 = QtWidgets.QLabel(self.frame_37)
self.label_78.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_78.setAlignment(QtCore.Qt.AlignCenter)
self.label_78.setObjectName("label_78")
self.pushButton_78 = QtWidgets.QPushButton(self.frame_37)
self.pushButton_78.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_78.setObjectName("pushButton_78")
self.frame_66 = QtWidgets.QFrame(self.frame_3)
self.frame_66.setGeometry(QtCore.QRect(680, 150, 211, 131))
self.frame_66.setFrameShape(QtWidgets.QFrame.Box)
self.frame_66.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_66.setObjectName("frame_66")
self.label_95 = QtWidgets.QLabel(self.frame_66)
self.label_95.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_95.setAlignment(QtCore.Qt.AlignCenter)
self.label_95.setObjectName("label_95")
self.pushButton_95 = QtWidgets.QPushButton(self.frame_66)
self.pushButton_95.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_95.setObjectName("pushButton_95")
self.frame_29 = QtWidgets.QFrame(self.frame_3)
self.frame_29.setGeometry(QtCore.QRect(20, 290, 211, 131))
self.frame_29.setFrameShape(QtWidgets.QFrame.Box)
self.frame_29.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_29.setObjectName("frame_29")
self.label_84 = QtWidgets.QLabel(self.frame_29)
self.label_84.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_84.setAlignment(QtCore.Qt.AlignCenter)
self.label_84.setObjectName("label_84")
self.pushButton_84 = QtWidgets.QPushButton(self.frame_29)
self.pushButton_84.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_84.setObjectName("pushButton_84")
self.frame_31 = QtWidgets.QFrame(self.frame_3)
self.frame_31.setGeometry(QtCore.QRect(240, 570, 211, 131))
self.frame_31.setFrameShape(QtWidgets.QFrame.Box)
self.frame_31.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_31.setObjectName("frame_31")
self.label_89 = QtWidgets.QLabel(self.frame_31)
self.label_89.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_89.setAlignment(QtCore.Qt.AlignCenter)
self.label_89.setObjectName("label_89")
self.pushButton_89 = QtWidgets.QPushButton(self.frame_31)
self.pushButton_89.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_89.setObjectName("pushButton_89")
self.frame_72 = QtWidgets.QFrame(self.frame_3)
self.frame_72.setGeometry(QtCore.QRect(900, 290, 211, 131))
self.frame_72.setFrameShape(QtWidgets.QFrame.Box)
self.frame_72.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_72.setObjectName("frame_72")
self.label_101 = QtWidgets.QLabel(self.frame_72)
self.label_101.setGeometry(QtCore.QRect(0, 0, 211, 131))
self.label_101.setAlignment(QtCore.Qt.AlignCenter)
self.label_101.setObjectName("label_101")
self.pushButton_101 = QtWidgets.QPushButton(self.frame_72)
self.pushButton_101.setGeometry(QtCore.QRect(100, 100, 101, 23))
self.pushButton_101.setObjectName("pushButton_101")
self.horizontalLayout_2.addWidget(self.frame_3)
self.tabPage.addTab(self.tab3, "")
self.tab4 = QtWidgets.QWidget()
self.tab4.setObjectName("tab4")
self.tabPage.addTab(self.tab4, "")
self.tab5 = QtWidgets.QWidget()
self.tab5.setObjectName("tab5")
self.tabPage.addTab(self.tab5, "")
self.tab6 = QtWidgets.QWidget()
self.tab6.setObjectName("tab6")
self.tabPage.addTab(self.tab6, "")
self.horizontalLayout.addWidget(self.tabPage)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1153, 23))
self.menubar.setObjectName("menubar")
self.menu = QtWidgets.QMenu(self.menubar)
self.menu.setObjectName("menu")
MainWindow.setMenuBar(self.menubar)
self.actiondaochu = QtWidgets.QAction(MainWindow)
self.actiondaochu.setObjectName("actiondaochu")
self.menu.addAction(self.actiondaochu)
self.menubar.addAction(self.menu.menuAction())
self.retranslateUi(MainWindow)
self.tabPage.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Apply translated display strings to every widget.

    Standard Qt Designer-generated method: called from setupUi (and again on
    language-change events) to (re)set all user-visible text through
    QCoreApplication.translate so the UI can be localized.

    Args:
        MainWindow: the top-level QMainWindow whose widgets are updated.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
    # Tab 1: screen-split selector (8 / 16 / 25 panes) and its image slots.
    self.cb1.setItemText(0, _translate("MainWindow", "八画面"))
    self.cb1.setItemText(1, _translate("MainWindow", "十六画面"))
    self.cb1.setItemText(2, _translate("MainWindow", "二十五画面"))
    self.label1_4.setText(_translate("MainWindow", "图片4"))
    self.pushButton1_4.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_2.setText(_translate("MainWindow", "图片2"))
    self.pushButton1_2.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_7.setText(_translate("MainWindow", "图片7"))
    self.pushButton1_7.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_8.setText(_translate("MainWindow", "图片8"))
    self.pushButton1_8.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_3.setText(_translate("MainWindow", "图片3"))
    self.pushButton1_3.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_6.setText(_translate("MainWindow", "图片6"))
    self.pushButton1_6.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_5.setText(_translate("MainWindow", "图片5"))
    self.pushButton1_5.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label1_1.setText(_translate("MainWindow", "图片1"))
    self.pushButton1_1.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab1), _translate("MainWindow", "监控1#"))
    # Tab 2: same structure; widget numbering does not follow picture
    # numbering (Designer assigns sequential object names).
    self.cb2.setItemText(0, _translate("MainWindow", "八画面"))
    self.cb2.setItemText(1, _translate("MainWindow", "十六画面"))
    self.cb2.setItemText(2, _translate("MainWindow", "二十五画面"))
    self.label_102.setText(_translate("MainWindow", "图片16"))
    self.pushButton_102.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_11.setText(_translate("MainWindow", "图片1"))
    self.pushButton_11.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_103.setText(_translate("MainWindow", "图片19"))
    self.pushButton_103.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_104.setText(_translate("MainWindow", "图片20"))
    self.pushButton_104.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_105.setText(_translate("MainWindow", "图片24"))
    self.pushButton_105.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_106.setText(_translate("MainWindow", "图片5"))
    self.pushButton_106.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_107.setText(_translate("MainWindow", "图片8"))
    self.pushButton_107.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_108.setText(_translate("MainWindow", "图片7"))
    self.pushButton_108.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_109.setText(_translate("MainWindow", "图片21"))
    self.pushButton_109.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_110.setText(_translate("MainWindow", "图片10"))
    self.pushButton_110.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_111.setText(_translate("MainWindow", "图片4"))
    self.pushButton_111.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_112.setText(_translate("MainWindow", "图片13"))
    self.pushButton_112.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_113.setText(_translate("MainWindow", "图片3"))
    self.pushButton_113.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_114.setText(_translate("MainWindow", "图片14"))
    self.pushButton_114.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_115.setText(_translate("MainWindow", "图片17"))
    self.pushButton_115.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_116.setText(_translate("MainWindow", "图片25"))
    self.pushButton_116.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_117.setText(_translate("MainWindow", "图片12"))
    self.pushButton_117.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_118.setText(_translate("MainWindow", "图片23"))
    self.pushButton_118.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_119.setText(_translate("MainWindow", "图片6"))
    self.pushButton_119.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_120.setText(_translate("MainWindow", "图片18"))
    self.pushButton_120.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_121.setText(_translate("MainWindow", "图片2"))
    self.pushButton_121.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_122.setText(_translate("MainWindow", "图片9"))
    self.pushButton_122.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_123.setText(_translate("MainWindow", "图片11"))
    self.pushButton_123.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_124.setText(_translate("MainWindow", "图片22"))
    self.pushButton_124.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_125.setText(_translate("MainWindow", "图片15"))
    self.pushButton_125.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab2), _translate("MainWindow", "监控#2"))
    # Tab 3: third monitoring page, same 25-slot layout.
    self.label_100.setText(_translate("MainWindow", "图片16"))
    self.pushButton_100.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_10.setText(_translate("MainWindow", "图片1"))
    self.pushButton_10.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_98.setText(_translate("MainWindow", "图片19"))
    self.pushButton_98.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_82.setText(_translate("MainWindow", "图片20"))
    self.pushButton_82.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_96.setText(_translate("MainWindow", "图片24"))
    self.pushButton_96.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_88.setText(_translate("MainWindow", "图片5"))
    self.pushButton_88.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_94.setText(_translate("MainWindow", "图片8"))
    self.pushButton_94.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_91.setText(_translate("MainWindow", "图片7"))
    self.pushButton_91.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_99.setText(_translate("MainWindow", "图片21"))
    self.pushButton_99.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_90.setText(_translate("MainWindow", "图片10"))
    self.pushButton_90.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_87.setText(_translate("MainWindow", "图片4"))
    self.pushButton_87.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_93.setText(_translate("MainWindow", "图片13"))
    self.pushButton_93.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_81.setText(_translate("MainWindow", "图片3"))
    self.pushButton_81.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_83.setText(_translate("MainWindow", "图片14"))
    self.pushButton_83.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_85.setText(_translate("MainWindow", "图片17"))
    self.pushButton_85.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_79.setText(_translate("MainWindow", "图片25"))
    self.pushButton_79.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_80.setText(_translate("MainWindow", "图片12"))
    self.pushButton_80.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_92.setText(_translate("MainWindow", "图片23"))
    self.pushButton_92.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_97.setText(_translate("MainWindow", "图片6"))
    self.pushButton_97.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_86.setText(_translate("MainWindow", "图片18"))
    self.pushButton_86.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_78.setText(_translate("MainWindow", "图片2"))
    self.pushButton_78.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_95.setText(_translate("MainWindow", "图片9"))
    self.pushButton_95.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_84.setText(_translate("MainWindow", "图片11"))
    self.pushButton_84.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_89.setText(_translate("MainWindow", "图片22"))
    self.pushButton_89.setText(_translate("MainWindow", "插入 / 更改图片"))
    self.label_101.setText(_translate("MainWindow", "图片15"))
    self.pushButton_101.setText(_translate("MainWindow", "插入 / 更改图片"))
    # Tab captions for the remaining (empty) monitoring pages.
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab3), _translate("MainWindow", "监控#3"))
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab4), _translate("MainWindow", "监控#4"))
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab5), _translate("MainWindow", "监控#5"))
    self.tabPage.setTabText(self.tabPage.indexOf(self.tab6), _translate("MainWindow", "监控#6"))
    # File menu and its single "export PPT" action.
    self.menu.setTitle(_translate("MainWindow", "文件"))
    self.actiondaochu.setText(_translate("MainWindow", "导出PPT"))
|
def short_circuiting(is_Friend = True,is_User = True):
    """Print a greeting when at least one of the two flags is truthy.

    Demonstrates boolean short-circuiting: `or` stops evaluating as soon
    as its left operand is truthy.
    """
    # Guard clause: nothing to do when both flags are falsy.
    if not (is_Friend or is_User):
        return
    print("best friends forever")
import gym
from gym import spaces
import numpy as np
import random
from envs.maze_layouts import maze_layouts
class ContinualParticleMaze():
    """
    Particle maze for continual learning. Mazes should be set by a runner class
    of the environment; see agent.py for an example.

    The agent is a point particle in [-1, 1]^2. The maze layout is parsed
    from an ASCII string in maze_layouts: 'S' marks an optional start cell,
    'G' the goal cell, ' ' free space, and any other character a wall.
    """
    def __init__(self, grid_name, dense=False):
        """
        Args:
            grid_name: key into maze_layouts selecting the initial maze.
            dense: if True, reward is negative distance to the goal;
                otherwise a sparse 0/1 reward near the goal.
        """
        self.dense = dense
        self.dt = 0.1
        # Motion is sub-stepped this many times per step() for collision checks.
        self.num_collision_steps = 10
        # Observation is (agent_x, agent_y, goal_x, goal_y), all in [-1, 1].
        self.observation_space = spaces.Box(
            np.array([-1, -1, -1, -1]),
            np.array([1, 1, 1, 1])
        )
        self.action_space = spaces.Box(
            np.array([-1, -1]), np.array([1, 1])
        )
        self.x = np.zeros(2)
        self.goal = np.zeros(2)
        self.reset_grid(grid_name)

    def step(self, action):
        """
        Action is a clipped dx. Must respect collision with walls.

        Returns:
            (obs, reward - collision_cost, done=False, info_dict)
        """
        # If agent is in a wall (shouldn't happen), reset
        ind = self.get_index(self.x)
        if self.grid[ind[0], ind[1]]:
            self.reset_agent()
        rewards, costs = self.get_rew(), 0
        # Action movement and collision detection via sub-steps.
        action = np.clip(action, -1, 1)
        ddt = self.dt / self.num_collision_steps
        for _ in range(self.num_collision_steps):
            x_new = self.x + action * ddt
            ind = self.get_index(x_new)
            # If in wall, back up one sub-step (reduces problems with learning).
            if self.grid[ind[0], ind[1]]:
                costs += 1
                self.x -= action * ddt
                break
            else:
                self.x = x_new
        self.x = np.clip(self.x, -1, 1)
        return self.get_obs(), rewards - costs, False, {}

    def reset_agent(self, mode=None):
        """
        Reset the agent's position (should be used rarely in lifelong
        learning).
        """
        if self.start_ind is not None:
            # Spawn the agent at the designated start state.
            self.x = self.get_coords(self.start_ind)
        else:
            # Spawn the agent not too close to the goal.
            # BUG FIX: original compared against `self.g[0,:]`, but no
            # attribute `g` exists anywhere in the class -- the goal
            # coordinates are stored in `self.goal` (shape (2,)).
            self.x = self.get_random_pos(self.grid_free_index)
            while np.sum(np.square(self.x - self.goal)) < 0.5:
                self.x = self.get_random_pos(self.grid_free_index)

    def get_random_pos(self, free_index):
        """
        Return coordinates of a uniformly sampled cell from an (N, 2) index
        array (e.g. self.grid_free_index).

        NOTE(review): this helper was called by reset_agent but missing from
        the class; reconstructed as uniform sampling over the given cells --
        confirm against the original project if available.
        """
        ind = free_index[np.random.randint(free_index.shape[0])]
        return self.get_coords(ind)

    def reset_grid(self, grid_name):
        """
        Changes the current grid layout, i.e. the walls of the maze. The
        agent's position is not reset, unless it would be placed inside of a
        wall by the change, in which case it spawns in the set start position.
        """
        self.grid = maze_layouts[grid_name]
        self.grid = self.grid.replace('\n','')
        # Layout strings are square: side length is sqrt of the char count.
        self.grid_size = int(np.sqrt(len(self.grid)))
        GS = [self.grid_size,self.grid_size]
        # Check if there is a specified start location 'S'.
        self.grid_chars = (np.array(list(self.grid)) != 'S').reshape(GS)
        self.start_ind = np.argwhere(self.grid_chars == False)
        if self.start_ind.shape[0] > 0:
            self.start_ind = self.start_ind[0]
        else:
            self.start_ind = None
        # Get the goal location 'G' (layouts are assumed to contain one).
        self.grid_chars = (np.array(list(self.grid)) != 'G').reshape(GS)
        self.goal_ind = np.argwhere(self.grid_chars == False)
        self.goal_ind = self.goal_ind[0]
        self.goal = self.get_coords(self.goal_ind)
        # After removing the S/G markers, every non-space char is a wall.
        self.grid = self.grid.replace('S', ' ')
        self.grid = self.grid.replace('G', ' ')
        self.grid = (np.array(list(self.grid)) != ' ').reshape(GS)
        self.grid_wall_index = np.argwhere(self.grid == True)
        self.grid_free_index = np.argwhere(self.grid != True)
        # Reset the agent only if it is now stuck inside a wall.
        cur_ind = self.get_index(self.x)
        if self.grid[cur_ind[0],cur_ind[1]]:
            self.reset()

    def reset(self):
        """
        Only called at initialization of environment (use reset_agent and
        reset_goal as needed elsewhere).
        """
        self.reset_agent()
        return self.get_obs()

    def get_obs(self):
        """
        Observation is the coordinates of the agent and the goal.
        """
        return np.concatenate([self.x, self.goal])

    def get_rew(self):
        """
        Reward for the agent, based on the current state. Environment supports
        dense and sparse reward variants.
        """
        if self.dense:
            return -np.linalg.norm(self.x - self.goal)
        else:
            return 1 if np.linalg.norm(self.x - self.goal) < .1 else 0

    def get_coords(self, index):
        """
        Convert indices of grid into coordinates (cell centers in [-1, 1]).
        """
        return ((index + 0.5) / self.grid_size) * 2 - 1

    def get_index(self, coords):
        """
        Convert coordinates to indices of grid.
        """
        # FIX: use a plain int dtype -- the original np.int8 silently
        # overflows for grids wider than 127 cells.
        return np.clip(((coords + 1) * 0.5) * self.grid_size,
                       0, self.grid_size - 1).astype(int)
|
# Generated by Django 2.1.7 on 2020-10-30 18:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the `bb` model of the board app.

    Sets model meta options (default ordering, verbose names) and alters
    four fields: `content` and `price` become optional, `published` is an
    indexed auto-now-add timestamp, and `title` gets a verbose name.
    """

    # Must be applied after the app's initial schema migration.
    dependencies = [
        ('board', '0001_initial'),
    ]

    operations = [
        # Newest announcements first; human-readable names for the admin UI.
        migrations.AlterModelOptions(
            name='bb',
            options={'ordering': ['-published'], 'verbose_name': 'announcement', 'verbose_name_plural': 'announcements'},
        ),
        # Description becomes optional (nullable and blankable).
        migrations.AlterField(
            model_name='bb',
            name='content',
            field=models.TextField(blank=True, null=True, verbose_name='Description'),
        ),
        # Price becomes optional as well.
        migrations.AlterField(
            model_name='bb',
            name='price',
            field=models.FloatField(blank=True, null=True, verbose_name='Price'),
        ),
        # Publication timestamp: set automatically at creation, indexed
        # to support the '-published' default ordering above.
        migrations.AlterField(
            model_name='bb',
            name='published',
            field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='published'),
        ),
        migrations.AlterField(
            model_name='bb',
            name='title',
            field=models.CharField(max_length=200, verbose_name='Good'),
        ),
    ]
|
import numpy as np
from typing import Dict, List
from .document import Document
from .vectorizers import TfidfVectorizer, CountVectorizer
class Comparator:
    """
    API used for loading and comparing documents.

    The reference document is loaded once at construction and vectorized
    with a CountVectorizer; candidate documents are registered through
    add_document (or passed directly to compare) and scored by cosine
    similarity against the reference's TF-IDF vector.
    """
    def __init__(self, path: str) -> None:
        self._doc = self.load_document(path)
        self._doc_vector = CountVectorizer().vectorize(self._doc)
        self._comp_docs = list()
        self._vectorizer = TfidfVectorizer()

    def add_document(self, path: str) -> None:
        """Load the document at `path` and register it for comparison."""
        self._comp_docs.append(self.load_document(path))

    def load_document(self, path: str) -> Document:
        """
        Loads and returns Document from given path.

        Args:
            path: Path to document.

        Returns: Document instance.
        """
        return Document(path)

    def calculate_vector_magnitude(self, vector: np.ndarray) -> float:
        """
        Calculates the magnitude (Euclidean norm) of a vector.

        Args:
            vector: Numpy array.
        """
        # DOC FIX: this computes the magnitude, not the "angle" the
        # original docstring claimed.
        return np.sqrt(vector.dot(vector))

    def calculate_vector_angle(self, vector_1: np.ndarray, vector_2: np.ndarray) -> float:
        """
        Calculates the cosine of the angle between two vectors
        (cosine similarity, in [-1, 1]; 1 means identical direction).
        """
        mag_vec_1 = self.calculate_vector_magnitude(vector_1)
        mag_vec_2 = self.calculate_vector_magnitude(vector_2)
        return vector_1.dot(vector_2) / (mag_vec_1 * mag_vec_2)

    def compare(self, documents: List[Document] = None) -> Dict[str, float]:
        """
        Compares the selected document against the given documents plus any
        registered via add_document; returns a dictionary mapping document
        names to their similarity scores.

        Raises:
            ValueError: if no documents were given or registered.
        """
        # BUG FIX: the original extended the caller-supplied list in place,
        # mutating the caller's argument; work on a copy instead.
        docs = list(documents) if documents is not None else []
        docs.extend(self._comp_docs)
        if not docs:
            # Raise instead of assert so the check survives `python -O`.
            raise ValueError("Please provide documents that "
                             "you would like to compare your document to.")
        return {comp_doc.name: self._compare(comp_doc) for comp_doc in docs}

    def _compare(self, document: Document) -> float:
        """
        Calculates TF-IDF for the given document and returns the cosine
        similarity between it and the reference document's vector.

        Args:
            document: Loaded document for comparison.

        Returns: Float value representing the similarity of the two documents.
        """
        tfidf = self._vectorizer.vectorize(self._doc, [document])
        return self.calculate_vector_angle(self._doc_vector, tfidf)
|
# -*- coding: utf-8 -*-
import scrapy
from ganji.items import AutohomeItem_koubei
import time
from scrapy.conf import settings
from scrapy.mail import MailSender
import logging
import json
website ='autohome_koubei'
class CarSpider(scrapy.Spider):
name = website
allowed_domains = ["autohome.com.cn"]
start_urls=['http://k.autohome.com.cn/suva01/', ]
def __init__(self, **kwargs):
    """Spider setup: mail reporter, crawl counter, and MongoDB settings.

    NOTE(review): mutates the global `settings` object at construction
    time (crawl cap, Mongo db/collection) -- a module-level side effect;
    `scrapy.conf.settings` is also a deprecated access path in modern
    Scrapy -- verify against the project's Scrapy version.
    """
    # args
    super(CarSpider, self).__init__(**kwargs)
    # problem report: mailer configured from project settings (SMTP).
    self.mailer = MailSender.from_settings(settings)
    self.counts = 0
    # Hard crawl cap, exported below as CrawlCar_Num.
    self.carnum = 5000000
    # Mongo output configuration (consumed by the project's item pipeline).
    settings.set('CrawlCar_Num', self.carnum, priority='cmdline')
    settings.set('MONGODB_DB', 'koubei', priority='cmdline')
    settings.set('MONGODB_COLLECTION', website, priority='cmdline')
# get family list
def parse(self, response):
    """Parse the top-level category page: one dict per vehicle type,
    then follow each type link to its family listing.

    Each extraction falls back to '-' when the node is missing.
    """
    for typepath in response.xpath('//div[@class="findcont-choose"]/a'):
        familydata = dict()
        familydata['type'] = typepath.xpath('text()').extract_first() \
            if typepath.xpath('text()') else '-'
        # BUG FIX: the guard used '/@href' (an absolute path relative to the
        # document root, which is always empty for these <a> nodes) while the
        # value used '@href', so 'typeid' could never pass its own guard.
        # Use the same relative '@href' expression in both places.
        familydata['typeid'] = typepath.xpath('@href').re('/([\s\S]*?)/')[0] \
            if typepath.xpath('@href').re('/([\s\S]*?)/') else '-'
        urlbase = typepath.xpath('@href').extract_first()
        url = response.urljoin(urlbase)
        yield scrapy.Request(url, meta={'familydata': familydata}, callback=self.parse_family)
#get family list
def parse_family(self, response):
    """Parse a type page into one metadata dict per car family, then follow
    both the in-sale and the stopped-selling review listings of each family.

    Each xpath extraction falls back to '-' when the node is missing.
    """
    for familypath in response.xpath('//ul[@class="list-cont"]/li'):
        familydata=dict()
        # Family display name and numeric id (digits of the detail link).
        familydata['familyname'] = familypath.xpath('div[@class="cont-name"]/a/text()').extract_first() \
            if familypath.xpath('div[@class="cont-name"]/a/text()') else '-'
        familydata['familyid'] = familypath.xpath('div[@class="cont-name"]/a/@href').re('\d+')[0] \
            if familypath.xpath('div[@class="cont-name"]/a/@href').re('\d+') else '-'
        # Review count and average score shown on the listing card.
        familydata['familycount'] = familypath.xpath('div[@class="c999 cont-text"]/a/text()').extract_first() \
            if familypath.xpath('div[@class="c999 cont-text"]/a/text()') else '-'
        familydata['familyscore'] = familypath.xpath('div/a/span[@class="red"]/text()').extract_first() \
            if familypath.xpath('div/a/span[@class="red"]/text()') else '-'
        # Merge in the type-level metadata carried over via request meta.
        familydata=dict(familydata,**response.meta['familydata'])
        # Crawl both the current listing and its 'stopselling/' variant.
        urlbaselist=[familypath.xpath('div[@class="cont-name"]/a/@href').extract_first(),
                     familypath.xpath('div[@class="cont-name"]/a/@href').extract_first()+'stopselling/',]
        for urlbase in urlbaselist:
            url=response.urljoin(urlbase)
            yield scrapy.Request(url,meta={'familydata':familydata},callback=self.parse_koubei)
# get car infor
def parse_koubei(self, response):
itembase = AutohomeItem_koubei()
####normal infor
itembase['grabtime'] = time.strftime('%Y-%m-%d %X', time.localtime())
itembase['website'] = website
#### familydata
itembase['familyname'] = response.meta['familydata']['familyname']
itembase['familyid'] = response.meta['familydata']['familyid']
itembase['familycount'] = response.meta['familydata']['familycount']
itembase['familyscore'] = response.meta['familydata']['familyscore']
itembase['type'] = response.meta['familydata']['type']
itembase['typeid'] = response.meta['familydata']['typeid']
for mount in response.xpath('//div[@class="mouthcon"]'):
item=AutohomeItem_koubei()
item['carid'] = mount.xpath(u'div/div[@class="mouthcon-cont-left"]/div[@class="choose-con mt-10"]/dl/dt[contains(text(),"车型")]/../dd/a[2]/@href').re('\d+')[0] \
if mount.xpath(u'div/div[@class="mouthcon-cont-left"]/div[@class="choose-con mt-10"]/dl/dt[contains(text(),"车型")]/../dd/a[2]/@href').re('\d+') else '-'
item['salesdesc'] = mount.xpath(u'div/div[@class="mouthcon-cont-left"]/div[@class="choose-con mt-10"]/dl/dt[contains(text(),"车型")]/../dd/a[2]/span/text()').extract_first() \
if mount.xpath(u'div/div[@class="mouthcon-cont-left"]/div[@class="choose-con mt-10"]/dl/dt[contains(text(),"车型")]/../dd/a[2]/span/text()') else '-'
item['user_name'] = mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/text()').extract_first().strip() \
if mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/text()') else '-'
item['userid'] = mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/@href').re('\d+')[0] \
if mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/@href').re('\d+') else '-'
userlevel = mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/i/@class').extract_first() \
if mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[@class="usercont-name fn-clear"]/div[@class="name-text"]/p/a[1]/i/@class') else '-'
usercfcar = mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[3]/div[@class="name-text"]/text()').extract_first() \
if mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[3]/div[@class="name-text"]/text()') else '-'
usercfcarid = mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[3]/div[@class="name-text"]/a/@href').extract_first() \
if mount.xpath('div/div[@class="mouthcon-cont-left"]/div[@class="choose-con"]/div/div[3]/div[@class="name-text"]/a/@href') else '-'
url = scrapy.Field()
status = scrapy.Field()
post_date = scrapy.Field()
grabtime = scrapy.Field()
contentid = scrapy.Field()
koubeilevel = scrapy.Field()
ext_dealer = scrapy.Field()
ext_dealerid = scrapy.Field()
ext_fuel = scrapy.Field()
ext_nake_price = scrapy.Field()
ext_purchase_date = scrapy.Field()
ext_purchase_place = scrapy.Field()
ext_purpose = scrapy.Field()
ext_run = scrapy.Field()
mouth_content = scrapy.Field()
mouth_reply_count = scrapy.Field()
mouth_support_count = scrapy.Field()
mouth_view_count = scrapy.Field()
mouth_space = scrapy.Field()
mouth_power = scrapy.Field()
mouth_control = scrapy.Field()
mouth_fuel = scrapy.Field()
mouth_comfort = scrapy.Field()
mouth_appearance = scrapy.Field()
mouth_trim = scrapy.Field()
mouth_cost_efficient = scrapy.Field()
score_space = scrapy.Field()
score_power = scrapy.Field()
score_control = scrapy.Field()
score_fuel = scrapy.Field()
score_comfort = scrapy.Field()
score_appearance = scrapy.Field()
score_trim = scrapy.Field()
score_cost_efficient = scrapy.Field()
#brand
item['brandname'] = response.meta['branddata']['brandname']
item['brandid'] = response.meta['branddata']['brandid']
#factory
item['factoryname'] = response.meta['factorydata']['factoryname']
item['factoryid'] = response.meta['factorydata']['factoryid']
####key info
# item['shopname'] =shop.xpath('ul/li[@class="tit-row"]/a/span/text()').extract_first() \
# if shop.xpath('ul/li[@class="tit-row"]/a/span/text()') else '-'
# item['url']=shop.xpath('ul/li[@class="tit-row"]/a/@href').extract_first() \
# if shop.xpath('ul/li[@class="tit-row"]/a/@href') else '-'
# if shop.xpath('ul/li[@class="tit-row"]/span[@class="green"]'):
# item['shoptype'] = shop.xpath('ul/li[@class="tit-row"]/span[@class="green"]/text()').extract_first() \
# if shop.xpath('ul/li[@class="tit-row"]/span[@class="green"]/text()') else '-'
# item['shopcolor'] = 'green'
# elif shop.xpath('ul/li[@class="tit-row"]/span[@class="blue"]'):
# item['shoptype'] = shop.xpath('ul/li[@class="tit-row"]/span[@class="blue"]/text()').extract_first() \
# if shop.xpath('ul/li[@class="tit-row"]/span[@class="blue"]/text()') else '-'
# item['shopcolor'] = 'blue'
# item['shopstar'] = str(int(shop.xpath('ul/li[@class="tit-row"]/span[@class="icon star"]/i/@style').re('\d+')[0])*5/100) \
# if shop.xpath('ul/li[@class="tit-row"]/span[@class="icon star"]/i/@style').re('\d+') else '-'
# ####salemodel and saleprice
# item['modelnumber']=str(shop.xpath('ul/li[2]/a/text()').re('\d+')[0]) \
# if shop.xpath('ul/li[2]/a/text()').re('\d+') else '-'
# item['mainbrands'] = '-'.join(shop.xpath('ul/li[2]/em/text()').extract()) \
# if shop.xpath('ul/li[2]/em/text()') else '-'
# item['tel'] = shop.xpath('ul/li[3]/span[@class="tel"]/text()').extract_first() \
# if shop.xpath('ul/li[3]/span[@class="tel"]/text()') else '-'
# item['saleregion'] = shop.xpath('ul/li[3]/span[@class="sale-whole"]/span/text()').extract_first() \
# if shop.xpath('ul/li[3]/span[@class="sale-whole"]/span/text()') else '-'
# item['priceurl']=shop.xpath('ul/li[2]/a/@href').extract_first() \
# if shop.xpath('ul/li[2]/a/@href') else '-'
# ####location
# item['location']=shop.xpath('ul/li[4]/span/text()').extract_first().replace(u'址:','') \
# if shop.xpath('ul/li[4]/span/text()') else '-'
# item['locationurl'] = shop.xpath('ul/li[4]/a/@href').extract_first() \
# if shop.xpath('ul/li[4]/a/@href') else '-'
# ####promotion
# item['promotion'] = shop.xpath('ul/li[5]/a/text()').extract_first() \
# if shop.xpath('ul/li[5]/a/text()') else '-'
# item['promotionurl'] = shop.xpath('ul/li[5]/a/@href').extract_first() \
# if shop.xpath('ul/li[5]/a/text()') else '-'
# ####img
# item['imageurl'] = shop.xpath('a/img/@src').extract_first() \
# if shop.xpath('a/img/@src') else '-'
###normal info
item['status'] = item['url']
yield item
pagenext=response.xpath(u'//a[contains(text(),"下一页")]/@href').extract_first()
if not(pagenext=='javascript:void(0)'):
url=response.urljoin(pagenext)
yield scrapy.Request(url,
meta={'citydata': response.meta['citydata'], 'branddata': response.meta['branddata'],
'factorydata': response.meta['factorydata']},
callback=self.parse_koubei)
kindnext=response.xpath('//div[@class="tab"]/a[@class="nav"]/@href').extract_first()
if kindnext:
if not(kindnext.find('kindId=2')==-1):
url = response.urljoin(kindnext)
yield scrapy.Request(url,
meta={'citydata': response.meta['citydata'], 'branddata': response.meta['branddata'],
'factorydata': response.meta['factorydata']},
callback=self.parse_koubei) |
import sys
# Rank order used for comparisons; index in this list is the card's strength.
ranks = ['2', '3', '4', '5', '6', '7', '8',
         '9', '10', 'J', 'Q', 'K', 'A']


class Card:
    """A playing card that knows whether it is trump and how it compares."""

    def __init__(self, card, trump):
        """Parse a card string like '10H' or 'QS'; `trump` is the trump suit letter."""
        self.suit = card[-1]                 # last character is the suit
        self.rank = ranks.index(card[:-1])   # everything before it is the rank
        self.is_trump = self.suit == trump

    def draws_with(self, other):
        """Return True when neither card is trump and the ranks are equal (a draw)."""
        # Return the boolean expression directly instead of an if/else that
        # returns True/False (idiom fix; behavior unchanged).
        return (not self.is_trump and not other.is_trump
                and self.rank == other.rank)

    def __str__(self):
        return '{}{}'.format(ranks[self.rank], self.suit)

    def beats(self, other):
        """Return the string form of the winning card.

        A trump card beats any non-trump card; otherwise the higher rank wins,
        and on equal rank `other` wins (matching the original tie behavior).
        """
        if self.is_trump != other.is_trump:
            # Exactly one card is trump: that card wins.
            return str(self) if self.is_trump else str(other)
        # Same trump status: compare ranks (ties go to `other`, as before).
        return str(self) if self.rank > other.rank else str(other)
# Read test cases from the file named on the command line; each line has the
# form "<card1> <card2> | <trump>".  Print the winner, or both cards on a draw.
with open(sys.argv[1], 'r') as test_cases:
    for case in test_cases:
        cards, trump = case.strip().split(' | ')
        first, second = cards.split()
        card_a = Card(first, trump)
        card_b = Card(second, trump)
        if card_a.draws_with(card_b):
            print('{} {}'.format(card_a, card_b))
        else:
            print(card_a.beats(card_b))
|
#!/usr/bin/env python
import struct
import Config
from Util import Util
class MsgHead:
    """Fixed-size binary message header (de)serialized with struct.

    Layout: pos, len (int32); sent, mode (int8); id (char[IDLEN + 2]);
    time, frompid, topid (int32), native byte order ('=').
    """

    # Pre-compiled struct format shared by all instances.
    parser = struct.Struct('=iibb%dsiii' % (Config.IDLEN + 2))
    # Field descriptors consumed by Util.Pack/Unpack/InitStruct; the list entry
    # ['id', 1, Config.IDLEN + 2] presumably describes a string field with its
    # length -- TODO confirm against Util's implementation.
    _fields = ['pos', 'len', 'sent', 'mode', ['id', 1, Config.IDLEN + 2], 'time', 'frompid', 'topid']
    #struct msghead
    size = parser.size

    def unpack(self, str):
        """Fill this header's fields from a packed byte string.

        NOTE(review): parameter name `str` shadows the builtin; kept for
        interface compatibility.
        """
        Util.Unpack(self, MsgHead.parser.unpack(str))

    def pack(self):
        """Return this header serialized to its packed byte string."""
        return MsgHead.parser.pack(*Util.Pack(self))

    def __init__(self, str = None):
        """Create an empty header, or unpack one from `str` when given."""
        if (str == None):
            Util.InitStruct(self)
        else:
            self.unpack(str)
|
from paytmpg.pg.request.ExtraParameterMap import ExtraParameterMap
from paytmpg.pg.utils.stringUtil import make_string, equals
class RefundBaseRequest(ExtraParameterMap):
    """Common fields of a Paytm refund request: merchant id, order id, refund reference id."""

    def __init__(self):
        self.mid = None      # merchant id
        self.orderId = None  # id of the order being refunded
        self.refId = None    # merchant's unique refund reference id
        super(RefundBaseRequest, self).__init__()

    def get_mid(self):
        """Return the merchant id."""
        return self.mid

    def set_mid(self, mid):
        """Set the merchant id."""
        self.mid = mid

    def get_order_id(self):
        """Return the order id."""
        return self.orderId

    def set_order_id(self, order_id):
        """Set the order id."""
        self.orderId = order_id

    def get_ref_id(self):
        """Return the refund reference id."""
        return self.refId

    def set_ref_id(self, ref_id):
        """Set the refund reference id."""
        self.refId = ref_id

    def __str__(self):
        return make_string(self)

    def __eq__(self, other):
        return equals(self, other)
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Contains functions and class definitinos for supervised training of a
multiclass gaussian model
Created on Wed Nov 30 20:56:56 2016
@author: ibackus
"""
import numpy as np
import utils
class GaussianModel():
    """
    GaussianModel() fits multiple gaussians to the given data using a VERY
    simple method.

    k multivariate normal distributions are fit by looping over the data
    in the k classes and fitting a multivariate normal.  Predictions return
    the class with the maximum probability.

    Confidence is defined as (maximum probability)/(sum of probabilities)
    for a given data point, which works under the assumption that every
    data point belongs to one of the classes.
    """
    def __init__(self):
        pass

    def fit(self, x, y, regScale=0.0):
        """Fit one multivariate gaussian per unique label in `y`.

        Args:
            x: data matrix, one sample per row.
            y: class labels, one per row of x (flattened before use).
            regScale: diagonal regularization scale forwarded to makePDF.
        """
        self.regScale = regScale
        self.classNames = np.unique(y)
        self.nClass = len(self.classNames)
        # Split the xs for all the different classes
        xs = [x[y.flatten() == className] for className in self.classNames]
        # Generate pdfs for all the gaussians.
        # BUG FIX: regScale was previously hard-coded to 0.0 here, so the
        # caller's regularization request was silently ignored.
        self.pdfs = [makePDF(xi, regScale=regScale) for xi in xs]

    def pred(self, x):
        """
        Predict the class of x (label of the most probable gaussian per row).
        """
        probs = self.prob(x)
        classInd = probs.argmax(1)
        return utils.columnVector(self.classNames[classInd])

    def prob(self, x):
        """
        Probability of x belonging to the different classes.
        For P = prob(x), P[i,j] is prob of x[i] belonging to class j.
        """
        probs = np.array([pdf(x) for pdf in self.pdfs])
        return probs.T

    def confidence(self, x):
        """
        Confidence is defined as (maximum probability)/(sum of probabilities)
        for a given data point, which works under the assumption that every
        data point belongs to one of the classes.  Rows whose probabilities
        sum to zero get confidence 0.
        """
        P = self.prob(x)
        psum = P.sum(1)
        pnorm = P.max(1)/P.sum(1)
        # Replace the NaN produced by 0/0 with an explicit zero confidence.
        pnorm[psum == 0] = 0
        return utils.columnVector(pnorm)
def multivariateNormalPDF(covar, mean, regScale=0.0):
    """
    Generates a function f(x) which is a multivariate normal distribution.
    x is assumed to store the vectors along the rows (ie x is a row vector or
    a matrix composed of row vectors).

    Args:
        covar: covariance matrix of the distribution (not modified).
        mean: mean vector.
        regScale: optional diagonal regularization, as a fraction of the
            mean absolute diagonal entry.
    """
    mean = utils.columnVector(mean).T
    # Regularize the diagonal.  BUG FIX: work on a copy -- the old code added
    # the regularization into the caller's covariance matrix in place.
    if regScale != 0:
        covar = np.array(covar, copy=True)
        ind = np.arange(len(covar))
        regularization = abs(covar[ind, ind]).mean() * regScale
        covar[ind, ind] += regularization
    # Pre-compute the terms that do not depend on x.
    covarInv = np.linalg.inv(covar)
    det = np.linalg.det(2 * np.pi * covar)
    norm = 1./np.sqrt(det)
    def PDF(x):
        """
        x is assumed to store the vectors along the rows (ie x is a row vector or
        a matrix composed of row vectors).
        """
        if len(x) > 1:
            # Evaluate row by row for a matrix of samples.
            return np.array([PDF(x1[None, :]) for x1 in x])
        return float(norm \
            * np.exp(-0.5 * np.dot((x-mean), np.dot(covarInv, (x-mean).T))))
    return PDF
def makePDF(x0, regScale=0.0):
    """Build a gaussian PDF fitted to the rows of x0 (sample mean/covariance)."""
    mu = x0.mean(0)
    centered = x0 - mu
    # Unbiased sample covariance: X'X / (n - 1).
    cov = np.dot(centered.T, centered) / (len(x0) - 1.)
    return multivariateNormalPDF(cov, mu, regScale)
import unittest
from unittest.mock import patch, MagicMock
import pytest
from adk.src.adk.builtin_steps.automation.execute_script.ebs.create_ebs_volume import CreateEbsVolume
@pytest.mark.unit_test
class TestCreateEbsVolume(unittest.TestCase):
    """Unit tests for the CreateEbsVolume automation step, with boto3 fully mocked."""

    def setUp(self):
        # Patch boto3.client so the step under test talks to our MagicMocks
        # instead of AWS.
        self.patcher = patch('boto3.client')
        self.client = self.patcher.start()
        self.mock_autoscaling = MagicMock()
        self.mock_ec2 = MagicMock()
        self.side_effect_map = {
            'ec2': self.mock_ec2
        }
        # boto3.client('ec2', config=...) -> mock_ec2; any other service -> None.
        self.client.side_effect = lambda service_name, config=None: self.side_effect_map.get(service_name)

    def tearDown(self):
        self.patcher.stop()

    def test_create_volume_non_gp2(self):
        """For non-gp2 volume types, Iops must be forwarded to create_volume."""
        params = {
            'CalculateIopsAndVolType.TargetVolumeType': 'MyType',
            'CalculateIopsAndVolType.TargetVolumeIOPS': 'MyIops',
            'EBSSnapshotIdentifier': 'MySnapshotId',
            'TargetAvailabilityZone': 'us-east-1a'
        }
        self.mock_ec2.create_volume.return_value = {'VolumeId': "ResponseVolumeId"}
        CreateEbsVolume().invoke(params)
        # The step writes the new volume id back into the shared params dict.
        self.assertEqual('ResponseVolumeId', params['CreateEbsVolume.CreatedVolumeId'])
        self.mock_ec2.create_volume.assert_called_with(SnapshotId='MySnapshotId', AvailabilityZone='us-east-1a',
                                                       VolumeType='MyType', Iops='MyIops')

    def test_create_volume_gp2(self):
        """For gp2 volumes, the Iops parameter must be omitted entirely."""
        params = {
            'CalculateIopsAndVolType.TargetVolumeType': 'gp2',
            'CalculateIopsAndVolType.TargetVolumeIOPS': 'MyIops',
            'EBSSnapshotIdentifier': 'MySnapshotId',
            'TargetAvailabilityZone': 'us-east-1a'
        }
        self.mock_ec2.create_volume.return_value = {'VolumeId': "ResponseVolumeId"}
        CreateEbsVolume().invoke(params)
        self.assertEqual('ResponseVolumeId', params['CreateEbsVolume.CreatedVolumeId'])
        # Iops not included for gp2
        self.mock_ec2.create_volume.assert_called_with(SnapshotId='MySnapshotId', AvailabilityZone='us-east-1a',
                                                       VolumeType='gp2')
|
import FWCore.ParameterSet.Config as cms
from RecoEgamma.EgammaElectronProducers.gsfElectronSequence_cff import *
from RecoEgamma.EgammaElectronProducers.uncleanedOnlyElectronSequence_cff import *
from RecoEgamma.EgammaPhotonProducers.photonSequence_cff import *
from RecoEgamma.EgammaPhotonProducers.conversionSequence_cff import *
from RecoEgamma.EgammaPhotonProducers.conversionTrackSequence_cff import *
from RecoEgamma.EgammaPhotonProducers.allConversionSequence_cff import *
from RecoEgamma.EgammaPhotonProducers.gedPhotonSequence_cff import *
from RecoEgamma.EgammaIsolationAlgos.egammaIsolationSequence_cff import *
from RecoEgamma.EgammaIsolationAlgos.interestingEgammaIsoDetIdsSequence_cff import *
from RecoEgamma.PhotonIdentification.photonId_cff import *
from RecoEgamma.ElectronIdentification.electronIdSequence_cff import *
from RecoEgamma.EgammaHFProducers.hfEMClusteringSequence_cff import *
from TrackingTools.Configuration.TrackingTools_cff import *
from RecoEgamma.EgammaIsolationAlgos.egmIsolationDefinitions_cff import *
# Global egamma reconstruction: GSF electron tracking plus conversion finding.
egammaGlobalRecoTask = cms.Task(electronGsfTrackingTask,conversionTrackTask,allConversionTask)
egammaGlobalReco = cms.Sequence(egammaGlobalRecoTask)
# this might be historical: not sure why we do this
from Configuration.Eras.Modifier_fastSim_cff import fastSim
# For fastSim, swap the conversion-track task for its no-ECAL-seeded variant.
_fastSim_egammaGlobalRecoTask = egammaGlobalRecoTask.copy()
_fastSim_egammaGlobalRecoTask.replace(conversionTrackTask,conversionTrackTaskNoEcalSeeded)
fastSim.toReplaceWith(egammaGlobalRecoTask, _fastSim_egammaGlobalRecoTask)
# Electron/photon reconstruction that runs before particle flow.
egammaHighLevelRecoPrePFTask = cms.Task(gsfEcalDrivenElectronTask,uncleanedOnlyElectronTask,conversionTask,photonTask)
egammaHighLevelRecoPrePF = cms.Sequence(egammaHighLevelRecoPrePFTask)
# fastSim drops the uncleaned-only electrons and the conversion task.
fastSim.toReplaceWith(egammaHighLevelRecoPrePFTask,egammaHighLevelRecoPrePFTask.copyAndExclude([uncleanedOnlyElectronTask,conversionTask]))
# Identification, isolation and HF clustering that run after particle flow.
egammaHighLevelRecoPostPFTask = cms.Task(interestingEgammaIsoDetIdsTask,egmIsolationTask,photonIDTask,photonIDTaskGED,eIdTask,hfEMClusteringTask)
egammaHighLevelRecoPostPF = cms.Sequence(egammaHighLevelRecoPostPFTask)
from Configuration.Eras.Modifier_pA_2016_cff import pA_2016
from Configuration.Eras.Modifier_peripheralPbPb_cff import peripheralPbPb
from Configuration.ProcessModifiers.pp_on_AA_cff import pp_on_AA
from Configuration.Eras.Modifier_pp_on_XeXe_2017_cff import pp_on_XeXe_2017
from Configuration.Eras.Modifier_ppRef_2017_cff import ppRef_2017
#HI-specific algorithms needed in pp scenario special configurations
from RecoHI.HiEgammaAlgos.photonIsolationHIProducer_cfi import photonIsolationHIProducerpp
from RecoHI.HiEgammaAlgos.photonIsolationHIProducer_cfi import photonIsolationHIProducerppGED
from RecoHI.HiEgammaAlgos.photonIsolationHIProducer_cfi import photonIsolationHIProducerppIsland
# Heavy-ion-like eras additionally run the HI photon isolation producers.
_egammaHighLevelRecoPostPF_HITask = egammaHighLevelRecoPostPFTask.copy()
_egammaHighLevelRecoPostPF_HITask.add(photonIsolationHIProducerpp)
_egammaHighLevelRecoPostPF_HITask.add(photonIsolationHIProducerppGED)
_egammaHighLevelRecoPostPF_HITask.add(photonIsolationHIProducerppIsland)
for e in [pA_2016, peripheralPbPb, pp_on_AA, pp_on_XeXe_2017, ppRef_2017]:
    e.toReplaceWith(egammaHighLevelRecoPostPFTask, _egammaHighLevelRecoPostPF_HITask)
|
import numpy as np
from vtk_rw import read_vtk, write_vtk
# Python 2 script (uses print statements).
iterations = 1000          # number of random datasets per hemisphere
hemis = ['rh', 'lh']
# Input/output path templates, filled with hemisphere (and iteration) below.
mesh_file="/scr/ilz3/myelinconnect/new_groupavg/surfs/lowres/%s_lowres_new.vtk"
t1_file = '/scr/ilz3/myelinconnect/new_groupavg/t1/smooth_3/%s_t1_avg_smooth_3.npy'
mask_file = '/scr/ilz3/myelinconnect/new_groupavg/masks/%s_fullmask_new.npy'
random_npy_file = '/scr/ilz3/myelinconnect/new_groupavg/model/random_data/raw/%s_random_normal_%s.npy'
random_vtk_file = '/scr/ilz3/myelinconnect/new_groupavg/model/random_data/raw/%s_random_normal_%s.vtk'
'''
Create random datasets by drawing from a normal distribution and write them to
group average surface for subsequent smoothing with cbstools.
'''
for hemi in hemis:
    print hemi
    # Load the group-average mesh: vertices, faces, per-vertex data.
    v,f,d = read_vtk(mesh_file%hemi)
    for r in range(iterations):
        print r
        # One standard-normal value per mesh vertex.
        random_data = np.random.randn(v.shape[0])
        np.save(random_npy_file%(hemi, str(r)), random_data)
        # write_vtk expects a column vector, hence the added axis.
        write_vtk(random_vtk_file%(hemi, str(r)), v, f, data=random_data[:,np.newaxis])
from bs4 import BeautifulSoup
import os
import codecs
# csv column name -> index of the <td> cell holding it in the dataGrid table.
MAPPING = {
    "code":2,
    "title":3,
    "people":4,
    "organization":5
}
def main():
    """Scrape every saved .htm page under `in_dir` and dump all rows to out.csv."""
    in_dir = r"/Users/liuyang/Desktop/nsfc"   # directory holding the saved html pages
    out_file = in_dir + os.sep + "out.csv"    # csv destination
    rows = []
    for page in get_all_html(in_dir):
        rows.extend(get_proj_info_from_html(in_dir + os.sep + page))
    # data to csv
    write_to_csv(rows, out_file)
def get_all_html(dir):
    """Lazily yield the names of the .htm files directly inside `dir`."""
    return (name for name in os.listdir(dir) if name.endswith(".htm"))
def get_proj_info_from_html(h_file):
    """Parse one saved result page and return its project rows.

    Returns a list of dicts keyed like MAPPING.  BUG FIX: when the expected
    dataGrid table is missing this now returns [] instead of None -- the old
    bare `return` made the caller's `data.extend(...)` raise TypeError.
    """
    with codecs.open(h_file, encoding="utf8", mode="r") as f:
        soup = BeautifulSoup(f.read())
    tables = soup.find_all("table", {"id":"dataGrid"})
    if len(tables) <= 0:
        return []
    ret = []
    trs = tables[0].find_all("tr")
    for tr in trs[1:]:  # skip the header row
        tds = tr.find_all("td")
        one_row = {}
        for k, v in MAPPING.items():
            one_row[k] = tds[v].string
        ret.append(one_row)
    return ret
def write_to_csv(data, o_file):
    """Write rows (dicts keyed like MAPPING) to `o_file` as a minimal csv.

    Args:
        data: list of row dicts whose keys are MAPPING's keys.
        o_file: destination path, written as utf-8.
    """
    titles = MAPPING.keys()
    with codecs.open(o_file, mode="w", encoding="utf8") as f:
        # header row
        f.write(",".join(titles))
        f.write("\n")
        for d in data:
            f.write(",".join(d[t] for t in titles))
            f.write("\n")
        # BUG FIX: removed the redundant f.close() -- the with-block already
        # closes the file on exit.
# Run the scrape only when executed directly.
if __name__ == '__main__':
    main()
def banknotes(values, price):
    """Minimum number of banknotes from `values` that sum exactly to `price`.

    Returns -1 when `price` cannot be formed.  BUG FIXES: the old code removed
    too-large notes from `values` while iterating over that same list, which
    skips elements and mutates the caller's data, and it crashed on max([])
    when every note exceeded the price.
    """
    # Filter instead of in-place removal; the caller's list is untouched.
    usable = [v for v in values if v <= price]
    if not usable:
        return -1
    # A valid count never exceeds `price` (notes are >= 1), so price + 1
    # doubles as the "unreachable" sentinel, as in the original.
    INF = price + 1
    papers = [INF] * (1 + max(usable))
    for v in usable:
        papers[v] = 1
    # Bottom-up DP: best count for each amount up to price.
    for amount in range(1 + max(usable), price + 1):
        papers.append(1 + min(papers[amount - v] for v in usable))
    return -1 if papers[price] > price else papers[price]
# Demo: the cheapest way to pay 29 with notes of 30, 1 and 14 (prints 3).
values = [30, 1, 14]
price = 29
papers = banknotes(values, price)
print(papers)
|
from setuptools import setup
# Packaging metadata for the `ferret` tool.
setup(
    name='ferret',
    version='1.0',
    description='A data collection and analysis tool for Politics & War.',
    author='Lyrositor',
    packages=['ferret'],
    python_requires='>=3.5',
    # Runtime dependencies; torndsession provides session support for Tornado.
    install_requires=['tornado', 'transcrypt', 'sqlalchemy', 'requests',
                      'torndsession'],
    # Installs a `ferret` console command that dispatches to ferret.main:main.
    entry_points={
        'console_scripts': [
            'ferret=ferret.main:main',
        ],
    }
)
|
# -*- coding: utf-8 -*-
from Parser import Parser, Document
from Indexer import IndexerSimple
from Models import ModeleLangue, Okapi
from Query import QueryParser
from Metrics import Precision, Rappel, FMesure, AvgPrecision, ReciprocalRank, NDCG
from Eval import EvalIRModel
import numpy as np
import pandas as pd
#////////////////////////////////// Grid Search avec séparation train/test ///////
def trainTest(data,p):
    """Randomly split a dict of queries into train and test dicts.

    Args:
        data: dict {doc_id: Query object}.
        p: fraction of entries to place in the train split.

    Returns:
        (train, test): two disjoint dicts whose union is `data`; the train
        dict holds int(p * len(data)) entries drawn without replacement.
    """
    n = len(data)              # total number of documents
    iddocs = list(data.keys()) # identifiers
    # Draw the train ids without replacement; materialize them as a set so
    # each membership test below is O(1) instead of scanning a numpy array.
    idtrain = set(np.random.choice(iddocs,int(p*n),replace=False).tolist())
    # Build the final dictionaries.
    train = {}
    test = {}
    for iddoc in iddocs:
        if iddoc in idtrain: train[iddoc] = data[iddoc]
        else: test[iddoc] = data[iddoc]
    return train, test
#////////////////////////////////// Putting it together //////////////////////////
file = "data/cisi/cisi"
# Build the document collection, its index, and the query collection.
parse = Parser(file+".txt")
coll = parse.getResult()
indexcoll = IndexerSimple(coll)
qp = QueryParser(file+".qry",file+".rel")
queries = qp.getQoll()
# Train/test split of the queries:
train, test = trainTest(queries,0.8)
avgP = AvgPrecision() # mAP
# Jelinek-Mercer /////////////////////////////////////////////////////////////////
# Grid for the lambda smoothing parameter.
paramlambda = [0.1*i for i in range(11)] #[0, 0.1,..., 1]
res = []
# For every candidate model, average the average-precision over all
# training queries.
for l in paramlambda:
    e = EvalIRModel({"Jelinek": ModeleLangue(indexcoll,l)}, {"avgP": avgP}, train)
    res.append(e.evalAll()["avgP"]["Jelinek"][0])
# Pick the best parameter on the train queries.
best = paramlambda[np.argmax(res)]
print("Optimum: {}".format(best))
# Evaluate that parameter on the held-out queries.
e = EvalIRModel({"Jelinek": ModeleLangue(indexcoll,best)}, {"avgP": avgP}, test)
print("Test en validation: {}".format(e.evalAll()["avgP"]["Jelinek"][0]))
# Okapi BM25 /////////////////////////////////////////////////////////////////////
# Parameter grids for k1 and b.
paramK1 = [0.1*i for i in range(8,19)] #[0.8, 0.9,..., 1.9]
paramb = [0.1*i for i in range(1,11)] #[0.1,..., 1]
res = []    # average precision of every candidate model
liste = []  # the matching (k1, b) combinations
for k1 in paramK1:
    for b in paramb:
        e = EvalIRModel({"Okapi": Okapi(indexcoll,k1,b)}, {"avgP": avgP}, train)
        res.append(e.evalAll()["avgP"]["Okapi"][0])
        liste.append((k1,b))
# Pick the best (k1, b) pair on the train queries.
print(res)
best = liste[np.argmax(res)]
print("Optimum: {}".format(best))
# Evaluate it on the held-out queries.
e = EvalIRModel({"Okapi": Okapi(indexcoll,best[0],best[1])}, {"avgP": avgP}, test)
print("Test en validation: {}".format(e.evalAll()["avgP"]["Okapi"][0]))
# Generated by Django 3.1.4 on 2020-12-17 10:30
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the obsolete delivery_cost field from Order."""

    dependencies = [
        ('checkout', '0003_auto_20201215_2000'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='order',
            name='delivery_cost',
        ),
    ]
|
import logging
# from conf import settings
# import conf.settings
# from conf import settings
# from bin import atm
import os
import sys
BASE_DIR=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)
import logging
from conf import settings
def logger(log_type):
    """Create (or fetch) the named logger with the configured level and return it.

    Args:
        log_type: logger name passed to logging.getLogger.

    Returns:
        The configured logging.Logger instance.
    """
    log = logging.getLogger(log_type)
    log.setLevel(settings.LOG_LEVEL)
    # BUG FIX: the configured logger was previously never returned, so every
    # caller received None.
    return log
|
import io
import sys
import base64
import traceback
import numpy as np
import skimage
import skimage.io
import skimage.segmentation
from skimage.measure import regionprops
class ImageProcessor:
    """Decode a base64 image and compute SLIC superpixel segments with centroids."""

    def __init__(self):
        pass

    def convert_image_b64string_to_ndarray(self, base64_image):
        """Decode a base64-encoded image into an RGB ndarray (alpha dropped)."""
        encoded = base64.b64decode(base64_image)
        image_array = skimage.io.imread(io.BytesIO(encoded))[:,:,:3] # y,x,channel, if png, drop alpha
        return image_array

    def produce_segments(self, image_array, n_segments=10, sigma=3):
        """Run SLIC segmentation and return its labels plus region centroids.

        Returns {"segments": label image, "centroids": list of (y, x) tuples}.
        NOTE(review): any exception is printed and swallowed, so this returns
        None on failure -- callers must handle that case.
        """
        try:
            segments = skimage.segmentation.slic(image_array, n_segments = n_segments, sigma = sigma)
            # +1 because regionprops ignores label 0 while slic labels start at 0.
            regions = regionprops(segments+1)
            centroids = [props.centroid for props in regions]
            return {
                "segments": segments,
                "centroids": centroids
            }
        except Exception as err:
            traceback.print_exception(*sys.exc_info())

    def __call__(self, base64_image, n_segments=10, sigma=3):
        """Convenience entry point: decode the image, then segment it."""
        img_array = self.convert_image_b64string_to_ndarray(base64_image)
        result = self.produce_segments(img_array, n_segments, sigma)
        return result
import numpy as np
from SALib.analyze import delta
from SALib.sample import latin
from fishery import fish_game
import matplotlib.pyplot as plt
# Set up dictionary with system parameters
# Set up dictionary with system parameters
problem = {
    'num_vars': 9,
    'names': ['a', 'b', 'c', 'd','h',
              'K','m','sigmaX','sigmaY'],
    'bounds': [[ 0.002, 2],
               [0.005, 1],
               [0.2, 1],
               [0.05, 0.2],
               [0.001, 1],
               [100, 5000],
               [0.1, 1.5],
               [0.001, 0.01],
               [0.001, 0.01]]
}
# Nominal values used when a factor is held fixed.
defaultvalues = np.array([0.005, 0.5, 0.5, 0.1, 0.1, 2000, 0.7, 0.004, 0.004])
# Generate samples
nsamples = 1000
X_Set1 = latin.sample(problem, nsamples) # This is Set 1
# Run model for all samples
output = [fish_game(*X_Set1[j,:]) for j in range(nsamples)]
# Perform analysis (delta moment-independent sensitivity measure)
results = delta.analyze(problem, X_Set1, np.asarray(output), print_to_console=True)
# Sort factors by importance
factors_sorted = np.argsort(results['delta'])[::-1]
# Set up DataFrame of default values to use for experiment
X_defaults = np.tile(defaultvalues,(nsamples, 1))
# Create initial Sets 2 and 3
X_Set2 = np.copy(X_defaults)
X_Set3 = np.copy(X_Set1)
# Fix an increasing number of top-ranked factors and check how well each
# reduced set reproduces the full-sample output (one figure per f).
for f in range(1, len(factors_sorted)+1):
    ntopfactors = f
    for i in range(ntopfactors): #Loop through all important factors
        X_Set2[:,factors_sorted[i]] = X_Set1[:,factors_sorted[i]] #Fix use samples for important
        X_Set3[:,factors_sorted[i]] = X_defaults[:,factors_sorted[i]] #Fix important to defaults
    # Run model for all samples
    output_Set2 = [fish_game(*X_Set2[j,:]) for j in range(nsamples)]
    output_Set3 = [fish_game(*X_Set3[j,:]) for j in range(nsamples)]
    # Calculate coefficients of correlation
    coefficient_S1_S2 = np.corrcoef(output,output_Set2)[0][1]
    coefficient_S1_S3 = np.corrcoef(output,output_Set3)[0][1]
    # Plot outputs and correlation
    fig = plt.figure(figsize=(18,12))
    ax1 = fig.add_subplot(1,2,1)
    ax1.plot(output,output)  # identity line for reference
    ax1.scatter(output,output_Set2)
    ax1.set_xlabel("Set 1",fontsize=14)
    ax1.set_ylabel("Set 2",fontsize=14)
    ax1.tick_params(axis='both', which='major', labelsize=12)
    ax1.set_title('Set 1 vs Set 2 - ' + str(f) + ' top factors',fontsize=20)
    ax1.text(0.05,0.95,'R= '+"{0:.3f}".format(coefficient_S1_S2),transform = ax1.transAxes,fontsize=16)
    ax2 = fig.add_subplot(1,2,2)
    ax2.plot(output,output)
    ax2.scatter(output,output_Set3)
    ax2.set_xlabel("Set 1",fontsize=14)
    ax2.set_ylabel("Set 3",fontsize=14)
    ax2.tick_params(axis='both', which='major', labelsize=12)
    ax2.set_title('Set 1 vs Set 3 - ' + str(f) + ' top factors',fontsize=20)
    ax2.text(0.05,0.95,'R= '+"{0:.3f}".format(coefficient_S1_S3),transform = ax2.transAxes,fontsize=16)
    plt.savefig(str(f)+'_topfactors.png')
    plt.close()
# Copyright ServiceNow, Inc. 2021 – 2022
# This source code is licensed under the Apache 2.0 license found in the LICENSE file
# in the root directory of this source tree.
import abc
import json
import os
import time
from typing import List, Optional
import h5py
import structlog
from filelock import FileLock
from retrying import RetryError, retry
from azimuth.types import ModuleResponse
from azimuth.utils.conversion import from_pickle_bytes, to_pickle_bytes
# Module-level structured logger.
log = structlog.get_logger(__name__)
class HDF5FileOpenerWithRetry:
    """Open an HDF5 file with multiple retries.

    Args:
        args: Forwarded to h5py.File
        n_retry: Number of tries to open the file.
        sleep: Sleep time between tries in second.
        kwargs: Forwarded to h5py.File.

    Raises:
        TimeoutError: when the file still cannot be opened after n_retry tries.
    """
    def __init__(
        self,
        *args,
        n_retry=5,
        sleep=1,
        **kwargs,
    ):
        self.n_retry = n_retry
        self.sleep = sleep
        self.file = None
        for i in range(self.n_retry):
            try:
                self.file = h5py.File(*args, **kwargs)
                break
            except OSError as e:
                log.warning(f"Issue opening HDF5: {e}. Retrying... {i}/{self.n_retry}")
                # BUG FIX: only sleep when another attempt follows; the old
                # code also slept after the final failure, delaying the error
                # below for nothing.
                if i + 1 < self.n_retry:
                    time.sleep(self.sleep)
        else:
            # for/else: reached only when no attempt succeeded (no break).
            raise TimeoutError("Can't open that file.")

    def __enter__(self):
        return self.file

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Tolerate a failed open: self.file stays None in that case.
        if self.file is not None:
            self.file.close()
class CachingMechanism(abc.ABC):
    """Parent module for caching mechanism in the application."""

    @abc.abstractmethod
    def _store_data_in_cache(self, result: List[ModuleResponse], indices: List[int]):
        # Persist `result` records under the given dataset indices.
        ...

    @abc.abstractmethod
    def _check_cache(self, indices: Optional[List[int]]):
        # Return True when every requested index already has a cached record.
        ...

    @abc.abstractmethod
    def _get_cache(self, indices: List[int]):
        # Load and return the cached records for `indices`.
        ...
class HDF5CacheMixin(CachingMechanism):
    """HDF5 caching functions to cache the results of modules."""

    # Supplied by the concrete module class that mixes this in:
    name: str
    _cache_lock: str  # Lockfile path
    _cache_file: str  # Cache file path
    _cache_effective_arguments: str  # Effective arguments cache file

    def get_effective_arguments(self):
        # Concrete modules must expose the arguments that affect caching.
        raise NotImplementedError

    def _store_data_in_cache(self, result: List[ModuleResponse], indices: List[int]):
        """Store `results` in `handle` for some `indices`.

        Args:
            result (List[Dict]): A list of records to store.
            indices (List[int]): A list of indices to map result to.

        Raises:
            ValueError: when `indices` and `result` differ in length.
        """
        if len(indices) != len(result):
            raise ValueError(
                f"Expecting same length for `indices`({len(indices)}) and `results`({len(result)}."
            )
        # NOTE(review): mode "w" truncates the HDF5 file, so entries stored by
        # earlier calls for other indices are discarded each time -- confirm
        # this is intended (mode "a" would keep them).
        with FileLock(self._cache_lock), HDF5FileOpenerWithRetry(
            self._cache_file, "w", libver="latest"
        ) as handle:
            # Single-writer/multiple-reader mode lets readers poll the file.
            handle.swmr_mode = True
            for idx, res in zip(indices, result):
                # Each record is pickled into a dataset named "<module>/<index>".
                arr = to_pickle_bytes(res)
                ds = handle.require_dataset(f"{self.name}/{idx}", shape=arr.shape, dtype=arr.dtype)
                ds[()] = arr
            handle.flush()
        # Save all that affects caching so it can be used for identification and debugging.
        with open(os.path.join(self._cache_effective_arguments), "w") as f:
            json.dump(self.get_effective_arguments().dict(), f, indent=2)

    @retry(stop_max_attempt_number=5, wait_fixed=0.5)
    def _check_cache_internal(self, indices: Optional[List[int]]):
        """Try to read the cache up to 5 times (in case there are errors)
        and see if we have all the indices.

        Notes:
            Should not be called directly, please refer to _check_cache instead.

        Args:
            indices: Optional set of indices to verify.
        """
        if indices is None or not os.path.exists(self._cache_file):
            # Not a cacheable query
            return False
        with h5py.File(self._cache_file, "r", libver="latest", swmr=True) as handle:
            if self.name not in handle:
                return False
            task_grp = handle[self.name]
            # Dataset names are the stringified indices.
            return all(str(i) in task_grp for i in indices)

    def _check_cache(self, indices: Optional[List[int]]):
        """Check if all indices are in the cache.

        This is used to know if we already have the result of a request.

        Args:
            indices: Optional set of indices to verify.
        """
        try:
            return self._check_cache_internal(
                indices,
            )
        except (OSError, KeyError, RuntimeError, RetryError) as e:
            # NOTE: The file might be corrupted
            # so the key can be there, but not the values.
            print(f"Error accessing cache in {self.name}", e)
            return False

    @retry(stop_max_attempt_number=5, wait_fixed=0.5)
    def _get_cache(self, indices: List[int]):
        """
        Will gather the results of this tasks on some indices.

        Note: we fail if indices are not there.

        Args:
            indices (List[int]): A list of indices to map result to.

        Returns:
            List[Dict], a list of records for the indices.
        """
        with h5py.File(self._cache_file, "r", libver="latest", swmr=True) as handle:
            result = []
            log.debug(f"Get cache from {self._cache_file} for {len(indices)} key(s)")
            for i in indices:
                grp = handle[f"{self.name}/{i}"]
                # Each dataset holds one pickled record.
                result.append(from_pickle_bytes(grp[()]))
            return result
|
class Solution(object):
    def wordPattern(self, pattern, str):
        """Return True iff the characters of `pattern` and the words of `str`
        form a one-to-one (bijective) correspondence."""
        words = str.strip().split(" ")
        if len(words) != len(pattern):
            return False
        char_of, word_of = {}, {}
        for word, ch in zip(words, pattern):
            word_seen = word in char_of
            char_seen = ch in word_of
            if not word_seen and not char_seen:
                # First time for both: record the pairing in each direction.
                char_of[word] = ch
                word_of[ch] = word
            elif word_seen and char_seen:
                # Both known: they must still map to each other.
                if char_of[word] != ch or word_of[ch] != word:
                    return False
            else:
                # Exactly one side already mapped: the bijection is broken.
                return False
        return True
|
import numpy
class TelemeterAiData:
    """One byte of AI telemetry: bit 7 carries the state flag, bits 0-6 the cpu value."""

    def __init__(self):
        self.state = numpy.uint8(0)               # 1-bit state flag (packed into bit 7)
        self.cpu = numpy.uint8(0)                 # cpu value (packed into the low bits)
        self.run_number = numpy.uint8(0)          # not packed by concat_data
        self.output_file_number = numpy.uint8(0)  # not packed by concat_data
        self.crc = numpy.uint8(0)                 # not packed by concat_data

    def concat_data(self):
        """Pack state and cpu into a single byte and return it as bytes."""
        packed = numpy.uint8(self.state << 7) | numpy.uint8(self.cpu)
        return int(packed).to_bytes(length=1, byteorder='big', signed=False)
if __name__ == '__main__':
    # Quick manual check: state=0, cpu=13 packs to hex "0d".
    a = TelemeterAiData()
    a.state = numpy.uint8(0)
    a.cpu = numpy.uint8(13)
    result = a.concat_data()
    res = result.hex()
    print(res)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
''' - Read the ROUND_MATCH table
    - Enter the scores for the round
    - Update the end date of the round in the ROUND table
    - Write the results to the SCORE table
    - Reset the ROUND_MATCH table'''
import tkinter as tk
from tkinter.constants import DISABLED, END, NORMAL
from tinydb import TinyDB
# Shared TinyDB database; scores are persisted in their own table.
db = TinyDB('db/db.json')
score_table = db.table('score')
class V_Score():
    """Tkinter view that lists the matches of a round and collects their scores."""

    def __init__(self, master, serialized_round_match):
        # `serialized_round_match` appears to be a list whose first entry holds
        # the tournament id and the player-index pairs -- TODO confirm against
        # the caller.
        self.master = master
        tournament_players = db.table('players')
        serialized_players = tournament_players.all()
        # NOTE(review): module-level global, presumably read by other views.
        global id_tournament
        id_tournament = (serialized_round_match[0]['id'])
        players = (serialized_round_match[0]['joueurs'])
        match = []
        table_match = []
        # Resolve each player index pair into full player records.
        for item in players:
            for i in item:
                match.append(self.search(i, serialized_players))
            table_match.append(match)
            match = []
        liste_match = []
        # One tuple per match: both players' (index, first name, last name)
        # plus two zero-initialized score cells.
        for item in table_match:
            match = (item[0]['indice'], item[0]['prenom'], item[0]['nom'],
                     item[1]['indice'], item[1]['prenom'], item[1]['nom'], 0, 0)
            liste_match.append(match)
        self.total_rows = len(liste_match)
        self.total_columns = len(liste_match[0])
        self.data = list()
        # Build the grid of Entry widgets: the first six columns (player info)
        # become read-only; the last two (scores) stay editable.
        for i in range(self.total_rows):
            line = list()
            for j in range(self.total_columns):
                self.e = tk.Entry(self.master, width=20)
                self.e.grid(row=i, column=j)
                self.e.configure(state=NORMAL)
                self.e.insert(END, liste_match[i][j])
                if j < 6:
                    self.e.configure(state=DISABLED, disabledbackground='#6fedf8')
                line.append(self.e)
            self.data.append(line)
        self.quit_btn = tk.Button(self.master, text="Quitter")
        self.quit_btn.grid(row=10, column=3, padx=10, pady=10)
        self.valid_btn = tk.Button(self.master, text="Valider")
        self.valid_btn.grid(row=10, column=4)

    def search(self, i, serialized_players):
        """Return the player record whose 'indice' equals i (None when absent)."""
        for element in serialized_players:
            if element['indice'] == i:
                return element
|
from django.contrib import admin
from .models import Product
# Register your models here.
@admin.register(Product)
class Product(admin.ModelAdmin):
    """Admin configuration for the Product model.

    NOTE(review): this admin class shadows the imported Product model name; it
    works because @admin.register captured the model before the class was
    bound, but renaming it to ProductAdmin would be clearer.
    """
    list_display = ['name','weight','price','created_at','updated_at']
    list_filter = ['created_at',]
    # Weight and price are editable directly from the change-list page.
    list_editable = ['weight','price']
    ordering = ['-price',]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Alex
# @Date: 2015-12-29 23:06:32
# @Last Modified by: Alex
# @Last Modified time: 2015-12-29 23:07:45
from django import forms
class LoginForm(forms.Form):
    """Simple username/password login form."""
    username = forms.CharField(max_length=50)
    # Use the dedicated PasswordInput widget instead of a TextInput with a
    # hand-set type attribute: it renders type="password" and, unlike the old
    # widget, does not re-render the submitted value on form errors.
    password = forms.CharField(max_length=50,
                               widget=forms.PasswordInput())
|
import sys
# Redirect stdin so the input() calls below read the bundled test file.
sys.stdin = open('input.txt')
def realnum(password):
    """Collapse adjacent duplicate characters pair-by-pair and return the rest.

    `password` is a list of single characters (mutated in place).  The cursor
    `i` measures distance from the right end of the list; a popped character is
    compared against its left neighbour, and equal pairs are both removed.
    """
    i = 1  # cursor: distance from the right end of the list
    while len(password) > 1:
        if i >= len(password):
            # Cursor walked past the left end: nothing more can pair up.
            break
        current = password.pop(-i)
        if current == password[-i]:
            # Equal left neighbour: drop it too, then step the cursor back
            # toward the right so newly-adjacent characters get compared.
            password.pop(-i)
            if i > 1:
                i -= 1
        else:
            # No pair: put the character back where it was and move the
            # cursor one position further left.
            if i == 1:
                password.insert(len(password), current)
            else:
                password.insert(-i+1, current)
            i += 1
    return ''.join(password)
# Driver: fixed 10 test cases, each a line "<length> <password>" on stdin.
for tc in range(1, 11):
    n, password = input().split()
    # realnum mutates its argument, so hand it a fresh list of characters.
    emp = []
    for i in password:
        emp.append(i)
    print('#{} {}'.format(tc, realnum(emp)))
from struct import *
from math import *
import cmath
import scipy
from numpy import *
from scipy.signal import *
import sys
#from os import *
import os
# Python 2 script: reads 16-bit samples from <name>.bin, FFTs each line and
# writes a magnitude image (PGM "P5") plus the complex spectra as .npy.
linen=10000          # samples per input line
outn=1500            # spectrum bins kept per output line
lines=int(sys.argv[2])   # number of lines to process
fi=open(sys.argv[1]+".bin","rb")
# Skip to the requested starting line (2 bytes per sample).
fi.seek(2*int(sys.argv[3])*10000,os.SEEK_SET)
f2=open(sys.argv[1]+"_"+sys.argv[3]+"mag.ppm","w")
# One image row per 10 input lines (magnitudes are accumulated in lppm).
f2.write("P5\n"+str(int(outn))+" "+str(int(lines/10))+"\n"+"255\n")
line=zeros(int(linen),dtype=float)
linec=zeros(int(linen),dtype=complex)
linef=zeros(int(linen),dtype=complex)
lineout=zeros(int(outn),dtype=complex)
output=zeros([int(lines),int(outn)],dtype=complex)
lineoutr=zeros(int(outn),dtype=float)
lppm=zeros(int(outn),dtype=float)
for i in range(lines):
    print "ln",i,"/",lines
    aa=0x8000
    bb=0x8000
    cc=0x8000
    # Scan forward until the sync pattern (top bit 0 after a set bit) is found.
    while((aa!=0x0000 or bb!=0x8000)):
        bb=aa
        aa=int(unpack("H",fi.read(2))[0]&0x8000)
    # Read one line of samples; a set top bit marks the start of the next line.
    for j in range(linen):
        aaa=int(unpack("H",fi.read(2))[0])
        if(aaa&0x8000==0x8000):
            break
        else:
            line[j]=int(aaa&0x3fff)
            if(int(aaa&0x3fff)<0):
                print "aaa",aaa,aaa&0x3fff
    # Remove the DC offset; zero out the first 4000 samples and any negatives.
    avg=sum(line)/len(line)
    print avg,min(line),max(line)
    for j in range(linen):
        if(line[j]<0):
            line[j]=avg
        line[j]-=avg
        if(j<4000):
            line[j]=0;
    for j in range(linen):
        linec[j]=complex(line[j],0)
    # FFT and keep outn bins starting at the spectrum centre.
    linef=fft.fftshift(fft.fft(line))
    lineout=linef[int(linen/2):int(linen/2)+int(outn)]
    output[i]=lineout
    lppm+=abs(lineout)
    lineoutr=real(lineout)
    # Every 10 lines, normalize the accumulated magnitudes to 0-255 and emit
    # one image row.
    if(i%10==9):
        lppm=255*abs(lppm)/max(abs(lppm))
        for j in range(int(outn)):
            f2.write(pack("B",int(lppm[j])))
        lppm=zeros(int(outn),dtype=float)
fi.close()
f2.close()
# Persist the full complex spectra for later processing.
save(sys.argv[1]+"_"+sys.argv[3]+"output.npy",output)
|
#!/usr/bin/env python
# -*- encoding:utf-8 -*-
'''
Created on 2018年7月2日
@author: Administrator
'''
from sklearn import linear_model, datasets
import numpy as np
from sklearn.metrics import mean_squared_error, r2_score
import matplotlib.pyplot as plt
def linear_regression_example():
    """Fit ordinary least squares on one diabetes feature and visualise it.

    Loads the sklearn diabetes dataset, keeps a single feature (column 2),
    holds out the last 20 samples for testing, fits LinearRegression,
    prints the coefficient / MSE / R2, and shows a scatter + fitted line.
    """
    diabetes = datasets.load_diabetes()
    # np.newaxis keeps the (n_samples, 1) column shape sklearn expects.
    feature = diabetes.data[:, np.newaxis, 2]
    # Last 20 samples are the test split, for both X and y.
    x_train, x_test = feature[:-20], feature[-20:]
    y_train, y_test = diabetes.target[:-20], diabetes.target[-20:]
    model = linear_model.LinearRegression()
    model.fit(x_train, y_train)
    y_pred = model.predict(x_test)
    print(u"求解的参数值 :")
    print(model.coef_)
    print("均方差 : %.2f" % mean_squared_error(y_test, y_pred))
    print('R2的值 : %.2f' % r2_score(y_test, y_pred))
    # Plot test points and the fitted regression line.
    plt.scatter(x_test, y_test, color='black')
    plt.plot(x_test, y_pred, color='blue', linewidth=3)
    plt.xlabel(u"测试样本值")
    plt.ylabel(u"测试Y值或估算值")
    plt.show()
if __name__ == '__main__':
    linear_regression_example()
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 25 23:05:20 2019
@author: duck
"""
from matplotlib import pyplot as plt
"""with open("outputfile.dat", "r") as out:
lines = out.readlines()
A = []; B = [];
for i in lines:
print "i, len(i), i[0:7]:", i, len(i), i[0:7]
a = float(i[0:7]); A.append(a);#print a, "a"
b = float(i[8:]); B.append(b); #print b, "b";
plt.plot(A,B)"""
x = [[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5]]
#y = [10,15,20,30,50]
print x[2][2]
#plt.scatter(x,y) |
import pandas as pd
from chip2probe.sitespredict.imads import iMADS
from chip2probe.sitespredict.imadsmodel import iMADSModel
from chip2probe.sitespredict.pbmescore import PBMEscore
from chip2probe.sitespredict.kompas import Kompas
from chip2probe.sitespredict.sitesplotter import SitesPlotter
seq = "TTACGGCAAGCGGGCCGGAAGCCACTCCTCGAGTCT"
df = pd.DataFrame([[seq, 'seq1']], columns=['sequence', 'key'])
mutate_cutoff = 0.38
# path will be provided by user
escore_long_paths= {'ets1': "/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/escores/Ets1_8mers_11111111.txt",
'runx1': "/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/escores/Runx1_8mers_11111111.txt"
}
kmer_align_paths= {'ets1': "/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/kompas/ets1/Ets1_kmer_alignment.txt",
'runx1': "/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/kompas/runx1/Runx1_kmer_alignment.txt"
}
modelcores= {'ets1': ["GGAA", "GGAT"],
'runx1': ["GAGGT", "GCGGC", "GCGGG", "GCGGT", "GTGGC", "GTGGG", "GTGGT"]
}
modelpaths= {'ets1': ["/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/ets1/ETS1_100nM_Bound_filtered_normalized_transformed_20bp_GGAA_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/ets1/ETS1_100nM_Bound_filtered_normalized_transformed_20bp_GGAT_1a2a3mer_format.model"],
'runx1': ["/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GAGGT_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GCGGC_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GCGGG_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GCGGT_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GTGGC_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GTGGG_1a2a3mer_format.model",
"/Users/faricazjj/Box/homotf/chip2probe/chip2probe/data/imads/runx1/Runx1_10nM_Bound_filtered_normalized_logistic_transformed_20bp_GTGGT_1a2a3mer_format.model"]
}
imads_cutoff= {'ets1': 0.2128,
'runx1': 0.3061}
escores = {}
models = {}
proteins = ['ets1', 'runx1']
for tf in proteins:
escores[tf] = PBMEscore(escore_long_paths[tf])
models[tf] = Kompas(protein=tf, threshold=mutate_cutoff,
kmer_align_path=kmer_align_paths[tf])
es_preds = {}
esplots = {}
model_preds = {}
model_plots = {}
colors = [('crimson', 'plum'), ('steelblue', 'lightblue')]
# initialize escore and model objects for each protein
for protein in proteins:
protein_num = proteins.index(protein)
es_preds[protein] = escores[protein].predict_sequences(df, key_colname="key")
esplots[protein] = escores[protein].make_plot_data(es_preds[protein], color=colors[protein_num][0])
model_preds[protein] = models[protein].predict_sequences(df, key_colname="key", predict_flanks=False)
model_plots[protein] = models[protein].make_plot_data(model_preds[protein],
color=colors[protein_num][1],
show_model_flanks=False)
# Generate plots
sp = SitesPlotter()
# if need to plot, uncomment this
sp.plot_seq_combine([esplots, model_plots],
filepath="outlier.pdf")
|
from kerastuner import RandomSearch
from tensorflow.keras import utils
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
# Load MNIST and flatten each 28x28 image into a 784-vector.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
# Scale pixel values from 0..255 into [0, 1].
x_train = x_train / 255
x_test = x_test / 255
# One-hot encode the ten digit classes.
y_train = utils.to_categorical(y_train, 10)
y_test = utils.to_categorical(y_test, 10)
def build_model(hp):
    """Build a two-hidden-layer dense MNIST classifier from tuner hyperparameters.

    hp: kerastuner HyperParameters object; choices are registered in the same
    order as before (activation, units_input, units_hidden, optimizer).
    """
    activation = hp.Choice('activation', values=['relu', 'sigmoid', 'tanh', 'elu', 'selu'])
    input_units = hp.Int('units_input', min_value=512, max_value=1024, step=32)
    hidden_units = hp.Int('units_hidden', min_value=128, max_value=800, step=32)
    model = Sequential()
    # Input layer consumes the flattened 784-pixel vectors.
    model.add(Dense(units=input_units, input_dim=784, activation=activation))
    model.add(Dense(units=hidden_units, activation=activation))
    model.add(Dense(10, activation='softmax'))
    model.compile(
        optimizer=hp.Choice('optimizer', values=['adam', 'rmsprop', 'SGD']),
        loss='categorical_crossentropy', metrics=['accuracy']
    )
    return model
# Random search over the hyperparameter space defined in build_model.
tuner = RandomSearch(
    build_model,
    objective='val_accuracy',
    max_trials=336,
    directory='test_directory'
)
tuner.search_space_summary()
tuner.search(x_train, y_train, batch_size=256, epochs=70, validation_split=0.1)
tuner.results_summary()
# Inspect the three best architectures found.
models = tuner.get_best_models(num_models=3)
for m in models:
    m.summary()
    # m.evaluate(x_test, y_test)
    # print()
# Continue training the single best model, then evaluate and save it.
top = tuner.get_best_models(num_models=1)[0]
history = top.fit(
    x_train, y_train, batch_size=256, epochs=70,
    validation_split=0.1, verbose=1
)
scores = top.evaluate(x_test, y_test, verbose=1)
print("Score: ", round(scores[1] * 100, 4))
top.save('nums_top_model.h5')
|
from lib import game
import json
import copy
import Tree
import pylos as pyl
class Tree_Generator():
    '''Creates the Tree chart for PylosAI.

    Enumerates legal Pylos moves (place / move / remove) from a PylosState,
    builds a game tree of Tree nodes, prunes children that are symmetric
    rotations/reflections of an already-added sibling, and saves the tree
    to JSON.
    '''
    def __init__(self):
        pass
    def board_free(self, state):
        '''
        travel the board and save all the free positions in a list
        :return: the list of the free positions
        '''
        board = list()
        for layer in range(4):
            for row in range(4-layer):
                for column in range(4-layer):
                    try:
                        state.validPosition(layer, row, column)
                        if state.get(layer, row, column) is None:
                            board.append((layer, row, column))
                    except:
                        # validPosition raises for unusable coordinates -- skip.
                        pass
        return board
    def board_remove(self, state, player):
        '''
        travel the board and save all the marbles that can be removed
        :return: list of removable marbles
        '''
        board = list()
        for layer in range(4):
            for row in range(4 - layer):
                for column in range(4 - layer):
                    try:
                        state.canMove(layer, row, column)
                        sphere = state.get(layer, row, column)
                        if sphere != player:
                            # Only the current player's spheres are removable.
                            raise game.InvalidMoveException('not your sphere')
                        board.append((layer, row, column))
                    except:
                        pass
        return board
    def generate_from_free(self, tree, state):
        # Case where the AI places a marble.
        # Price starts at 1 (one sphere spent) and is reduced by any spheres
        # recovered when the placement completes a square.
        children = []
        for pos in self.board_free(state):
            price = 1
            child_state1 = copy.deepcopy(pyl.PylosState(state._state['visible']))
            move = {'move': 'place', 'to': list(pos)}
            child_state1.update(move, state._state['visible']['turn'])
            if child_state1.createSquare(pos):
                # Square completed: up to two own spheres may be removed.
                combi = self.board_remove(child_state1, state._state['visible']['turn'])
                child_state2 = copy.deepcopy(pyl.PylosState(state._state['visible']))
                if len(combi) >= 2:
                    move['remove'] = [combi[0], combi[1]]
                else:
                    move['remove'] = combi
                child_state2.update(move, child_state2._state['visible']['turn'])
                price -= len(combi)
                children.append(Tree.Tree(child_state2, price, move))
            else:
                child_state = copy.deepcopy(pyl.PylosState(state._state['visible']))
                child_state.update(move, child_state._state['visible']['turn'])
                children.append(Tree.Tree(child_state, price, move))
        return children
    def generate_from_remove(self, tree, state):
        # Case where the AI moves an existing marble to a higher layer.
        children = []
        for pos in self.board_remove(state, state._state['visible']['turn']):
            price = 0
            child_state1 = copy.deepcopy(pyl.PylosState(state._state['visible']))
            transitory_state = copy.deepcopy(pyl.PylosState(state._state['visible']))
            move = {'move': 'move', 'from': list(pos)}
            # Lift the sphere first so its own square frees up as destination.
            transitory_state.remove(pos, child_state1._state['visible']['turn'])
            for upperpos in self.board_free(transitory_state):
                child_state2 = copy.deepcopy(pyl.PylosState(child_state1._state['visible']))
                # A sphere may only be moved upwards (to a higher layer).
                if upperpos[0] > pos[0]:
                    move['to'] = list(upperpos)
                    child_state2.update(move, child_state2._state['visible']['turn'])
                    if child_state2.createSquare(pos):
                        combi = self.board_remove(child_state1, state._state['visible']['turn'])
                        child_state3 = copy.deepcopy(pyl.PylosState(state._state['visible']))
                        if len(combi) >= 2:
                            move['remove'] = [combi[0], combi[1]]
                        else:
                            move['remove'] = combi
                        child_state3.update(move, child_state3._state['visible']['turn'])
                        price -= len(combi)
                        children.append(Tree.Tree(child_state3, price, move))
                    else:
                        children.append(Tree.Tree(child_state2, price, move))
        return children
    # test symetry
    def rot(self, matrix):
        # Transpose in place (used as a quarter-turn building block).
        for i in range(len(matrix)):
            for j in range(i):
                matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]
        return matrix
    def axisY(self, matrix):
        # Mirror each row (reflection around the vertical axis).
        for i in range(len(matrix)):
            matrix[i].reverse()
        return matrix
    def axisX(self, matrix):
        # Mirror the row order (reflection around the horizontal axis).
        matrix.reverse()
        return matrix
    def noSymetry(self, matrix1, matrix2):
        # Raises EnvironmentError (used as control flow by generate_tree)
        # when matrix1 is any rotation/reflection of matrix2.
        if matrix1 == self.rot(copy.deepcopy(matrix2)) or \
            matrix1 == self.rot(self.rot(copy.deepcopy(matrix2))) or \
            matrix1 == self.rot(self.rot(self.rot(copy.deepcopy(matrix2)))) or \
            matrix1 == self.axisY(copy.deepcopy(matrix2)) or \
            matrix1 == self.axisX(copy.deepcopy(matrix2)) or \
            matrix1 == self.axisX(self.axisY(copy.deepcopy(matrix2))):
            raise EnvironmentError
    # Generate a Tree
    def start(self, state):
        # Build the tree from the given root state and persist it.
        t0 = Tree.Tree(state, 0, [])
        self.generate_tree(t0)
        t0.saveTree("TEST.json")
        print('arbre sauvé')
    def generate_tree(self, tree, it=0, gen=0):
        # children = self.generate_from_free(tree, tree.state)
        children = self.generate_from_free(tree, tree.state) + self.generate_from_remove(tree, tree.state)
        if it >= 4:  # use 4 for the first turn but 3 for the rest of the game
            pass
        else:
            it += 1
            for child in children:
                if len(tree.children) == 0:
                    gen += 1
                    tree.addChild(child)
                    self.generate_tree(child, it, gen)
                else:
                    try:
                        # noSymetry raises when the child duplicates a sibling
                        # up to rotation/reflection; the except drops it.
                        for ch in tree.children:
                            m1 = child.state._state['visible']['board'][0]
                            m2 = ch.state._state['visible']['board'][0]
                            self.noSymetry(m1, m2)
                        gen += 1
                        tree.addChild(child)
                        self.generate_tree(child, it, gen)
                    except:
                        # print('symetry')
                        pass
        #print("t0f = ", tree)
        #print(it, gen, sep=' : ')
        return
|
## Functions to get COVID/population/school data from the web
import plotly.graph_objects as go
import dash
import numpy as np
import requests
import plotly.express as px
import dash_core_components as dcc
import dash_html_components as html
import plotly.figure_factory as ff
import pandas as pd
import json
from io import open
from os import path
from datetime import datetime, timedelta
import os
from zipfile import ZipFile
app = dash.Dash(__name__)
def refreshdata():
    '''
    Function to update the data periodically.

    NOTE(review): not implemented -- the body is only this docstring, so
    calling it does nothing and returns None. Confirm whether periodic
    refresh was meant to re-run pullcovid().
    :return:
    '''
def pullcovid():
    '''
    Function to pull COVID/population data once. Needs to run every time data is updated.
    Get data with Kaggle API. Put data into numpy array

    Returns (todaycases, df_case_dict, popdata):
      todaycases  - DataFrame of the most recent day's county cases/deaths,
                    with 'fips' normalised to 5-char zero-padded strings
      df_case_dict - dict mapping date string -> per-county case DataFrame
                     for the last 30 days
      popdata     - DataFrame of county fips/median_age/population
    :return:
    '''
    # Data lags: use the date from two days ago as "today".
    date = datetime.date(datetime.now() - timedelta(days=2)) # get today's date
    # Download COVID/population data from Kaggle (requires kaggle CLI + creds).
    os.system("kaggle datasets download -d headsortails/covid19-us-county-jhu-data-demographics")
    file_name = "covid19-us-county-jhu-data-demographics.zip"
    # extract the zip file to get the COVID/population csv files
    with ZipFile(file_name, 'r') as zip:
        zip.extractall()
    # Put csv files into numpy array
    covidcases = pd.read_csv('./covid_us_county.csv')
    popdata = pd.read_csv('./us_county.csv')
    covidcases = covidcases[["fips", "date", "cases", "deaths"]] # only get relevant information in dataframe
    popdata = popdata[["fips", "median_age", "population"]]
    # Get only today's data for COVID
    todaycases = covidcases[covidcases["date"] == str(date)]
    # Get last 30 days of data
    timeinc = timedelta(days=1) # decrement one day at a time
    df_case_dict = {} # dictionary will contain dates as keys, dataframes of cases for every county as values
    for i in range(30): # go 30 days back
        df_case_dict[str(date)] = covidcases[covidcases["date"] == str(date)]
        date = date - timeinc
    # NOTE(review): todaycases is a slice of covidcases; the in-place dropna
    # may trigger pandas' SettingWithCopyWarning -- verify.
    todaycases.dropna(inplace=True)
    # Normalise fips: int -> drop territory codes (>= 80000) -> zero-padded str.
    todaycases['fips'] = todaycases['fips'].astype('int64', copy=True)
    todaycases = todaycases[todaycases['fips'] < 80000].copy(deep=True)
    todaycases['fips'] = todaycases['fips'].astype('str', copy=True)
    todaycases['fips'] = todaycases['fips'].str.rjust(5, '0')
    return todaycases, df_case_dict, popdata
def pullpublicschool(covid):
    '''
    Pull public-school location data (one NCES ArcGIS request per state) and
    annotate each school with the COVID case count of its county.

    covid: DataFrame with at least string 'fips' and numeric 'cases' columns.
    Returns a JSON string mapping column names to per-school value lists.
    Only needs to run once per data refresh.
    '''
    school_name_arr = []
    street_addr_arr = []
    state_arr = []
    city_arr = []
    county_arr = []
    lat_arr = []
    lon_arr = []
    zip_arr = []
    fips_arr = []
    states = ['AL', 'AK', 'AZ', 'AR', 'CA', 'CO', 'CT', 'DE', 'DC', 'FL', 'GA', 'HI', 'ID', 'IL', 'IN', 'IA', 'KS',
              'KY', 'LA', 'ME', 'MD', 'MA', 'MI', 'MN', 'MS', 'MO', 'MT', 'NE', 'NV', 'NH', 'NJ', 'NM', 'NY', 'NC',
              'ND', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VT', 'VA', 'WA', 'WV', 'WI', 'WY']
    # Need: county FIPS, latitude, longitude, school name, school id.
    # (The previous version also issued an unconditioned full-dataset request
    # here whose response was immediately overwritten -- removed.)
    for state in states:
        schooldata = requests.get(
            "https://services1.arcgis.com/Ua5sjt3LWTPigjyD/arcgis/rest/services/Public_School_Location_201819/FeatureServer/0/query?where=STATE%20%3D%20'" + state + "'&outFields=NAME,STREET,CITY,STATE,ZIP,CNTY,NMCNTY,LAT,LON&outSR=4326&f=json")
        schools = schooldata.json() # school data is now a dictionary
        schoollist = schools["features"] # dictionary of all the schools
        for school in schoollist:
            attr = school["attributes"]
            # Look up the school's county in the COVID frame by FIPS code.
            query = "fips =='" + attr["CNTY"] + "'"
            name = str(attr["NAME"])
            # Hover text: school name plus its county's current case count.
            text = name + "<br>Cases in county: " + str(covid.query(query)["cases"].values[0])
            school_name_arr.append(text)
            street_addr_arr.append(attr["STREET"])
            city_arr.append(attr["CITY"])
            county_arr.append(attr["NMCNTY"])
            fips_arr.append(attr["CNTY"])
            state_arr.append(attr["STATE"])
            zip_arr.append(attr["ZIP"])
            lat_arr.append(attr["LAT"])
            lon_arr.append(attr["LON"])
    diction = {"name": school_name_arr, "street": street_addr_arr, "city": city_arr,
               "county_name": county_arr, "fips": fips_arr, "state": state_arr, "zip": zip_arr, "lat": lat_arr,
               "lon": lon_arr}
    return json.dumps(diction)
def generateSchoolMap(covid):
    '''
    Build a USA scatter map of public schools annotated with the COVID case
    count of each school's county.

    covid: today's per-county case DataFrame (see pullcovid).
    Returns a plotly Figure.
    '''
    # load data first time only
    schoollist = pullpublicschool(covid)
    # BUG FIX: json.dumps returns str in Python 3; the old
    # schoollist.decode("utf-8") raised AttributeError. Also use a context
    # manager so the file handle is closed before it is re-read.
    with open("schools.json", "w") as myfile:
        myfile.write(schoollist)
    # lines=True keeps each column as a single list-valued cell in row 0,
    # which is what the df['lon'][0] / df['lat'][0] accesses below expect.
    df = pd.read_json(open("schools.json", "r", encoding="utf8"), lines=True)
    # make the map
    fig = go.Figure(data=go.Scattergeo(
        lon=df['lon'][0],
        lat=df['lat'][0],
        text=df['name'][0],
        mode='markers',
    ))
    fig.update_layout(
        title='Public schools across America',
        geo_scope='usa',
    )
    #fig.write_html("temp1.html", auto_open=True)
    return fig
# Build both figures once at import time: the school scatter map and the
# county-level case choropleth.
covid_today, case_dict, pop = pullcovid()
fig = generateSchoolMap(covid_today)
# County-boundary GeoJSON keyed by FIPS code, used by the choropleth.
response = requests.get('https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json')
counties = response.json()
fips = covid_today["fips"]
fig2 = px.choropleth(covid_today, geojson=counties, locations='fips', color='cases',
                     range_color=(0, 5000),
                     scope='usa',
                     color_continuous_scale = "rainbow"
                     )
# fig2 = ff.create_choropleth(
#     fips=fips, values=covid_today["cases"], scope=['CA', 'AZ', 'Nevada', 'Oregon', ' Idaho'],
#     county_outline={'color': 'rgb(255,255,255)', 'width': 0.5}, round_legend_values=True,
#     legend_title='Covid cases', title='Covid cases West Coast'
# )
#fig2.write_html("temp2.html", auto_open=True)
def layout_for_site(fig1, fig2):
    """Install the Dash page layout: school map, date slider, county choropleth.

    fig1: school scatter-map figure shown in 'current-graph2'.
    fig2: initial choropleth shown in 'current-graph' (updated by the
          'case_slider' callback).
    """
    app.title = "Covid Data"
    app.layout = html.Div(children=[
        html.Div([
            html.H1("Public Schools K-12"),
            dcc.Graph(
                id="current-graph2",
                figure=fig1),
        ]),
        html.Div([
            html.Div([
                html.Span('Select Date:'),
            ], className='Grid-cell',
            ),
            # Slider value 30 = most recent day; 1 = 29 days back.
            dcc.Slider(
                id='case_slider',
                min=1,
                max=30,
                step=1,
                value=30
            ),
        ], className='Grid-cell',
        ),
        html.Div([
            html.H1("Covid Cases per County"),
            dcc.Graph(
                id="current-graph",
                figure=fig2),
        ]),
    ])
@app.callback(
    dash.dependencies.Output('current-graph', 'figure'),
    [dash.dependencies.Input('case_slider','value')])
def update_output(value):
    """Redraw the county choropleth for the day selected on the slider.

    value: slider position 1..30; 30 maps to the most recent day.
    """
    # Update fig2
    currtime = timedelta(days=30-value)
    print(currtime)  # debug output
    today = datetime.date(datetime.today())-timedelta(days=1)
    print(today)
    #todaycases = case_dict["date" == str(today-currtime)]
    print(case_dict)
    # Find the pre-built per-day DataFrame matching the selected date.
    var = None
    for case,df in case_dict.items():
        print(case)
        if case == str(today-currtime):
            var = df
    fig2 = px.choropleth(var, geojson=counties, locations='fips', color='cases',
                         range_color=(0, 5000),
                         scope='usa',
                         color_continuous_scale="rainbow"
                         )
    return fig2
# Install the layout, then start the dev server when run as a script.
layout_for_site(fig, fig2)
# @app.callback(
#     dash.dependencies.Output('current-graph', 'figure'),
#     [dash.dependencies.Input('ngram-dropdown', 'value'),
#      dash.dependencies.Input('date-slider', 'value')]
# )
# def update_output(ngram, date_index):
#     pass
if __name__ == '__main__':
    app.run_server(debug=True)
# coding: utf-8
""" Data generation example.
"""
import os
is_travis = ('TRAVIS' in os.environ)
if is_travis:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from pybold.data import gen_rnd_bloc_bold
from pybold.hrf_model import spm_hrf
from pybold.utils import fwhm, tp
###############################################################################
# generate the signal
tr = 1.0       # repetition time in seconds
snr = 1.0      # signal-to-noise ratio in dB
dur = 4        # minutes
hrf_dur = 30.0 # HRF support in seconds
hrf, t_hrf = spm_hrf(1.0, t_r=tr, dur=hrf_dur)
hrf_fwhm = fwhm(t_hrf, hrf)
hrf_tp = tp(t_hrf, hrf)
# Parameters for the random block-design BOLD generator.
params = {'dur': dur,
          'tr': tr,
          'hrf': hrf,
          'nb_events': 4,
          'avg_dur': 1,
          'std_dur': 4,
          'overlapping': False,
          'snr': snr,
          'random_state': 0,
          }
res = gen_rnd_bloc_bold(**params)
noisy_ar_s, ar_s, ai_s, i_s, t, _, noise = res
###############################################################################
# plotting: noisy/denoised BOLD, source signals, and the HRF, then save PNG.
fig = plt.figure(1, figsize=(15, 7))
# axis 1
ax1 = fig.add_subplot(3, 1, 1)
# BUG FIX: both placeholders were {0}, so the TR slot displayed the SNR
# value and the tr argument was never used.
label = "Noisy BOLD signal, SNR={0}dB, TR={1}s".format(snr, tr)
ax1.plot(t, noisy_ar_s, '-y', label=label, lw=3)
ax1.plot(t, ar_s, '-b', label="Denoised BOLD signal, TR={0}s".format(tr), lw=3)
ax1.set_xlabel("time (s)")
ax1.set_ylabel("ampl.")
ax1.legend(fontsize=15, framealpha=0.3)
# axis 2
ax2 = fig.add_subplot(3, 1, 2)
ax2.plot(t, ai_s, '-r', label="Block signal", lw=3)
ax2.stem(t, i_s, '-g', label="Dirac source signal")
ax2.set_xlabel("time (s)")
ax2.set_ylabel("ampl.")
ax2.set_ylim(-1.5, 1.5)
ax2.legend(fontsize=15, framealpha=0.3)
ax2.set_title("Source signals, TR={0}s".format(tr), fontsize=15)
# axis 3
ax3 = fig.add_subplot(3, 1, 3)
ax3.plot(t_hrf, hrf, label="Original HRF", lw=3)
ax3.set_xlabel("time (s)")
ax3.set_ylabel("ampl.")
ax3.legend(fontsize=15, framealpha=0.3)
title = (r"HRF, TR={0}s, FWHM={1:.2f}s, "
         "TP={2:.2f}s".format(tr, fwhm(t_hrf, hrf), tp(t_hrf, hrf)))
ax3.set_title(title, fontsize=15)
plt.tight_layout()
filename = "gen_data.png"
print("Saving plot under '{0}'".format(filename))
plt.savefig(filename)
|
import cv2
import numpy as np
img = cv2.imread("colorBalls.jpg")
HSV = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
verdeoInf = np.array([70,100,100])
verdeoSup = np.array([100,255,255])
verdeoMask = cv2.inRange(HSV,verdeoInf,verdeoSup)
res = cv2.bitwise_and(img, img, mask=verdeoMask)
cv2.imshow('img', img)
cv2.imshow('mask', verdeoMask)
cv2.imshow('verdeo', res)
cv2.waitKey(0)
|
#!/usr/bin/env python
# coding: utf-8
# # THE SPARKS FOUNDATION
#
# # VIVISHA SINGH
# # Data Science and Business Analytics
# # Task-1
# # Prediction using Supervised ML
# # Predict the percentage of an student based on the no. of study hours.
# In[ ]:
#importing all libraries
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
# In[9]:
# Load the hours-vs-scores dataset used throughout this notebook.
mydataset=pd.read_csv("http://bit.ly/w-data") #reading the csv file or dataset
# In[10]:
mydataset
# In[11]:
mydataset.head() #viewing top 5 rows of data
# In[12]:
# checking for NULL values
mydataset.isnull().sum()
# In[14]:
#checking for linearity
plt.scatter(mydataset['Hours'],mydataset['Scores'])
plt.title('Hours vs Percentage')
plt.xlabel('Hours Studied')
plt.ylabel('Percentage Score')
plt.show()
# In[15]:
#correlation matrix
cor=mydataset.corr()
cor # variables positively /highly co-related
# # Training the Model
# 1) Splitting the Data
# In[20]:
# Defining X and y from the Data
# BUG FIX: the dataset was loaded as `mydataset`; the previous code read
# from an undefined name `data` and raised NameError here.
X = mydataset.iloc[:, :-1].values
y = mydataset.iloc[:, 1].values
# Spliting the Data in two
train_X, val_X, train_y, val_y = train_test_split(X, y, random_state = 0)
# # 2) Fitting the Data into the model
# In[21]:
regression = LinearRegression()
regression.fit(train_X, train_y)
print("---------Model Trained---------")
# # Predicting the Percentage of Marks
# In[22]:
pred_y = regression.predict(val_X)
# Tabulate each validation sample's hours next to its predicted mark.
prediction = pd.DataFrame({'Hours': [i[0] for i in val_X], 'Predicted Marks': [k for k in pred_y]})
prediction
# # Comparing the Predicted Marks with the Actual Marks
# In[23]:
compare_scores = pd.DataFrame({'Actual Marks': val_y, 'Predicted Marks': pred_y})
compare_scores
# # Visually Comparing the Predicted Marks with the Actual Marks
# In[24]:
plt.scatter(x=val_X, y=val_y, color='blue')
plt.plot(val_X, pred_y, color='Black')
plt.title('Actual vs Predicted', size=20)
plt.ylabel('Marks Percentage', size=12)
plt.xlabel('Hours Studied', size=12)
plt.show()
# # CHECKING ACCURACY OF THE MODEL
# In[28]:
from sklearn import metrics
from sklearn.metrics import mean_absolute_error
# In[29]:
print('Mean absolute error: ',metrics.mean_absolute_error(val_y,pred_y)) #less error
# # What will be the predicted score of a student if he/she studies for 9.25 hrs/ day?
# In[30]:
# predict expects a 2D array, hence the extra list nesting.
hours = [9.25]
answer = regression.predict([hours])
print("Score = {}".format(round(answer[0],3)))
# # According to the regression model if a student studies for 9.25 hours a day he/she is likely to score 93.89 marks.
|
import numpy
from numpy.random import randn
import matplotlib.pyplot as plt
#A Brief matplotlib API Primer
fig = plt.figure()
#to adjust the figure
fig.subplots_adjust(left=0.0, bottom=None, right=1.2, top=None,
                    wspace=0, hspace=None)
# Three of four cells in a 2x2 subplot grid.
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2)
ax3 = fig.add_subplot(2, 2, 3)
#fig.show()
#plt.plot()
#o option for plotting the points
plt.plot(randn(50000).cumsum(), 'go--',label="random walk")
#g-- is the same as:
#linestyle='--', color='g'
_ = ax1.hist(randn(100), bins=20, color='k', alpha=0.3)
ax2.scatter(numpy.arange(30), numpy.arange(30) + 3 * randn(30))
# NOTE(review): no data is passed here, so this call draws nothing.
plt.plot(drawstyle='steps-post', label='steps-post')
plt.show()
#Adjusting the spacing around subplots
#Ticks, Labels, and Legends
#sets the axes range to 0 to 10
#plt.xlim([0, 10])
#plt.show()
# Second figure: custom ticks, rotated tick labels, and a title.
fig = plt.figure(); ax = fig.add_subplot(1, 1, 1)
ax.plot(randn(1000).cumsum())
#sets the label of the plot
ticks = ax.set_xticks([0, 250, 500, 750, 1000])
labels = ax.set_xticklabels(['one', 'two', 'three', 'four', 'five'],
                            rotation=30, fontsize='small')
ax.set_title("My first plot title")
plt.show()
|
import torch
def arrange_data(data_tmp, M, gen_rand_indx, rand_idx):
    # Extracts a batch of data from the dataset loader.
    #
    # data_tmp: iterable of (image, viewpoint) pairs from the loader.
    # M: number of context frames kept per scene.
    # gen_rand_indx: if True, pick the query frame index at random (0..14);
    #                otherwise use the supplied rand_idx.
    # Returns (x_real, v_real, v_real_query, rand_idx, ground_truth):
    # context images, context viewpoints, the query viewpoint, the index
    # used, and the ground-truth image at that index.
    x_tmp = []
    v_tmp = []
    for data in data_tmp:
        x_tmp.append(torch.stack([x[0] for x in data]))
        v_tmp.append(torch.stack([v[1] for v in data]))
    if gen_rand_indx:
        rand_idx = torch.LongTensor(1).random_(0, 15)
    x_tmp = torch.stack(x_tmp)
    v_tmp = torch.stack(v_tmp)
    x_tmp = x_tmp.squeeze(0)
    v_tmp = v_tmp.squeeze(0)
    x_tmp = x_tmp.squeeze(2)
    v_tmp = v_tmp.squeeze(2)
    # Reorder to (batch, frame, ...).
    x_tmp = x_tmp.permute(1, 0, 2, 3, 4)
    v_tmp = v_tmp.permute(1, 0, 2)
    # Remove unecesary dimensions from black & white img
    x_tmp = x_tmp.narrow(dim=2, start=0, length=1)
    # Viewpoint layout along dim 2: [x, y, z, jaw, pitch, classID] --
    # inferred from the narrows below; TODO confirm against the dataset.
    v_tmp_xyz = v_tmp.narrow(dim=2, start=0, length=3)
    v_tmp_jawpitch = v_tmp.narrow(dim=2, start=3, length=2)
    # Class condition in form of a class unique number
    v_tmp_classID = v_tmp.narrow(dim=2, start=5, length=1)
    # Get ID down to the same scale as the other features.
    v_tmp_classID = torch.div(v_tmp_classID, 1000000)
    # Encode the two angles as (cos, sin) pairs.
    v_tmp_jawpitch_cosed = torch.cos(v_tmp_jawpitch)
    v_tmp_jawpitch_sined = torch.sin(v_tmp_jawpitch)
    # NOTE(review): all four narrows below use start=0, so the "pitch" and
    # "jaw" tensors duplicate the SAME angle column; one pair presumably
    # should use start=1 -- confirm before relying on both features.
    v_pitch_cosed = v_tmp_jawpitch_cosed.narrow(dim=2, start=0, length=1)
    v_pitch_sined = v_tmp_jawpitch_sined.narrow(dim=2, start=0, length=1)
    v_jaw_cosed = v_tmp_jawpitch_cosed.narrow(dim=2, start=0, length=1)
    v_jaw_sined = v_tmp_jawpitch_sined.narrow(dim=2, start=0, length=1)
    v_tmp = torch.cat([torch.cat([v_tmp_xyz, v_jaw_cosed], dim=2), v_jaw_sined], dim=2)
    v_tmp = torch.cat([torch.cat([v_tmp, v_pitch_cosed], dim=2), v_pitch_sined], dim=2)
    v_tmp = torch.cat([v_tmp, v_tmp_classID], dim=2)
    q_tmp = v_tmp[:,rand_idx, :]
    v_real_query = q_tmp
    ground_truth = x_tmp[:,rand_idx, :, :, :]
    # Keep 0:M frames from scene
    x_real = x_tmp.narrow(dim= 1, start=0, length=M)
    v_real = v_tmp.narrow(dim= 1, start=0, length=M)
    ground_truth = ground_truth.squeeze(dim=1)
    return x_real, v_real, v_real_query, rand_idx, ground_truth
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
信号槽连接滑块LCD
'''
import sys
from PyQt5.QtWidgets import QWidget, QApplication, QVBoxLayout, QLCDNumber, QSlider
from PyQt5.QtCore import Qt
class WinForm(QWidget):
    """Demo window linking a slider to an LCD display via a signal/slot."""
    def __init__(self, parent=None):
        super(WinForm, self).__init__(parent)
        self.initUI()
    def initUI(self):
        """Build the widgets and wire the slider's value to the LCD."""
        self.setGeometry(300, 300, 350, 150)
        self.setWindowTitle("信号与槽:连接滑块LCD")
        lcd_display = QLCDNumber(self)
        value_slider = QSlider(Qt.Horizontal, self)
        layout = QVBoxLayout()
        layout.addWidget(lcd_display)
        layout.addWidget(value_slider)
        self.setLayout(layout)
        # Every slider change is shown on the LCD immediately.
        value_slider.valueChanged.connect(lcd_display.display)
if __name__ == "__main__":
    app = QApplication(sys.argv)
    form = WinForm()
    form.show()
    sys.exit(app.exec_())
|
#!/usr/bin/env python3.4
# -*- coding: utf-8 -*-
from constants import get_all_emotions, get_all_features
from itertools import islice
import glob
import csv
import pdb
def get_defined_landmarks(file_path):
    '''
    Retrieve landmarks defined in lm2 file.

    Reads the label lines that follow the 5-line header and stops at the
    first blank line. Returns the labels as a list of strings.
    '''
    defined_landmarks = []
    with open(file_path) as file:
        # Start reading file after the 'Labels:' line
        start_line = 5
        for line in islice(file, start_line, None):
            # Stop reading lines as soon as an empty line is discovered
            if line in ['\n', '\r\n']:
                break
            defined_landmarks.append(line.rstrip('\n\r')) # Remove end of line character
    return defined_landmarks
def get_undefined_landmarks_idx(all_landmarks, defined_landmarks):
    '''
    Return indices (into all_landmarks) of landmarks that are not defined
    in the lm2 file, in their original order.
    '''
    # Set membership turns the O(n*m) nested scan into O(n+m).
    defined = set(defined_landmarks)
    return [idx for idx, landmark in enumerate(all_landmarks)
            if landmark not in defined]
def get_lm2_features(file_path):
    '''
    Return the (x, y) value of each landmark in the lm2 file.

    Undefined landmarks contribute 'NaN' for both coordinates, so the result
    always has 2 * len(get_all_features()) entries.
    NOTE(review): on any parse error the except below only prints the path
    and the function implicitly returns None, which breaks the caller's
    list concatenation -- confirm whether errors should propagate instead.
    '''
    try:
        # Find undefined landmarks in lm2 file
        all_landmarks = get_all_features()
        defined_landmarks = get_defined_landmarks(file_path)
        undefined_landmarks_idx = get_undefined_landmarks_idx(all_landmarks, defined_landmarks)
        # Find landmark values defined in lm2 file
        defined_landmarks_values = []
        with open(file_path) as file:
            # Start reading file after the '2D Image coordinates' line
            start_line = len(defined_landmarks) + 7
            for line in islice(file, start_line, None):
                defined_landmarks_values.append(line.rstrip('\n\r'))
        # Fill-in undefined values in landmark as 'NaN'
        for idx in undefined_landmarks_idx:
            defined_landmarks_values.insert(idx, 'NaN')
        # Must have the same number of landmark values as there are possible landmarks (since we filled the undefined ones with 'NaN')
        assert(len(defined_landmarks_values) == len(all_landmarks))
        # Unpack feature values in x and y coordinates
        features = []
        for value in defined_landmarks_values:
            if value == 'NaN':
                features += ['NaN'] * 2 # Feature value is undefined in both coordinates
            else:
                features += map(float, value.split())
        # Must have twice the number of features now
        assert(len(features) == 2 * len(all_landmarks))
        return features
    except Exception as e:
        print('Error: ' + file_path)
def get_lm2_label(file_path):
    '''
    Given a file path, return the label its data represents: the first known
    emotion name contained in the path, or None when no emotion matches.
    '''
    contained = (emotion for emotion in get_all_emotions() if emotion in file_path)
    return next(contained, None)
def parse_lm2_features(file_path):
    '''
    Return the label and features represented by an lm2 file as one list:
    [label, x0, y0, x1, y1, ...].

    NOTE(review): get_lm2_features returns None when parsing fails, which
    raises TypeError on the concatenation below -- verify error handling.
    '''
    return [get_lm2_label(file_path)] + get_lm2_features(file_path)
def get_lm2_files_paths():
    '''
    Return all file paths of lm2 files in the data directory whose path
    contains at least one known emotion label, each path at most once.
    '''
    # NOTE(review): '**' only recurses when glob.glob is called with
    # recursive=True; behaviour kept as-is -- confirm intent.
    data_dir = r'data/bosphorusDB/__files__/__others__/**/*.lm2'
    emotions = get_all_emotions()
    # any() keeps each path at most once; the original loop appended a path
    # once per matching emotion, duplicating multi-label paths.
    return [file_path for file_path in glob.glob(data_dir)
            if any(x in file_path for x in emotions)]
def create_csv(features):
    '''
    Write the feature rows to data/lm2.csv, preceded by a header line.

    Args:
        features: list of rows, each [label, x0, y0, x1, y1, ...]
    '''
    # Create x and y coordinate names for each landmark
    headers = ['Label']
    for landmark in get_all_features():
        headers.append(landmark + '-x')
        headers.append(landmark + '-y')
    # Write out to .csv file
    csv_file = r'data/lm2.csv'
    with open(csv_file, 'w') as output:
        writer = csv.writer(output, lineterminator='\n')
        # BUG FIX: write the header row separately instead of
        # features.insert(0, headers), which mutated the caller's list.
        writer.writerow(headers)
        writer.writerows(features)
if __name__ == '__main__':
    # Parse every emotion-related lm2 file and dump the features to CSV.
    file_paths = get_lm2_files_paths()
    features = [parse_lm2_features(file_path) for file_path in file_paths]
    create_csv(features)
|
import unittest
from database.src.Database import Database
from web.service.github.api.v3.AuthenticationsCreator import AuthenticationsCreator
from web.service.github.api.v3.authentication.Authentication import Authentication
from web.service.github.api.v3.authentication.NonAuthentication import NonAuthentication
from web.service.github.api.v3.authentication.BasicAuthentication import BasicAuthentication
from web.service.github.api.v3.authentication.TwoFactorAuthentication import TwoFactorAuthentication
from web.service.github.api.v3.authentication.OAuthAuthentication import OAuthAuthentication
from web.service.github.api.v3.authentication.OAuthTokenFromDatabaseAuthentication import OAuthTokenFromDatabaseAuthentication
from web.service.github.api.v3.authentication.OAuthTokenFromDatabaseAndCreateApiAuthentication import OAuthTokenFromDatabaseAndCreateApiAuthentication
class TestAuthenticationsCreator_BlackBox(unittest.TestCase):
    """Black-box tests for AuthenticationsCreator.Create()."""
    def test_Create_OAuthAuthentication_BasicAuthentication(self):
        """A registered user without a two-factor secret yields [OAuth, Basic]."""
        db = Database()
        db.Initialize()
        username = 'ytyaru'  # existing user; token registered; no TwoFactorSecret
        creator = AuthenticationsCreator(db, username)
        authentications = creator.Create()  # [OAuthAuthentication, BasicAuthentication]
        self.assertEqual(list, type(authentications))
        self.assertEqual(2, len(authentications))
        self.assertEqual(OAuthAuthentication, type(authentications[0]))
        self.assertEqual(BasicAuthentication, type(authentications[1]))
    def test_Create_OAuthAuthentication_TwoFactorAuthentication(self):
        """A registered user with a two-factor secret yields [OAuth, TwoFactor]."""
        db = Database()
        db.Initialize()
        username = 'csharpstudy0'  # existing user; token registered; has TwoFactorSecret
        creator = AuthenticationsCreator(db, username)
        authentications = creator.Create()  # [OAuthAuthentication, TwoFactorAuthentication]
        self.assertEqual(list, type(authentications))
        self.assertEqual(2, len(authentications))
        self.assertEqual(OAuthAuthentication, type(authentications[0]))
        self.assertEqual(TwoFactorAuthentication, type(authentications[1]))
    def test_Create_UnregisteredException_ConstractorParameter(self):
        """An unregistered username passed to the constructor raises on Create()."""
        db = Database()
        db.Initialize()
        username = 'NoneExistUsername'  # unregistered username
        creator = AuthenticationsCreator(db, username)
        with self.assertRaises(Exception) as e:
            creator.Create()
        # BUG FIX: the assertion must run AFTER the with-block (the original
        # placed it inside, after the raising call, so it never executed),
        # and the raised exception is e.exception — there is no e.msg.
        self.assertEqual(str(e.exception), '指定したユーザ {0} はDBに未登録です。登録してから実行してください。'.format(username))
    def test_Create_UnregisteredException_MethodParameter(self):
        """An unregistered username passed to Create() itself raises."""
        db = Database()
        db.Initialize()
        username = 'ytyaru'  # existing username
        creator = AuthenticationsCreator(db, username)
        username = 'NoneExistUsername'  # unregistered username
        with self.assertRaises(Exception) as e:
            creator.Create(username=username)
        # BUG FIX: same as above — assert outside the with-block, via e.exception.
        self.assertEqual(str(e.exception), '指定したユーザ {0} はDBに未登録です。登録してから実行してください。'.format(username))
|
# "Chocolate Feast": with n units of money, chocolates cost c each, and every
# m wrappers can be traded in for one extra chocolate.
t = int(input().strip())  # number of test cases
for i in range(t):
    n,c,m = input().strip().split(' ')
    n,c,m = [int(n),int(c),int(m)]
    tot_choc=n//c  # chocolates bought outright
    wrap=tot_choc  # wrappers currently in hand
    #print(tot_choc, wrap)
    while wrap >= m:
        # trade wrappers for free chocolates until fewer than m remain
        extra_choc = wrap//m
        wrap-=m*extra_choc
        wrap+=extra_choc
        tot_choc+= extra_choc
    print(tot_choc)
|
"""
Combat handler.
"""
from muddery.server.combat.combat_runner.base_combat import BaseCombat, CStatus
from muddery.server.utils.honours_handler import HONOURS_HANDLER
class HonourCombat(BaseCombat):
    """
    Honour combat handler: a combat whose outcome also adjusts the
    participants' honour values.
    """
    def __del__(self):
        """
        This hook is called whenever the server is shutting down fully
        (i.e. not for a restart). Stops every participant's automatic
        skill casting.
        """
        for info in self.characters.values():
            info["char"].stop_auto_combat_skill()
    async def calc_winners(self):
        """
        Calculate combat winners and losers.

        The team of the first active, living character is taken as the
        winning team; every active member of that team wins, everyone
        else (including all escaped characters) loses.
        """
        winner_team = None
        for info in self.characters.values():
            if info["status"] == CStatus.ACTIVE and info["char"].is_alive:
                winner_team = info["team"]
                break
        winners = {}
        for char_id, info in self.characters.items():
            if info["status"] == CStatus.ACTIVE and info["team"] == winner_team:
                winners[char_id] = info["char"]
        # all escaped characters are losers
        losers = {}
        for char_id, info in self.characters.items():
            if char_id not in winners:
                losers[char_id] = info["char"]
        return winners, losers
    async def calc_combat_rewards(self, winners, losers):
        """
        Called when the combat ends; extends the base rewards with the
        honour change of every character that has a database id.

        Args:
            winners: (dict) char_id -> winning character.
            losers: (dict) char_id -> losing character.

        Returns:
            dict: char_id -> reward dict (with an added "honour" entry).
        """
        rewards = await super(HonourCombat, self).calc_combat_rewards(winners, losers)
        # set honour
        winner_ids = [char.get_db_id() for char in winners.values()]
        loser_ids = [char.get_db_id() for char in losers.values()]
        honour_changes = await HONOURS_HANDLER.set_honours(winner_ids, loser_ids)
        for char_id, info in self.characters.items():
            db_id = info["db_id"]
            if db_id is None:
                continue
            if char_id not in rewards:
                rewards[char_id] = {}
            rewards[char_id]["honour"] = honour_changes[db_id] if db_id in honour_changes else 0
        return rewards
|
""" Chalenge 167 Task 1 Python"""
from sympy import isprime, nextprime
def is_circular_prime(prime: int) -> bool:
    """
    Return True when every rotation of the digits of `prime` is prime.
    """
    digits = str(prime)
    rotations = (digits[k:] + digits[:k] for k in range(len(digits)))
    return all(isprime(int(rotation)) for rotation in rotations)
def generate_primes(limit: int) -> list:
    """
    Return the first `limit` circular primes greater than 100.

    (The original docstring claimed "all primes below a limit", but the
    code collects `limit` circular primes starting from the first prime
    above 100, matching the assertion against OEIS A068652 below.)
    """
    found: list = []
    candidate = nextprime(100)
    while len(found) < limit:
        if is_circular_prime(candidate):
            found.append(candidate)
        candidate = nextprime(candidate)
    return found
# Sanity checks: is_circular_prime returns bools, compared here against 1/0
# (True == 1 and False == 0 in Python).
assert is_circular_prime(2) == 1
assert is_circular_prime(3) == 1
assert is_circular_prime(11) == 1
assert is_circular_prime(13) == 1
assert is_circular_prime(57) == 0
assert is_circular_prime(197) == 1
# Note that the example given on the challenge is wrong according to
# https://oeis.org/A068652
assert generate_primes(10) == [113, 131, 197, 199, 311, 337, 373, 719, 733, 919]
|
from neuron import h
from utils.cell import Cell
class TableCell(Cell):
    # A Cell whose single NEURON section carries the "table" mechanism;
    # records that mechanism's ainf state variable during simulation.
    def _create_cell(self):
        # Create one section and insert the 'table' mechanism into it.
        self.section = h.Section()
        self.section.insert("table")
    def record(self):
        # Record time and ainf_table at the section midpoint (0.5).
        tvec = h.Vector()
        tvec.record(h._ref_t, sec=self.section)
        avec = h.Vector()
        avec.record(self.section(0.5)._ref_ainf_table, sec=self.section)
        # NOTE(review): tvec is recorded but never stored — presumably the
        # Cell base class tracks time itself; confirm, otherwise tvec is lost.
        self.record_vectors["ainf"] = avec
if __name__ == "__main__":
    # Build the cell, record ainf, run a short simulation and dump results.
    table_cell = TableCell()
    table_cell.record()
    table_cell.simulate(1, 0.1)  # presumably (tstop, dt) — confirm against Cell.simulate
    table_cell.output()
    del table_cell
|
import os
import statistics
import numpy as np
import matplotlib.pyplot as plt
def LCG(x, a=np.uint64(1664525), c=np.uint64(1013904223)):
    """Linear Congruential Generator.

    Computes (a*x + c) mod 2**64 exactly using Python integers.

    BUG FIX: the original evaluated `(a*x + c) % 2**64` on np.uint64
    scalars mixed with the Python int 2**64 (which does not fit in any
    64-bit NumPy type); NumPy resolves such mixes by promoting to
    float64 (or raising OverflowError, depending on version), silently
    losing precision for states above 2**53. Python ints are exact.

    Args:
        x (int): Seed / current state
        a (uint64, optional): Multiplier. Defaults to np.uint64(1664525).
        c (uint64, optional): Increment. Defaults to np.uint64(1013904223).

    Returns:
        int: Next generated pseudo-random number in [0, 2**64)
    """
    return (int(a) * int(x) + int(c)) % 2**64
def XOR_shift(x, a1=np.uint64(13), a2=np.uint64(11), a3=np.uint64(3)):
    """XOR-shift pseudo-random number generator step.

    Mixes the 64-bit state with a right-shift, a left-shift and another
    right-shift, each XOR-ed back into the state.

    Args:
        x (int): state
        a1 (uint64, optional): first right bit-shift. Defaults to np.uint64(13).
        a2 (uint64, optional): left bit-shift. Defaults to np.uint64(11).
        a3 (uint64, optional): second right bit-shift. Defaults to np.uint64(3).

    Returns:
        np.uint64: next state
    """
    state = np.uint64(x)
    state ^= state >> a1
    state ^= state << a2
    state ^= state >> a3
    return state
def RNG(x):
    """Pseudo-random-number generator combining linear congruential and
    XOR-shift generators. Each draw runs the pipeline:
    LCG1 -> (XOR1 ^ XOR2) -> LCG2, then scales into [0, 1].

    Args:
        x (int): seed / initial state

    Yields:
        float: pseudo-random number in [0, 1]
    """
    state = x
    while True:
        state = LCG(np.uint64(state))
        mixed = XOR_shift(state) ^ XOR_shift(
            state,
            np.uint64(15),
            np.uint64(13),
            np.uint64(9)
        )
        state = LCG(
            mixed,
            np.uint64(6364136223846793005),
            np.uint64(1442695040888963407)
        )
        yield state / (2**64 - 1)
def parabola_min(f, x1, x2, x3):
    """Return the abscissa of the vertex of the parabola through the points
    (x1, f(x1)), (x2, f(x2)) and (x3, f(x3)).

    Args:
        f (callable): function evaluated at the three x coordinates
        x1 (float): first sample point
        x2 (float): second sample point
        x3 (float): third sample point

    Returns:
        float: x coordinate of the parabola's extremum
    """
    y1, y2, y3 = f(x1), f(x2), f(x3)
    left = x2 - x1
    right = x2 - x3
    dy_left = y2 - y3
    dy_right = y2 - y1
    num = left * left * dy_left - right * right * dy_right
    den = left * dy_left - right * dy_right
    return x2 - 0.5 * num / den
def bracketing(f, a, b, w=1.618):
    """Bracket a minimum of f starting from the interval [a, b], using
    golden-ratio steps plus one parabolic-interpolation attempt.

    Args:
        f (callable): function whose minimum is being bracketed
        a (float): bracket boundary
        b (float): bracket boundary
        w (float, optional): step fraction. Defaults to 1.618.

    Returns:
        list: three floats bracketing a minimum
    """
    # order the points so that f(a) >= f(b), i.e. b is downhill from a
    if f(b) > f(a):
        a, b = b, a
    # take a first golden-ratio step beyond b
    c = b + (b - a) * w
    # already uphill at c, so [a, b, c] brackets the minimum
    if f(c) > f(b):
        return [a, b, c]
    # try the minimum of the parabola through a, b, c
    d = parabola_min(f, a, b, c)
    if f(d) < f(c):
        return [b, d, c]
    if f(d) > f(b):
        return [a, b, d]
    # d did not improve matters; if it is far from b, fall back to a section step
    if abs(d - b) > 100 * abs(c - b):
        d = c + (c - b) * w
        return [b, c, d]
    return [b, c, d]
def golden_section(f, xmin, xmax, target_acc=1e-6, maxit=1e4):
    """Find a minimum of f in [xmin, xmax] with the golden-section algorithm.

    The bracket [a, b, c] is first obtained from bracketing(); it is then
    tightened each iteration by probing a new point d placed a fraction
    w = 2 - phi into the larger of the two sub-intervals.

    Args:
        f (callable): Function for which to find a minimum
        xmin (float): left boundary of the initial bracket
        xmax (float): right boundary of the initial bracket
        target_acc (float, optional): Target accuracy on the bracket width.
            Defaults to 1e-6.
        maxit (int, optional): Maximum number of iterations. Defaults to 1e4.

    Returns:
        float: x-value of the obtained minimum (the better of b and the
        last probe d)
    """
    w = 0.38197 # 2-phi
    i = 0
    # Bracket the minimum using bracketing algorithm
    a,b,c = bracketing(f,xmin, xmax)
    while i < maxit:
        # Identify larger interval
        if abs(c-b) > abs(b-a):
            x1, x2 = b, c
        else:
            x1, x2 = a, b
        # Choose new point in a self similar way
        d = b + (x2 -x1)*w
        # abort if target accuracy reached and return best value
        if abs(c-a) < target_acc:
            if f(d) < f(b):
                return b
            else:
                return d
        # Tighten the bracket around the better of b and d
        if f(d) < f(b):
            if x1 == b and x2 == c:
                a, b = b, d
            elif x1 == a and x2 == b:
                c, b = b, d
        else:
            if x1 == b and x2 == c:
                c = d
            elif x1 == a and x2 == b:
                a = d
        i+=1
    # if maxit reached, return last d
    return d
def rejection_sampling(p, p_max, xrange, n, rng):
    """Draw n samples from the distribution p by rejection sampling.

    Candidates are drawn uniformly from xrange and accepted with
    probability p(x)/p_max, where p_max normalises p to at most 1.

    Args:
        p (callable): Distribution to sample
        p_max (float): Normalisation factor (upper bound) of p
        xrange (list): [xmin, xmax] interval to sample from
        n (int): Number of points to sample
        rng (generator): yields uniform deviates in [0, 1]

    Returns:
        list: x coordinates of accepted points
    """
    lo = xrange[0]
    width = xrange[1] - lo
    accepted = []
    while len(accepted) < n:
        candidate = next(rng) * width + lo
        if p(candidate) / p_max > next(rng):  # accept candidate
            accepted.append(candidate)
    return accepted
def quicksort_with_indexing(l, index_l):
    """Recursive quicksort that also tracks the permutation applied to l.

    WARNING: this implementation is not entirely correct — it can fail for
    lists containing duplicates. For the problems at hand that never
    occurs, but it should eventually be fixed.

    The pivot is chosen as the median of the first, middle and last
    element. index_l starts as [0, 1, ..., len(l)-1] and receives every
    swap applied to l, so afterwards it maps sorted positions back to the
    original positions.

    Args:
        l (list): list to sort
        index_l (list): [0, 1, ..., len(l)-1]

    Returns:
        (list, list): sorted list, permutation list
    """
    # if len list is 2, return elements in appropriate order
    if len(l)==2:
        pivot = l[-1]
        if l[0] > l[-1]:
            l[0], l[-1] = l[-1], l[0]
            # keep track of permutations
            index_l[0], index_l[-1] = index_l[-1], index_l[0]
        return l, index_l
    # if len list is smaller than 2, return list
    elif len(l) < 2:
        return l, index_l
    # Choose a pivot and put it in the right order
    else:
        pivot = statistics.median([l[0], l[len(l)>>1], l[-1]])
        if l[0] == pivot:
            l[0], l[len(l)>>1] = l[len(l)>>1], l[0]
            # keep track of permutations
            index_l[0], index_l[len(l)>>1] = index_l[len(l)>>1], index_l[0]
        elif l[-1] == pivot:
            l[-1], l[len(l)>>1] = l[len(l)>>1], l[-1]
            # keep track of permutations
            index_l[-1], index_l[len(l)>>1] = index_l[len(l)>>1], index_l[-1]
        idx_pivot = len(l)>>1
        if l[0] > l[-1]:
            l[0], l[-1] = l[-1], l[0]
            # keep track of permutations
            index_l[0], index_l[-1] = index_l[-1], index_l[0]
        i = 1
        j = len(l)-1
        while j >= i:
            # increase i until l[i] is larger than pivot
            while not l[i] >= pivot:
                i+=1
            # decrease j until l[j] is smaller than pivot
            while not l[j] <= pivot:
                j-=1
            # Indices have crossed, break while loop
            if j < i:
                break
            # swap elements i and j
            l[i], l[j] = l[j], l[i]
            # keep track of permutations
            index_l[i], index_l[j] = index_l[j], index_l[i]
            # if the pivot is swapped, the index of the pivot changes and has to be
            # updated.
            if i == idx_pivot:
                idx_pivot = j
                i += 1
            elif j == idx_pivot:
                idx_pivot = i
                j -= 1
        # split list and permutation list below and above pivot
        l_lower = l[:idx_pivot]
        l_upper = l[idx_pivot+1:]
        index_lower = index_l[:idx_pivot]
        index_upper = index_l[idx_pivot+1:]
        # recursively call quicksort for lower and upper list and permutation list
        l_lower, index_lower = quicksort_with_indexing(l_lower, index_lower)
        l_upper, index_upper = quicksort_with_indexing(l_upper, index_upper)
        return l_lower + [pivot] + l_upper, \
               index_lower + [index_l[idx_pivot]] + index_upper
def quicksort(l):
    """Recursive quicksort.

    WARNING: this implementation is not entirely correct — it can fail for
    lists containing duplicates. For the problems at hand that never
    occurs, but it should eventually be fixed.

    The pivot is chosen as the median of the first, middle and last element.

    Args:
        l (list): list to sort

    Returns:
        list: sorted list
    """
    # if len list is 2, return elements in appropriate order
    if len(l)==2:
        pivot = l[-1]
        if l[0] > l[-1]:
            l[0], l[-1] = l[-1], l[0]
        return l
    # if len list is smaller than 2, return list
    elif len(l) < 2:
        return l
    # Choose a pivot and put it in the right order
    else:
        pivot = statistics.median([l[0], l[len(l)>>1], l[-1]])
        if l[0] == pivot:
            l[0], l[len(l)>>1] = l[len(l)>>1], l[0]
        elif l[-1] == pivot:
            l[-1], l[len(l)>>1] = l[len(l)>>1], l[-1]
        idx_pivot = len(l)>>1
        if l[0] > l[-1]:
            l[0], l[-1] = l[-1], l[0]
        i = 1
        j = len(l)-1
        while j >= i:
            # increase i until l[i] is larger than pivot
            while not l[i] >= pivot:
                i+=1
            # decrease j until l[j] is smaller than pivot
            while not l[j] <= pivot:
                j-=1
            # Indices have crossed, break while loop
            if j < i:
                break
            # swap elements i and j
            l[i], l[j] = l[j], l[i]
            # if the pivot is swapped, the index of the pivot changes and has to be
            # updated.
            if i == idx_pivot:
                idx_pivot = j
                i += 1
            elif j == idx_pivot:
                idx_pivot = i
                j -= 1
        # split list below and above pivot
        l_lower = l[:idx_pivot]
        l_upper = l[idx_pivot+1:]
        # recursively call quicksort for lower and upper list
        l_lower = quicksort(l_lower)
        l_upper = quicksort(l_upper)
        return l_lower + [pivot] + l_upper
def n(x, A=256/(5*np.pi**(1.5)), a=2.4, b=0.25, c=1.6, N=100):
    """Number density profile: A*N*(x/b)**(a-3)*exp(-(x/b)**c)."""
    u = x / b
    return A * N * u ** (a - 3) * np.exp(-u ** c)
def pdx(x, A=256/(5*np.pi**(1.5)), a=2.4, b=0.25, c=1.6):
    """Radial probability density: 4*pi*x**2 * A*(x/b)**(a-3)*exp(-(x/b)**c)."""
    u = x / b
    return 4 * np.pi * x ** 2 * A * u ** (a - 3) * np.exp(-u ** c)
def poisson(k,l):
    """Poisson probability P(k, l) = e^-l * l^k / k!.

    The product is accumulated term by term as (l/f * e^(-l/k)) for
    f = 1..k, so neither l**k nor k! is ever formed explicitly; this
    postpones overflow to much larger k and l than a direct calculation.

    Args:
        k (int): Integer for which to calculate the probability
        l (float): Non-negative mean of the distribution

    Returns:
        float: Poisson probability P(k, l)

    Raises:
        ValueError: if l < 0, if k is not an integer, or if k < 0.
    """
    # Check l and k allowed values
    if l<0:
        raise ValueError("l should be larger than 0.")
    elif (
        not isinstance(k, int) and
        not isinstance(k, np.int32) and
        not isinstance(k, np.int64)
    ):
        raise ValueError("k should be an integer not {}.".format(type(k)))
    elif k < 0:
        # BUG FIX: the original constructed this ValueError without raising
        # it, silently letting negative k through to the loop below.
        raise ValueError("k should be larger than 0.")
    if k == 0:
        # l^k=1 and k! = 1
        return np.exp(-l)
    else:
        p = 1
        # Calculate l^k/k!*e^-l using a running product
        e = np.exp(-l/k) # calculated once, applied k times
        for f in range(1, k+1):
            p*=l/f*e
        return p
if __name__=='__main__':
    # Setting constants
    seed = 42
    plot_dir = './plots/'
    output_dir = './output/'
    print('random seed: ', seed)
    #2a: locate the maximum of N(x)dx = 4*pi*x^2*n(x) by minimising its negative
    minus_Ndx = lambda x : -4*np.pi*x**2*n(x)
    # find minimum using golden section method
    x_max_Ndx = golden_section(minus_Ndx, 1e-4, 1)
    max_Ndx = -1*minus_Ndx(x_max_Ndx)
    with open(os.path.join(output_dir, '2a_x_max_Ndx.txt'), 'w') as f:
        f.write('{:0.5f}'.format(x_max_Ndx))
    with open(os.path.join(output_dir, '2a_max_Ndx.txt'), 'w') as f:
        f.write('{:0.2f}'.format(max_Ndx))
    #2b
    rng = RNG(seed+1)
    # sample p(x)dx using rejection sampling, normalised by its maximum from 2a
    sample = rejection_sampling(pdx, pdx(x_max_Ndx), [1e-4,5], 10000, rng)
    x_sample = np.logspace(-4,np.log10(5),20)
    x_pdx = np.logspace(-4,np.log10(5),200)
    # plotting results
    plt.figure(figsize=(5,5))
    plt.hist(sample,x_sample, density=True, label='sample')
    plt.plot(x_pdx, pdx(x_pdx), label='analytical')
    plt.xscale('log')
    plt.xlabel(r'$r/r_\mathrm{vir}$')
    plt.ylabel('density')
    plt.axis(xmin=1e-4, xmax=5)
    plt.legend()
    plt.tight_layout()
    plt.savefig(os.path.join(plot_dir, '2b_rejection_sampling.png'))
    plt.clf()
    #2c
    random_n = [next(rng) for _ in range(len(sample))]
    # Sort a random list of the same length as the sample and save the
    # permutations. This yields a list containing randomly shuffled indexes.
    _, index_array = quicksort_with_indexing(
        random_n, list(range(len(random_n)))
    )
    # Use shuffling indices to select 100 random samples
    sample_selection = np.array(sample)[index_array[:100]].tolist()
    # Sort random sample using quicksort
    sorted_sample_selection = np.array(quicksort(sample_selection))
    # Calculate cumulative count of galaxies within every radius r
    bins = np.logspace(-4,np.log10(5),500)
    cumsum_sorted_sample_selection = []
    for i in bins:
        cumsum_sorted_sample_selection.append(
            len(sorted_sample_selection[sorted_sample_selection<i])
        )
    # plotting
    plt.figure(figsize=(5,5))
    plt.semilogx(bins, cumsum_sorted_sample_selection)
    plt.xlabel(r'$r/r_\mathrm{vir}$')
    plt.ylabel('number of galaxies with radius')
    plt.axis(
        xmin=1e-4,
        xmax=5,
        ymin=0,
        ymax=cumsum_sorted_sample_selection[-1]+0.5
    )
    plt.tight_layout()
    plt.savefig(os.path.join(plot_dir, '2c_galaxies_within_radius.png'))
    plt.clf()
    #2d
    # Use plt.hist to obtain counts in bins
    histogram = plt.hist(sample, x_sample, density=False)
    # obtain the edges of the most populated radial bin
    radial_bin_edges = (
        histogram[1][np.argmax(histogram[0])],
        histogram[1][np.argmax(histogram[0])+1]
    )
    sample_selection = [
        i and j for i, j in zip(
            sample>radial_bin_edges[0],
            sample<radial_bin_edges[1]
        )
    ]
    sample_radial_bin = np.array(sample)[sample_selection]
    # sort sample in radial bin to efficiently obtain percentiles
    sorted_sample_radial_bin = quicksort(sample_radial_bin.tolist())
    N = len(sample_radial_bin)
    median = sorted_sample_radial_bin[N>>1]
    percentile_16 = sorted_sample_radial_bin[int(0.16*N)]
    percentile_84 = sorted_sample_radial_bin[int(0.84*N)]
    # write output to file
    with open(os.path.join(output_dir, '2d_16th_percentile.txt'), 'w') as f:
        f.write('{:.2f}'.format(percentile_16))
    with open(os.path.join(output_dir, '2d_median.txt'), 'w') as f:
        f.write('{:.2f}'.format(median))
    with open(os.path.join(output_dir, '2d_84th_percentile.txt'), 'w') as f:
        f.write('{:.2f}'.format(percentile_84))
    # divide galaxies into halos containing 100 galaxies each
    sample_100_bins = [sample[i:i+100] for i in range(0,len(sample), 100)]
    # Calculate number of galaxies per halo that fall in the chosen radial bin
    numbers_in_bin_in_halo = []
    for s in sample_100_bins:
        sample_selection = [
            i and j for i, j in zip(
                s>radial_bin_edges[0],
                s<radial_bin_edges[1]
            )
        ]
        numbers_in_bin_in_halo.append(sum(sample_selection))
    # calculate poissonian mean and 1-sigma
    poisson_mean = sum(numbers_in_bin_in_halo)/len(numbers_in_bin_in_halo)
    poisson_1sig = poisson_mean**0.5
    # plot bar plot with number of galaxies in each halo
    plt.figure(figsize=(5,5))
    plt.bar(
        list(range(len(numbers_in_bin_in_halo))),
        numbers_in_bin_in_halo,
        width=1,
        zorder=0,
        alpha=0.8
    )
    plt.hlines(
        poisson_mean,
        0,
        100,
        ls='-',
        color='C1',
        label=r'$\lambda$'
    )
    plt.hlines(
        poisson_mean - poisson_1sig,
        0,
        100,
        ls='--',
        color='C1',
        label=r'$\lambda \pm 1\sigma$')
    plt.hlines(
        poisson_mean + poisson_1sig,
        0,
        100,
        ls='--',
        color='C1')
    plt.axis(
        xmin=0,
        xmax=99,
        ymin=poisson_mean - 3*poisson_1sig,
        ymax=poisson_mean + 3*poisson_1sig
    )
    plt.xlabel('Halo number')
    plt.ylabel('Number of satellite galaxies')
    plt.legend()
    plt.tight_layout()
    plt.savefig(os.path.join(plot_dir, '2d_bar_plot_counts_per_halo.png'))
    plt.clf()
    # Plot density of number counts in bins against the Poisson distribution
    plt.hist(
        numbers_in_bin_in_halo,
        bins=9,
        density=True,
        label='number of galaxies'
    )
    plt.plot(
        np.arange(20,60,1),
        [poisson(x, poisson_mean) for x in np.arange(20,60,1)],
        label = r'$P_{36}(x)$'
    )
    plt.axis(
        xmin=poisson_mean - 2*poisson_1sig,
        xmax=poisson_mean + 2*poisson_1sig
    )
    plt.xlabel(r'number of galaxies ($x$)')
    plt.ylabel('number denisty')
    plt.legend()
    plt.tight_layout()
    plt.savefig(os.path.join(plot_dir, '2d_number_denisty_counts_per_halo.png'))
    plt.clf()
''' Sprawdzanie czy graf nieskierowany jest dwudzielny (czyli czy da się podzielić jego
wierzchołki na dwa zbiory, takie że krawędzie łączą jedynie wierzchołki z różnych zbiorów) '''
# robimy BFS (lub DFS) przydzielajac wiercholkom "kolory"
from collections import deque
def isBipartite(G, s):
    """BFS 2-colouring test for bipartiteness.

    Args:
        G: graph given as adjacency lists (G.arr[v][0] is v itself, the
           remaining entries in the row are v's neighbours)
        s (int): start vertex

    Returns:
        (bool, list): (True, colours) if the component reachable from s is
        bipartite, otherwise (False, partial colouring); colours are 0/1.
    """
    q = deque()
    color = [None] * len(G.arr)
    visited = [False] * len(G.arr)
    q.appendleft(s)
    visited[s] = True
    color[s] = 0
    while (len(q) != 0):
        # BUG FIX: removed the dead `if len(q)==0: q=deque()` line — the
        # loop condition guarantees the queue is non-empty here.
        a = q.pop()
        # inspect all neighbours (row indices 1..end; index 0 is the vertex itself)
        for el in G.arr[a][1:]:
            if color[el] == color[a]:
                return False, color
            elif not visited[el]:
                visited[el] = True
                color[el] = 1 - color[a]  # alternate colours 0 and 1
                q.appendleft(el)
    return True, color
# reprezentacja poprzez listy adjacencji
class graph:
    """Directed graph stored as adjacency lists.

    Row v of self.arr is [v, n1, n2, ...]: the vertex itself followed by
    its neighbours.
    """
    def __init__(self, size):
        self.size = size
        self.arr = [[vertex] for vertex in range(size)]
    def add_edge(self, v, u):
        """Add an edge from v to u."""
        self.arr[v].append(u)
    def printG(self):
        """Print the adjacency lists, one row per vertex."""
        print("\n")
        for row in self.arr:
            for entry in row:
                print(entry, end=" ")
            print("\n")
# Build a small demo graph (each undirected edge is added in both
# directions) and run the bipartiteness test from vertex 0.
G=graph(4)
G.add_edge(1,0)
G.add_edge(1,2)
G.add_edge(0,2)
G.add_edge(3,2)
G.add_edge(1,3)
G.add_edge(0,1)
G.add_edge(2,1)
G.add_edge(2,0)
G.add_edge(2,3)
G.add_edge(3,1)
G.printG()
print(isBipartite(G,0))
|
from django.shortcuts import render
from .models import Wheel, Nav, test_table
from django.forms.models import model_to_dict
from django.core import serializers
import json
import pandas as pd
from django.http import HttpResponse
from data_static import *
# Create your views here.
def jsdaoru(request):
    """Render the js_daoru page with every Wheel row plus the name and
    trackid columns as plain lists."""
    wheels = Wheel.objects.all()
    names = list(Wheel.objects.values_list('name', flat=True))
    track_ids = list(Wheel.objects.values_list('trackid', flat=True))
    context = {"wheelsList": wheels, "name": names, "data": track_ids}
    return render(request, 'axf/js_daoru.html', context)
def index(request):
    # Main dashboard view: collect a student's profile, per-subject scores,
    # score ranks and consumption series, then render them as JSON-ready data.
    StudentID = int(request.POST.get("StudentID", '15672'))  # default student id
    print(StudentID)
    grade = request.POST.get("grade")  # NOTE(review): read but never used
    mes_sub_name = request.POST.get("subject", '物理')
    other = request.POST.getlist("other")  # NOTE(review): read but never used
    student_info, all_cla_name = student_info_desc(StudentID)
    table_1_series, subject_all, exam_name = student_info_chengji(StudentID=StudentID, mes_sub_name=mes_sub_name,score='mes_Score')
    table_1_series_rank = student_info_chengji(StudentID=StudentID, mes_sub_name=mes_sub_name,
                                               score='mes_Score_rank')[0]
    consumption_1_series, kind_all, days_7 = student_consumption_desc(StudentID=StudentID)
    # NOTE(review): this guard runs AFTER mes_sub_name has already been used
    # above — presumably it should be checked first; confirm with data_static.
    if mes_sub_name == 0:
        return HttpResponse("您输入的学生或课程未查询到,请检查后重新输入!")
    # Display labels for the profile fields (sex, ethnicity, birth date, ...)
    student_info_name = [
        '性别'
        , '民族'
        , '出生日期'
        , '班级名'
        , '家庭住址'
        , '家庭类型'
        , '政治面貌'
        , '班级ID'
        , '班级学期'
        , '是否住校'
        , '是否退学'
        , '宿舍号']
    # Skip the first two columns of the profile row (id fields shown separately)
    student_info_value = list(student_info.values[0])[2:]
    return render(request, 'axf/index.html',
                  {"StudentName": student_info.bf_Name.values[0],'table_1_series_rank':json.dumps(table_1_series_rank), 'cla_Name': student_info.cla_Name.values[0],
                   'all_cla_name':all_cla_name, "mes_sub_name": mes_sub_name, "StudentID":
                       student_info.bf_StudentID.values[0],
                   "student_info_name": student_info_name, "student_info_value": student_info_value,
                   "table_1_series": json.dumps(table_1_series), "subject_all": json.dumps(subject_all),
                   "exam_name": json.dumps(exam_name),"consumption_1_series":json.dumps(consumption_1_series),"kind_all": json.dumps(kind_all),"days_7": json.dumps(days_7)})
##测试专用
def home(request):
    """Demo view: render home.html with JSON-encoded sample data."""
    sample_list = ['自强学堂', '渲染Json到模板']
    sample_dict = {'site': [1, 2, 3, 4], 'author': [5, 6, 7, 8]}
    exam_numname = [3, 4, 5, 6]
    mes_T_Score = [1, 2, 3, 4]
    context = {
        'List': json.dumps(sample_list),
        'Dict': json.dumps(sample_dict),
        'exam_numname': exam_numname,
        'mes_T_Score': mes_T_Score,
    }
    return render(request, 'axf/home.html', context)
##测试专用
def add(request):
    """Demo view: render the add page.

    The original built four locals (List, Dict, exam_numname, mes_T_Score)
    and never passed them to the template; they have been removed.
    """
    return render(request, 'axf/add.html')
|
# Longest strictly increasing subsequence lengths, O(n^2) dynamic programming.
n = int(input())
li = list(map(int, input().split()))
cnt_li = [1]*n  # cnt_li[i]: length of the longest increasing subsequence ending at i
min_v = min(li)  # NOTE(review): computed but never used — candidate for removal
for i in range(1, n):
    tmp = -1  # best predecessor subsequence length found so far
    k = -1    # index of that predecessor
    bol = 0   # flag: at least one smaller predecessor exists
    for j in range(i):
        if li[j] < li[i]:
            if cnt_li[j] > tmp:
                tmp = cnt_li[j]
                k = j
                bol = 1
    if bol == 1:
        cnt_li[i] = cnt_li[k]+1
    else:
        cnt_li[i] = 1
print(cnt_li)
|
from AlgoExpert.patternmatcher import patternMatcher
import unittest
class TestProgram(unittest.TestCase):
    """Black-box test for patternMatcher."""
    def test_case_1(self):
        # Pattern "xxyxxy" over the string should map x -> "go" and
        # y -> "powerranger".
        self.assertEqual(patternMatcher(
            "xxyxxy", "gogopowerrangergogopowerranger"), ["go", "powerranger"])
|
#https://programmers.co.kr/learn/courses/30/lessons/42862
def solution(n, lost, reserve):
    """Gym-clothes problem: return how many of the n students can attend.

    Students in `lost` had their clothes stolen; students in `reserve`
    brought a spare and may lend it to an adjacent lost student. A student
    who is in both sets keeps their spare for themselves.
    """
    lost_set = set(lost)
    reserve_set = set(reserve)
    overlap = lost_set & reserve_set
    lost_set -= overlap
    reserve_set -= overlap
    # mark each student's state: 'student' (has clothes), 'lost' or 'reserve'
    status = ['student'] * n
    for idx in lost_set:
        status[idx - 1] = 'lost'
    for idx in reserve_set:
        status[idx - 1] = 'reserve'
    # single left-to-right pass pairing adjacent lost/reserve students
    prev = status[0]
    for i in range(1, len(status)):
        cur = status[i]
        if cur == 'student':
            prev = 'student'
        elif prev != 'student' and prev != cur:
            # adjacent lost/reserve pair: the spare is handed over
            status[i - 1] = 'student'
            status[i] = 'student'
            prev = 'student'
        else:
            prev = cur
    # everyone except the still-'lost' students can attend
    return sum(1 for s in status if s == 'student' or s == 'reserve')
'''
훔쳐지지도, 여분을 가지도 않은 학생을 'student' 말고 다르게 표현할 방법이 있을까?
아니면 체육복을 기준으로 훔쳐지면 -1, 여벌은 +1, 아무것도 아니면 0 으로 표현해도 괜찮을까?
''' |
from timeit import default_timer as timer
from pydispatch import dispatcher
from webserver import WebServer
import subprocess
import time
import random
import math
Web_Server = WebServer() # Webserver running on BBB for an HTML interface
# GPIO/PWM libraries only exist on the BeagleBone; off-target runs fall
# back to the print-based stubs defined below.
try:
    import Adafruit_BBIO.GPIO as GPIO
    import Adafruit_BBIO.PWM as PWM
except:
    print "Could not import GPIO library!"
# Hardware Pin defines
# ---------------------------------------
Solenoid1_Pin = "P9_23"
Solenoid2_Pin = "P9_24"
StepperPin0 = "P9_11"
StepperPin1 = "P9_13"
StepperPin2 = "P9_15"
StepperPin3 = "P9_27"
pin_list = [StepperPin0,StepperPin1,StepperPin2,StepperPin3]
# all leds are setup on pwm pins, although not necessary since not all need to fade
LED_Brain_Activity_Pin = "P9_14"
LED_Eye1_Pin = "P9_21"
LED_Eye2_Pin = "P8_13"
LED4_Pin = "P9_16"
LED5_Pin = "P9_12"
LED_Monitor_R_Pin = "P9_22"
LED_Monitor_G_Pin = "P9_42"
LED_Monitor_B_Pin = "P8_19"
Pause_Switch_Pin = "P8_14" # switch for starting and stopping the program
Hall_Limit_Pin = "P8_16"
PIR_Pin = "P8_18"
# ---------------------------------------
# State variables
# ---------------------------------------
'''
Bot states summary:
Bot_State = 200  --- Bot is homing after boot up
Bot_State = 204/205  --- Bot hit a limit switch during homing and reverses direction
Bot_State = 210 --- Bot hit second limit switch and travels to center
Bot_State = 1000 --- Bot is in normal action state
Bot_State = 2000 --- Bot begins varied second pattern
Bot_State = 2100 --- Bot is in begining of second pattern
Bot_State = 2200 --- Bot is in pause of second pattern
Bot_State = 2300 --- Bot is in final fast typing stage of second pattern
Bot_State = 2400 --- Bot is facing screen and typing quickly
Bot_State = 5000 --- Bot hit a limit switch during normal routine and needs to home
'''
Bot_State = 200 # variable for current Bot State
Exit_State = False # flag for state transitions
State_Timer = 0 # timer for how long a state is active
# ---------------------------------------
# General program variables
# ---------------------------------------
Program_Paused_Button = False # Start Stop button for whole program. Acts like a global pause.
Program_Paused_Software = False # Start Stop from a software interface. Also acts as global pause.
PIR_Paused = False # paused if no motion is detected
PIR_Trigger = False # motion detection
Limit_Hit = False # Limit switch for head
Now_time = 0 # variable for current program time in milliseconds
PIR_Max_Time = 180 # seconds
PIR_time = timer()
Homing_Needed = True # if a limit switch is hit then at the next resting state the bot will re-home
Debug_Print_Timer = 0 # seconds tracker for debug messages so as not to clog up program performance
Homing_Hit_Count = 0 # for debugging drift
# ---------------------------------------
# State duration variables, these can be changed to adjust how long a state lasts.
# ---------------------------------------
State1000_Duration = 5 # times in seconds (decimals are fine), State 1000 later gets random variation
State2000_Duration = 2
State2100_Duration = 3
State2200_Duration = 7
State2300_Duration = 4
State2400_Duration = 4
# min and max for random variation of state 1000 time
State1000_Min_Duration = 5 # seconds
State1000_Max_Duration = 10
# ---------------------------------------
# Variables for LED eye Blinking
# ---------------------------------------
Blink_Init_Delay = 1.75 # delay on state 2200 before blink
Blink_Duration = 0.150 # how long a blink ON/OFF lasts for
Blink_Millis = 0 # timer for Blinking pattern
Blinking = False # flag to start counting blinks
Blink_Count = 0
Num_Blinks = 6 # number of blinks by bot at monitor (divide by 2 since on and off)
# ---------------------------------------
# LED variables
# ---------------------------------------
# _Bright vars are 0 - 255 pwm intensity values
LED_Brain_Activity_Bright = 0
LED_Eye1_Bright = 0
LED_Eye2_Bright = 0
LED4_Bright = 0
LED_Monitor_Bright = 0
Fade_Rate = 5 # larger value creates a more rapid fade, could set specific rates for leds
LED_Millis = 0
LED_Millis2 = 0
LED_Update_Time = 0.050
Incoming_Message_Blink_Time = 0.300 #0.100# equals ~5Hz flashing
Full_ON = 255
Full_OFF = 0
Dim_Val = 5 # tune this to correct dim level for monitor LED
# ---------------------------------------
# Solenoid typing variables
# ---------------------------------------
Solenoid1_ON = False
Solenoid2_ON = False
Right_Hand_Turn = True # flag for switching between hands for typing
# typing timing variables
Typing_Timer = 0 # timer variable for the solenoid typing strokes, in milliseconds
Keystroke_Time_Init = 0.150 # time in ms that solenoids will stay on for
Keystroke_Time = Keystroke_Time_Init
Min_Type_Pause = 0.100 # used to prevent the pause time from being less than a tenth of a second, this can be edited
Pause_Time = 2.0 # initial pause time, this will be updated with random variation
# variables for affecting random typing
Sigma_Init = 0.750 # standard deviation in milliseconds for pause time on solenoid actuation, a large value will create a wider range of randomness
Mean_Init = 1.0 # average time between keystrokes, shorter will create faster typing
Sigma = Sigma_Init
Mean = Mean_Init
Switch_Hand_Val = 10 # initial value, this is used to decide whether to alternate hands
# ---------------------------------------
#Stepper motor variables
# ---------------------------------------
Steps = 0 # variable for commanding the number of Steps for the motor to turn
Look_Left = True # change this to reverse the direction the head stepper motor spins during looks
Look_Right = not Look_Left
Stepper_Direction = Look_Left
Steps_Per_Rev = 4096 # number of Steps per 1 full revolution, beware documentation on cheap stepper motors often don't match actual step number
Step_Count = 0
Move_Steps = 0
Steps_In_Head_Range = 0 # this gets set to be the total steps found in the intended head range of motion
Head_Pos_Tracker = 0 # this is a variable that is used only for tracking the head position in debugging
Center_Head_Step = 0 # step value for center of motion range
Left_Head_Step = 0
Right_Head_Step = 0
# range, 0 - ~ 500
Steps_From_Hall_To_Monitor_Look = 100 # increase/decrease this value to get the head to look farther/less far during the turn to look at the monitor
Send_Step_Command = False
Do_Stepper_Move = False
Stepper_Command_Timer = 0
Last_Stepper_Time = 0
Rand_Stepper_Timer = 5.0 # random delay between small stepper movements
# ---------------------------------------
def gpioSetup(pin, dir):
    """Configure a GPIO pin as input or output.

    pin: board pin identifier understood by the GPIO library.
    dir: "out" for an output pin, anything else configures an input.

    Failures (e.g. GPIO library/hardware missing on a desktop run) are
    deliberately swallowed so the program can run off-target.
    """
    try:
        if dir == "out":
            mode = GPIO.OUT
        else:
            mode = GPIO.IN
        GPIO.setup(pin, mode)
    except Exception:
        # narrow from bare except: still swallows hardware errors but no
        # longer eats KeyboardInterrupt/SystemExit
        pass
def gpioOutput(pin, val):
    """Drive a GPIO pin low (val == 0) or high (any other value).

    On failure (no hardware/library) the intended write is echoed to stdout
    and execution continues.
    """
    try:
        if val == 0:
            level = GPIO.LOW
        else:
            level = GPIO.HIGH
        GPIO.output(pin, level)
    except Exception:
        # parenthesized single-argument print is valid Python 2 and 3
        print("GPIO output: " + str(pin) + ", " + str(val))
def gpioInput(pin):
    """Read a GPIO input pin; return 0 when the read fails (no hardware)."""
    try:
        return GPIO.input(pin)
    except Exception:
        # off-target fallback: behave as if the pin reads low
        return 0
def gpioCleanup():
    """Release all GPIO resources, ignoring errors when no hardware is present."""
    try:
        GPIO.cleanup()
    except Exception:
        pass
def pwmSetup(pin, val):
    """Start PWM on a pin at duty cycle `val`; ignore failures off-target."""
    try:
        PWM.start(pin, val)
    except Exception:
        pass
def pwmSetDutyCycle(channel, duty):
    """Update the PWM duty cycle on a channel; log to stdout and continue on failure."""
    try:
        PWM.set_duty_cycle(channel, duty)
    except Exception:
        # parenthesized single-argument print is valid Python 2 and 3
        print("PWM duty cycle: " + str(channel) + ", " + str(duty))
def pwmCleanup():
    """Stop all PWM output, ignoring errors when no hardware is present."""
    try:
        PWM.cleanup()
    except Exception:
        pass
# Hardware Pin modes setup
# ---------------------------------------
gpioSetup(Solenoid1_Pin, "out")
gpioSetup(Solenoid2_Pin, "out")
gpioSetup(StepperPin0, "out")
gpioSetup(StepperPin1, "out")
gpioSetup(StepperPin2, "out")
gpioSetup(StepperPin3, "out")
gpioSetup(LED5_Pin, "out")
# start all of the LED PWM pins at 0 and update their duty cycle later
pwmSetup(LED_Brain_Activity_Pin, 0)
pwmSetup(LED_Eye1_Pin, 0)
pwmSetup(LED_Eye2_Pin, 0)
pwmSetup(LED4_Pin, 0)
pwmSetup(LED_Monitor_R_Pin, 0)
pwmSetup(LED_Monitor_G_Pin, 0)
pwmSetup(LED_Monitor_B_Pin, 0)
gpioSetup(Pause_Switch_Pin, "in") # BBB has internal pull down resistors
gpioSetup(Hall_Limit_Pin, "in")
gpioSetup(PIR_Pin, "in")
# ---------------------------------------
# initial idle-state duration; re-randomized on each state 1000 exit
State1000_Duration = random.uniform(State1000_Min_Duration, State1000_Max_Duration) # random time between min and max
def Software_PauseToggle():
    """Flip the software pause flag (invoked from the web interface)."""
    global Program_Paused_Software
    # XOR with True inverts a boolean, same as `not`
    Program_Paused_Software ^= True
def Play_WAV(filename):
    """Play a WAV file asynchronously via the `aplay` command.

    Uses list-form argv (no shell) so a filename containing shell
    metacharacters cannot inject commands.
    """
    try:
        subprocess.Popen(["aplay", filename])
    except OSError:
        # aplay missing/not executable: audio is non-critical, keep running
        pass
def LED_SetAll(val=0):
    """Set every LED PWM channel to full ON (val 1 or "1") or full OFF (anything else)."""
    if val == 1 or val == "1":
        val = Full_ON
    else:
        val = Full_OFF
    # parenthesized single-argument print is valid Python 2 and 3
    print("Set all LED's: " + str(val))
    pwmSetDutyCycle(LED_Brain_Activity_Pin, val)
    pwmSetDutyCycle(LED_Eye1_Pin, val)
    pwmSetDutyCycle(LED_Eye2_Pin, val)
    pwmSetDutyCycle(LED4_Pin, val)
    pwmSetDutyCycle(LED_Monitor_R_Pin, val)
    pwmSetDutyCycle(LED_Monitor_G_Pin, val)
    pwmSetDutyCycle(LED_Monitor_B_Pin, val)
def WebCallback(functionName=None, arg1=None, arg2=None):
    """Dispatch a web-UI signal to a module-level function by name.

    Builds source text like "func('a','b')" and executes it; arguments are
    passed as strings, and falsy args are treated as absent (original
    behavior kept).

    WARNING: exec() on request-supplied text is code injection unless the
    web layer restricts functionName/args -- flagged, kept for compatibility.
    """
    args = "()"
    if arg1 and not arg2:
        args = "('" + str(arg1) + "')"
    elif arg1 and arg2:
        args = "('" + str(arg1) + "','" + str(arg2) + "')"
    # exec() call form is valid in both Python 2 and Python 3
    exec(functionName + args)
# register WebCallback so web-UI "call_function" signals can invoke module functions by name
dispatcher.connect( WebCallback, signal="call_function", sender=dispatcher.Any )
'''
Stepper motor function, non-blocking
'''
def Stepper_Command(Steps, direction, pins):
    """Advance the head stepper by one half-step (non-blocking).

    Writes the coil pattern for the current sequence index `Steps` to the
    four stepper pins, then returns the next index, wrapping within 0..7.

    Steps: current index into the 8-entry half-step sequence.
    direction: True increments through the sequence, False decrements.
    pins: unused; kept for interface compatibility with existing callers.
    """
    # 8-entry half-step coil energizing sequence: (pin0, pin1, pin2, pin3)
    half_step = (
        (0, 0, 0, 1),
        (0, 0, 1, 1),
        (0, 0, 1, 0),
        (0, 1, 1, 0),
        (0, 1, 0, 0),
        (1, 1, 0, 0),
        (1, 0, 0, 0),
        (1, 0, 0, 1),
    )
    if Steps in range(8):
        p0, p1, p2, p3 = half_step[Steps]
    else:
        # out-of-range index: de-energize all coils (original fallback)
        p0, p1, p2, p3 = 0, 0, 0, 0
    gpioOutput(StepperPin0, p0)
    gpioOutput(StepperPin1, p1)
    gpioOutput(StepperPin2, p2)
    gpioOutput(StepperPin3, p3)
    if direction:
        Steps += 1
    else:
        Steps -= 1
    # wrap the sequence index back into 0..7
    if Steps > 7: Steps = 0
    if Steps < 0: Steps = 7
    return Steps
'''
End of, Stepper motor function, non-blocking
'''
'''
Threshold led function
'''
def threshold_led(in_val):
    """Clamp an LED brightness value into the 0-100 duty-cycle range."""
    if in_val > 100:
        return 100
    if in_val < 0:
        return 0
    return in_val
'''
End of Threshold led function
'''
'''
Function for generating random gaussian numbers given a standard deviation and Mean
using the Box-Muller method.
'''
def box_muller(Sigma, Mean):
    """Return a Gaussian-distributed random number (Box-Muller polar method).

    Sigma: standard deviation of the distribution.
    Mean: mean of the distribution.

    Fixes vs. the original: samples are drawn uniformly from [-1, 1]
    (the old `uniform(0, 99)/100` mapping only covered [-1, 0.98], biasing
    the output), and w == 0 is rejected so math.log(0) can never be hit.
    """
    while True:
        x1 = random.uniform(-1.0, 1.0)
        x2 = random.uniform(-1.0, 1.0)
        w = x1 * x1 + x2 * x2
        # keep only points strictly inside the unit circle, excluding the origin
        if 0.0 < w < 1.0:
            break
    w = math.sqrt((-2.0 * math.log(w)) / w)
    return Mean + x1 * w * Sigma
'''
End of, Function for generating random gaussian numbers given a standard deviation and Mean
using the Box-Muller method.
'''
try:
    while True:
        pause = False
        Now_time = timer() # update program time keeper
        # debug printing once/second
##        if Now_time - Debug_Print_Timer >= 1:
##            Debug_Print_Timer = Now_time
##            print("Head position: ",Head_Pos_Tracker)
        # pause either from the web UI flag or the physical switch
        if Program_Paused_Software == True:
            pause = True
        else:
            Program_Paused_Button = gpioInput(Pause_Switch_Pin) # check state of program ON OFF switch
            pause = Program_Paused_Button
        PIR_Trigger = gpioInput(PIR_Pin)
        Limit_Hit = not gpioInput(Hall_Limit_Pin) # normally high switch
        # limit switch tripped outside the homing states -> enter recovery state 5000
        if Bot_State > 500 and Bot_State < 5000 and Limit_Hit:
            Homing_Needed = True
            Bot_State = 5000
            Homing_Hit_Count += 1
        elif Bot_State == 5000 and Limit_Hit: # this state backs the head off of whatever limit it hit
            # NOTE(review): direction is re-toggled on EVERY pass while the limit
            # stays hit; looks like it was meant to flip once on entry -- confirm
            Stepper_Direction = not Stepper_Direction
            if Send_Step_Command:
                Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
                if not Stepper_Direction: Head_Pos_Tracker += 1
                else: Head_Pos_Tracker -= 1
                Send_Step_Command = False
        elif Bot_State == 5000 and not Limit_Hit:
            Bot_State = 200 # homing routine
        time.sleep(0.001)
        if not pause:
            # motion detected -> refresh the PIR inactivity timer
            if PIR_Trigger:
                PIR_time = Now_time
                PIR_Paused = False
            # auto-pause after PIR_Max_Time without motion; warn during last 10%
            if Now_time - PIR_time > PIR_Max_Time:
                PIR_Paused = True
            elif Now_time - PIR_time > PIR_Max_Time*0.9:
                print(int(PIR_Max_Time - (Now_time - PIR_time))," secs till pause...")
            if not PIR_Paused:
                # State transitions
                # ---------------------------------------
                # states cycle 1000 -> 2000 -> 2100 -> 2200 -> 2300 -> 2400 -> 1000
                if Exit_State:
                    Exit_State = False
                    if Bot_State == 1000:
                        Bot_State = 2000
                        State1000_Duration = random.uniform(State1000_Min_Duration, State1000_Max_Duration) # generate new random state 1 duration
                    elif Bot_State == 2000: Bot_State = 2100
                    elif Bot_State == 2100: Bot_State = 2200
                    elif Bot_State == 2200: Bot_State = 2300
                    elif Bot_State == 2300: Bot_State = 2400
                    elif Bot_State == 2400: Bot_State = 1000
                    print("Entering state: ")
                    print(Bot_State)
                    Step_Count = 0
                # this chunk can be condensed
                # request a state change once the state's duration has elapsed
                if Bot_State == 1000 and Now_time - State_Timer > State1000_Duration and not Do_Stepper_Move:
                    State_Timer = Now_time
                    Exit_State = True
                elif Bot_State == 2000 and Now_time - State_Timer > State2000_Duration:
                    State_Timer = Now_time
                    Exit_State = True
                elif Bot_State == 2100 and Now_time - State_Timer > State2100_Duration and not Do_Stepper_Move:
                    State_Timer = Now_time
                    Exit_State = True
                elif Bot_State == 2200 and Now_time - State_Timer > State2200_Duration:
                    State_Timer = Now_time
                    Exit_State = True
                    Blinking = False # reset on state exit
                    Blink_Count = 0 # reset on state exit
                    # decide which way state 2300 must count to re-center the head
                    if Head_Pos_Tracker < Center_Head_Step: Count_Up = True
                    elif Head_Pos_Tracker > Center_Head_Step: Count_Up = False
                    else: print("Unexpected head position.")
                elif Bot_State == 2300 and Now_time - State_Timer > State2300_Duration and not Do_Stepper_Move:
                    State_Timer = Now_time
                    Exit_State = True
                elif Bot_State == 2400 and Now_time - State_Timer > State2400_Duration:
                    State_Timer = Now_time
                    Exit_State = True
                # a pending homing request preempts the idle state
                if Bot_State == 1000 and Homing_Needed:
                    Bot_State = 200
                    State_Timer = Now_time
                    Exit_State = True
                # End of State Transitions
                # ---------------------------------------
                # Homing Sequence
                # ---------------------------------------
                # there is an uncovered case here that may need to be addressed, that is if the program boots
                # with the head not between the two limit switches or sitting on the right limit switch
                if Bot_State == 200 and Homing_Needed:
                    Homing_Needed = False
                    print("Homing routine in progress...")
                if Bot_State == 200 and not Limit_Hit: # Go left till hall triggers
                    Stepper_Direction = Look_Left
                    if Send_Step_Command:
                        Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
                        if not Stepper_Direction: Head_Pos_Tracker += 1
                        else: Head_Pos_Tracker -= 1
                        Send_Step_Command = False
                elif Bot_State == 200 and Limit_Hit:
                    Head_Pos_Tracker = 0 # resetting the global position tracker meaning left is 0 position
                    Step_Count = 0 # this could solve the head angle drift issue
                    Bot_State = 205 # Move off limit magnet
                    print("Hit limit switch")
                elif Bot_State == 205 and Limit_Hit: # Move off limit magnet
                    Stepper_Direction = Look_Right
                    if Send_Step_Command:
                        Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
                        if not Stepper_Direction: Head_Pos_Tracker += 1
                        else: Head_Pos_Tracker -= 1
                        Send_Step_Command = False
                        Step_Count += 1
                elif Bot_State == 205 and not Limit_Hit: # cleared hall effect limit magnet
                    Bot_State = 206
                elif Bot_State == 206 and not Limit_Hit: # moving towards next hall limit switch
                    Stepper_Direction = Look_Right
                    if Send_Step_Command:
                        Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
                        if not Stepper_Direction: Head_Pos_Tracker += 1
                        else: Head_Pos_Tracker -= 1
                        Send_Step_Command = False
                        Step_Count += 1
                elif Bot_State == 206 and Limit_Hit:
                    Bot_State = 210
                    # NOTE(review): `/` is integer division on Python 2 but float
                    # division on Python 3 -- confirm intended semantics if porting
                    Move_Steps = Step_Count/2
                    Center_Head_Step = Step_Count/2 # step value for center of motion range
                    Steps_In_Head_Range = Step_Count
                    print("Steps in head range: ", Steps_In_Head_Range) # should note if this changes
                    Step_Count = 0
                    print("Hit limit switch 2")
                elif Bot_State == 210:
                    # walk back left to the measured center of the range
                    Stepper_Direction = Look_Left
                    if Send_Step_Command and Step_Count < Move_Steps:
                        Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
                        if not Stepper_Direction: Head_Pos_Tracker += 1
                        else: Head_Pos_Tracker -= 1
                        Send_Step_Command = False
                        Step_Count += 1
                    elif Step_Count >= Move_Steps:
                        Bot_State = 1000
                        print("Bot successfully homed and centered")
                # End of Homing Sequence
                # ---------------------------------------
# ---------------------------------------
# Solenoid timing ---- always running but solenoid outputs are disabled/enabled in the following program section
# ---------------------------------------
if not Solenoid1_ON and not Solenoid2_ON:
if Now_time - Typing_Timer >= Pause_Time:
Typing_Timer = Now_time # reset typing timer
if Right_Hand_Turn:
Solenoid1_ON = True
Right_Hand_Turn = False
elif not Right_Hand_Turn:
Solenoid2_ON = True
Right_Hand_Turn = True
elif Solenoid1_ON or Solenoid2_ON:
if Bot_State >= 2000 and Bot_State < 3000: # fast typing state
Keystroke_Time = Keystroke_Time_Init / 2
Sigma = Sigma_Init / 2
Mean = Mean_Init / 3
else:
Keystroke_Time = Keystroke_Time_Init
Sigma = Sigma_Init
Mean = Mean_Init
if Now_time - Typing_Timer > Keystroke_Time:
Typing_Timer = Now_time # reset typing timer
Solenoid1_ON = False
Solenoid2_ON = False
# at the end of each stroke calculate a new pause time
Pause_Time = box_muller(Sigma, Mean)
if Pause_Time <= Min_Type_Pause: Pause_Time = Min_Type_Pause# don't allow the pause time to be less than the min, this could be edited
# at the end of each stroke decide based on probability if the same hand should type again
Switch_Hand_Val = random.uniform(0, 10)
if Switch_Hand_Val <= 2.5: Right_Hand_Turn = not Right_Hand_Turn
# End of Solenoid timing
# ---------------------------------------
# Solenoid outputs enable/disable based on state
# ---------------------------------------
if (Bot_State >= 2000 and Bot_State < 2300) or Bot_State < 500:
Solenoid1_ON = False
Solenoid2_ON = False
## print("RHT: ", Right_Hand_Turn," Sol1: ", Solenoid1_ON," Sol2: ",Solenoid2_ON)
if Solenoid1_ON: gpioOutput(Solenoid1_Pin, 1)
else: gpioOutput(Solenoid1_Pin, 0)
if Solenoid2_ON: gpioOutput(Solenoid2_Pin, 1)
else: gpioOutput(Solenoid2_Pin, 0)
# End of Solenoid outputs enable/disable based on state
# ---------------------------------------
# LED handling
# ---------------------------------------
# Brain Activity Monitor LED always fades in and out
if Now_time - LED_Millis2 >= LED_Update_Time:
LED_Millis2 = Now_time
if LED_Brain_Activity_Bright >= 100 and Fade_Rate > 0: Fade_Rate = -Fade_Rate
elif LED_Brain_Activity_Bright <= 0 and Fade_Rate < 0: Fade_Rate = -Fade_Rate
LED_Brain_Activity_Bright += Fade_Rate
if Bot_State == 1000:
LED_Eye1_Bright = Full_ON
LED_Eye2_Bright = Full_ON
LED4_Bright = Full_ON
LED_Monitor_Bright = Full_OFF
else:
LED4_Bright = Full_ON
if Bot_State >= 2000 and Bot_State < 2200: # LED monitor rapid Blinking for incoming message
if Now_time - LED_Millis >= Incoming_Message_Blink_Time:
LED_Millis = Now_time
if LED_Monitor_Bright >= Full_ON: LED_Monitor_Bright = Full_OFF
elif LED_Monitor_Bright <= Full_OFF: LED_Monitor_Bright = Full_ON
elif Bot_State == 2200: # LED monitor fading in and out
if Now_time - LED_Millis >= LED_Update_Time:
LED_Millis = Now_time
if LED_Monitor_Bright >= 100 and Fade_Rate > 0: Fade_Rate = -Fade_Rate
elif LED_Monitor_Bright <= Dim_Val and Fade_Rate < 0: Fade_Rate = -Fade_Rate
LED_Monitor_Bright += 2 * Fade_Rate
if Now_time - Blink_Millis >= Blink_Init_Delay and not Blinking:
Blink_Millis = Now_time
Blinking = True
if Blinking and Blink_Count <= Num_Blinks and Now_time - Blink_Millis >= Blink_Duration:
Blink_Millis = Now_time
if LED_Eye1_Bright == Full_ON:
LED_Eye1_Bright = Full_OFF
LED_Eye2_Bright = Full_OFF
else:
LED_Eye1_Bright = Full_ON
LED_Eye2_Bright = Full_ON
Blink_Count += 1
elif Blink_Count > Num_Blinks: # so eyes always end ON even if number of blinks is odd
LED_Eye1_Bright = Full_ON
LED_Eye2_Bright = Full_ON
elif Bot_State >= 2300 and Bot_State < 3000: # LED monitor goes dim
if Now_time - LED_Millis >= LED_Update_Time:
LED_Millis = Now_time
if LED_Monitor_Bright > Dim_Val: LED_Monitor_Bright -= abs(Fade_Rate)
LED_Eye1_Bright = Full_ON
LED_Eye2_Bright = Full_ON
# End of LED handling
# ---------------------------------------
# Stepper handling
# ---------------------------------------
#LowRPM = 1
#HighRPM = 15
#Speed = 15 # tracks from 0 - 100 speed
# can switch to micros to get a bit faster than the current 1 ms which is ~ 15 RPM
if Now_time - Stepper_Command_Timer >= 0.005:
Stepper_Command_Timer = Now_time
Send_Step_Command = True
## if Bot_State == 1000:
## if Now_time - Last_Stepper_Time > Rand_Stepper_Timer and not Do_Stepper_Move:
## Last_Stepper_Time = Now_time
## Do_Stepper_Move = True
## if random.uniform(0, 10) > 5: Stepper_Direction = Look_Right
## else: Stepper_Direction = Look_Left
## Move_Steps = random.uniform(10, 150)
## Rand_Stepper_Timer = random.uniform(4, 10) # time in seconds
##
## if Step_Count >= Move_Steps * 2 or Limit_Hit:
## Step_Count = 0
## Do_Stepper_Move = False # finished move
##
## elif Step_Count < Move_Steps * 2 and Send_Step_Command and Do_Stepper_Move:
## if Step_Count == Move_Steps: Stepper_Direction = not Stepper_Direction
## Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
## if not Stepper_Direction: Head_Pos_Tracker += 1
## else: Head_Pos_Tracker -= 1
## Send_Step_Command = False
## Step_Count += 1
if Bot_State == 2100:
if not Do_Stepper_Move: # set it once when entering the state
Move_Steps = Steps_In_Head_Range - Head_Pos_Tracker - Steps_From_Hall_To_Monitor_Look
print(Steps_In_Head_Range, Head_Pos_Tracker, Steps_From_Hall_To_Monitor_Look, Move_Steps)
Do_Stepper_Move = True
Stepper_Direction = Look_Left
print("move steps:",Move_Steps," Step_Count:",Step_Count)
if Step_Count >= Move_Steps or Limit_Hit:
Do_Stepper_Move = False
Exit_State = True # change state
elif Step_Count < Move_Steps and Send_Step_Command and Do_Stepper_Move:
Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
if not Stepper_Direction: Head_Pos_Tracker += 1
else: Head_Pos_Tracker -= 1
Send_Step_Command = False
Step_Count += 1
elif Bot_State == 2300:
Do_Stepper_Move = True
Stepper_Direction = Look_Right
## print(Head_Pos_Tracker,Center_Head_Step, Step_Count,Move_Steps)
if Count_Up:
if Head_Pos_Tracker >= Center_Head_Step or Limit_Hit:
Do_Stepper_Move = False
elif Head_Pos_Tracker < Center_Head_Step and Send_Step_Command and Do_Stepper_Move:
Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
if not Stepper_Direction: Head_Pos_Tracker += 1
else: Head_Pos_Tracker -= 1
Send_Step_Command = False
Step_Count += 1
elif not Count_Up:
if Head_Pos_Tracker <= Center_Head_Step or Limit_Hit:
Do_Stepper_Move = False
elif Head_Pos_Tracker > Center_Head_Step and Send_Step_Command and Do_Stepper_Move:
Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
if not Stepper_Direction: Head_Pos_Tracker += 1
else: Head_Pos_Tracker -= 1
Send_Step_Command = False
Step_Count += 1
# old system.
## if Step_Count >= Move_Steps or Limit_Hit:
## Do_Stepper_Move = False
## elif Step_Count < Move_Steps and Send_Step_Command and Do_Stepper_Move:
## Steps = Stepper_Command(Steps, Stepper_Direction, pin_list)
## if not Stepper_Direction: Head_Pos_Tracker += 1
## else: Head_Pos_Tracker -= 1
## Send_Step_Command = False
## Step_Count += 1
# End of Stepper handling
                # ---------------------------------------
            # PIR auto-pause branch: blank all LEDs while no motion has been seen
            # (comment below says "physical button" -- condition is the PIR flag;
            # NOTE(review): confirm which pause this was meant to cover)
            elif not Program_Paused_Software and PIR_Paused: # If program paused via physical button
                LED_Brain_Activity_Bright = Full_OFF
                LED_Eye1_Bright = Full_OFF
                LED_Eye2_Bright = Full_OFF
                LED4_Bright = Full_OFF
                LED_Monitor_Bright = Full_OFF
                print("program paused...")
        if not Program_Paused_Software:
            # thresholding LED vals for overshoot on 255 and 0
            # also adjusting to 0-100 scale for python IO library
            LED_Brain_Activity_Bright = threshold_led(LED_Brain_Activity_Bright)
            LED_Eye1_Bright = threshold_led(LED_Eye1_Bright)
            LED_Eye2_Bright = threshold_led(LED_Eye2_Bright)
            LED4_Bright = threshold_led(LED4_Bright)
            LED_Monitor_Bright = threshold_led(LED_Monitor_Bright)
            # writing LED outputs
            pwmSetDutyCycle(LED_Brain_Activity_Pin, LED_Brain_Activity_Bright)
            pwmSetDutyCycle(LED_Eye1_Pin, LED_Eye1_Bright)
            pwmSetDutyCycle(LED_Eye2_Pin, LED_Eye2_Bright)
            pwmSetDutyCycle(LED4_Pin, LED4_Bright)
            # using a single bright var for the monitor will default it to white
            # color variability should be added
            pwmSetDutyCycle(LED_Monitor_R_Pin, LED_Monitor_Bright)
            pwmSetDutyCycle(LED_Monitor_G_Pin, LED_Monitor_Bright)
            pwmSetDutyCycle(LED_Monitor_B_Pin, LED_Monitor_Bright)
except KeyboardInterrupt:
    # Ctrl-C shutdown: de-energize actuators, blank LEDs, release IO
    print("interrupted by user keyboard")
    print("Limit switches hit: ",Homing_Hit_Count )
    gpioOutput(Solenoid1_Pin, 0)
    gpioOutput(Solenoid2_Pin, 0)
    # turning off LEDs at program end
    LED_Brain_Activity_Bright = Full_OFF
    LED_Eye1_Bright = Full_OFF
    LED_Eye2_Bright = Full_OFF
    LED4_Bright = Full_OFF
    LED_Monitor_Bright = Full_OFF
    pwmSetDutyCycle(LED_Brain_Activity_Pin, LED_Brain_Activity_Bright)
    pwmSetDutyCycle(LED_Eye1_Pin, LED_Eye1_Bright)
    pwmSetDutyCycle(LED_Eye2_Pin, LED_Eye2_Bright)
    pwmSetDutyCycle(LED4_Pin, LED4_Bright)
    pwmSetDutyCycle(LED_Monitor_R_Pin, LED_Monitor_Bright)
    pwmSetDutyCycle(LED_Monitor_G_Pin, LED_Monitor_Bright)
    pwmSetDutyCycle(LED_Monitor_B_Pin, LED_Monitor_Bright)
    # GPIO cleanup on exit
    pwmCleanup()
    gpioCleanup()
    Web_Server.shutdown()
|
# 2019 카카오 개발자 겨울 인턴십 / 튜플
# https://programmers.co.kr/learn/courses/30/lessons/64065?language=python3
import sys
# competitive-programming boilerplate: fast stdin reads; unused below since
# `s` is hard-coded -- NOTE(review): this shadows the builtin input()
input = sys.stdin.readline
def solution(s):
    """Recover the original tuple from its set-notation string (Kakao 2019).

    `s` looks like "{{2},{2,1},{2,1,3}}". Sorting the inner groups by string
    length orders them by subset size (each group contains all numbers of the
    previous one plus one more, so its text is strictly longer); the element
    newly introduced at each size is appended in order to rebuild the tuple.

    Returns the tuple's elements as a list of ints.
    """
    answer = []
    seen = set()  # O(1) membership instead of scanning `answer` each time
    groups = s[2:-2].split("},{")
    groups.sort(key=len)
    for group in groups:
        for token in group.split(","):
            num = int(token)  # convert once, reuse below
            if num not in seen:
                seen.add(num)
                answer.append(num)
    return answer
# sample input from the problem statement; expected output: [2, 1, 3, 4]
s = "{{1,2,3},{2,1},{1,2,4,3},{2}}"
print(solution(s))
#it prints the birthday message
def name():
    """Prompt for the user's name and print a birthday greeting."""
    # input() already returns a str in Python 3; the str() wrapper was redundant
    user_name = input("Enter your name\n")
    print("Happy Birthday", user_name)
name()
def count_set_bits(n):
    """Return the number of set bits in non-negative integer n.

    Checks each bit with shift-and-mask.

    Raises:
        ValueError: if n is negative. Python's right shift is arithmetic, so
        a negative n never reaches 0 and the original loop would never end.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    count = 0
    while n != 0:
        if (n & 1) > 0:
            count += 1
        n = n >> 1
    return count
def count_set_bits_brian_kerningam(n):
    """Count set bits in n using Brian Kernighan's technique.

    `n & (n - 1)` clears the lowest set bit, so the loop runs once per set
    bit rather than once per bit position.
    """
    total = 0
    while n > 0:
        n &= n - 1  # drop the lowest set bit
        total += 1
    return total
print(count_set_bits_brian_kerningam(7))
import configparser
import os.path
import requests
import smtplib
import json
# Load settings, preferring a user-specific override file when present.
config = configparser.ConfigParser()
if os.path.isfile('weathermailer_custom.conf'):
    config.read('weathermailer_custom.conf')
else:
    config.read('weathermailer.conf')
# (name, number) pairs from the [phonenumbers] section
phonenumber = list(config.items('phonenumbers'))
smtpserver = config['SETTINGS']['smtpserver']
coords = config['SETTINGS']['coords']
class weather():
    """Fetch a forecast from api.weather.gov and text it via an SMTP-to-SMS gateway."""

    def __init__(self, phonenumber, smtpserver, coords):
        # phonenumber: iterable of (name, number) pairs to notify
        self.phonenumber = phonenumber
        self.smtpserver = smtpserver
        self.coords = coords
        self.url = 'https://api.weather.gov/points/{0}/forecast'.format(self.coords)
        self.message = ""
        self.subject = ""

    def getweather(self):
        # Pull the forecast JSON and keep the periods of interest.
        self.response = requests.get(self.url).json()
        periods = self.response['properties']['periods']
        self.tonightweather = periods[1]['detailedForecast']
        self.allweatherstatuses = periods
        self.afternoonweather = periods[0]['detailedForecast']

    def sendmessage(self):
        # SMS gateways truncate long mails, so split into 100-character chunks.
        self.smtpobject = smtplib.SMTP(self.smtpserver, 25)
        self.messagelenth = len(self.message)
        self.messages2send = int(self.messagelenth / 100) + (self.messagelenth % 100 > 0)
        for order, number in self.phonenumber:
            if self.messages2send == 1:
                self.smtpobject.sendmail(self.subject, str(number), self.message)
            else:
                for chunk in range(self.messages2send):
                    start = 100 * chunk
                    self.smtpobject.sendmail(self.subject, str(number), self.message[start:start + 100])
            print("Sent the following text '{0}' to {1} in {2} messages".format(self.message, str(number), self.messages2send))
if __name__ == "__main__":
    # NOTE(review): rebinding `weather` shadows the class name from here on
    weather = weather(phonenumber, smtpserver, coords)
    weather.getweather()
    # text today's afternoon forecast
    weather.message = weather.afternoonweather
    weather.subject = "WeatherBot"
    weather.sendmessage()
#!/usr/bin/env python
import sys
import os
import argparse
from termcolor import colored
import json
from json import encoder
import logging
import subprocess
from ampyutils import exeprogram, amutils, location
import am_config as amc
__author__ = 'amuls'
def treatCmdOpts(argv: list):
    """
    Treats the command line options.

    Returns (rootDir, binFile, binType, rinexDir, rinexVersion, gnssSyst,
    rinexNaming, overwrite, logLevels) as parsed from argv.
    """
    baseName = os.path.basename(__file__)
    amc.cBaseName = colored(baseName, 'yellow')  # module-wide colored program name
    helpTxt = baseName + ' convert binary raw data from SBF or UBlox to RINEX Obs & Nav files'
    # create the parser for command line arguments
    # (removed an unused cFuncName local that was computed but never referenced)
    parser = argparse.ArgumentParser(description=helpTxt)
    parser.add_argument('-d', '--dir', help='Root directory (default {:s})'.format(colored('.', 'green')), required=False, type=str, default='.')
    parser.add_argument('-f', '--file', help='Binary SBF or UBlox file', required=True, type=str)
    parser.add_argument('-b', '--binary', help='Select binary format (default {:s})'.format(colored('SBF', 'green')), required=False, type=str, choices=['SBF', 'UBlox'], default='SBF')
    parser.add_argument('-r', '--rinexdir', help='Directory for RINEX output (default {:s})'.format(colored('.', 'green')), required=False, type=str, default='.')
    parser.add_argument('-v', '--rinexver', help='Select RINEX version (default {:s})'.format(colored('R3', 'green')), required=False, choices=['R3', 'R2'], default='R3')
    parser.add_argument('-g', '--gnss', help='GNSS systems to process (default={:s})'.format(colored('gal', 'green')), required=False, default='gal', choices=['gal', 'gps', 'com'])
    parser.add_argument('-n', '--naming', help='Enter MARKER DOY YY for naming RINEX output files', nargs=3, required=True)
    # parser.add_argument('-c', '--convfile', help='Converted name of RINEX file (default named by converter program)', required=False, default=None)
    parser.add_argument('-o', '--overwrite', help='overwrite intermediate files (default {:s})'.format(colored('False', 'green')), action='store_true', required=False)
    parser.add_argument('-l', '--logging', help='specify logging level console/file (default {:s})'.format(colored('INFO DEBUG', 'green')), nargs=2, required=False, default=['INFO', 'DEBUG'], choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'NOTSET'])
    # drop argv[0]
    args = parser.parse_args(argv[1:])
    # return arguments
    return args.dir, args.file, args.binary, args.rinexdir, args.rinexver, args.gnss, args.naming, args.overwrite, args.logging
def checkValidityArgs(logger: logging.Logger) -> bool:
    """
    checks for existence of dirs/files

    Validates and normalizes the CLI values stored in amc.dRTK (rootDir,
    binFile, rinexDir, rinexNaming) and creates the RINEX output directory.
    Returns amc.E_SUCCESS on success, otherwise an amc error code.
    NOTE(review): annotated -> bool but actually returns amc error codes.
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')
    # change to baseDir, everything is relative to this directory
    logger.info('{func:s}: check existence of rootDir {root:s}'.format(func=cFuncName, root=amc.dRTK['rootDir']))
    amc.dRTK['rootDir'] = os.path.expanduser(amc.dRTK['rootDir'])
    if not os.path.exists(amc.dRTK['rootDir']):
        logger.error('{func:s} !!! Dir {basedir:s} does not exist.'.format(func=cFuncName, basedir=amc.dRTK['rootDir']))
        return amc.E_INVALID_ARGS
    # make the complete filename by adding to rootdir and check existence of binary file to convert
    amc.dRTK['binFile'] = os.path.join(amc.dRTK['rootDir'], amc.dRTK['binFile'])
    logger.info('{func:s}: check existence of binary file {bin:s} to convert'.format(func=cFuncName, bin=amc.dRTK['binFile']))
    if not os.access(amc.dRTK['binFile'], os.R_OK):
        logger.error('{func:s} !!! binary observation file {bin:s} not accessible.'.format(func=cFuncName, bin=amc.dRTK['binFile']))
        return amc.E_FILE_NOT_EXIST
    # check existence of rinexdir and create if needed
    logger.info('{func:s}: check existence of rinexdir {rinex:s} and create if needed'.format(func=cFuncName, rinex=amc.dRTK['rinexDir']))
    # amc.dRTK['rinexDir'] = os.path.join(amc.dRTK['rootDir'], amc.dRTK['rinexDir'])
    amutils.mkdir_p(amc.dRTK['rinexDir'])
    # check whether the rinexNaming arguments are correctly formatted
    amc.dRTK['marker'] = amc.dRTK['rinexNaming'][0]
    amc.dRTK['doy'] = amc.dRTK['rinexNaming'][1]
    amc.dRTK['yy'] = amc.dRTK['rinexNaming'][2]
    if (len(amc.dRTK['marker']) < 4) or (len(amc.dRTK['doy']) != 3) or (len(amc.dRTK['yy']) != 2):
        logger.error('{func:s}: Please enter rinexNaming as follows'.format(func=cFuncName))
        logger.error('{func:s}: ... marker {marker:s} at least 4 chars'.format(func=cFuncName, marker=amc.dRTK['marker']))
        logger.error('{func:s}: ... doy {doy:s} exact 3 chars'.format(func=cFuncName, doy=amc.dRTK['doy']))
        logger.error('{func:s}: ... yy {yy:s} exact 2 chars'.format(func=cFuncName, yy=amc.dRTK['yy']))
        return amc.E_INVALID_ARGS
    return amc.E_SUCCESS
def sbf2rinex(logger: logging.Logger, dGnssSysts: dict):
    """
    sbf2rinex converts a SBF file to RINEX Obs & Nav files according to the
    GNSS systems selected.

    logger: logger for progress/error reporting.
    dGnssSysts: maps RINEX system letter (G, E, ...) to its name; systems not
        matching amc.dRTK['gnssSyst'] are excluded from the conversion.

    Side effects: stores the output file names in amc.dRTK['obs'] / ['nav']
    and runs the external sbf2rin program twice (observation + navigation).
    Exits the program via sys.exit on converter failure.
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')

    def run_sbf2rin(cmdArgs, rnxType):
        # Run sbf2rin once; log and terminate the program on any failure.
        logger.info('{func:s}: creating RINEX {typ:s} file\n{opts!s}'.format(func=cFuncName, typ=rnxType, opts=' '.join(cmdArgs)))
        try:
            subprocess.check_call(cmdArgs)
        except subprocess.CalledProcessError as e:
            # handle errors in the called executable
            logger.error('{func:s}: subprocess {proc:s} returned error code {err!s}'.format(func=cFuncName, proc=amc.dRTK['bin2rnx']['SBF2RIN'], err=e))
            sys.exit(amc.E_SBF2RIN_ERRCODE)
        except OSError as e:
            # executable not found
            logger.error('{func:s}: subprocess {proc:s} returned error code {err!s}'.format(func=cFuncName, proc=amc.dRTK['bin2rnx']['SBF2RIN'], err=e))
            sys.exit(amc.E_OSERROR)

    # convert to RINEX for selected GNSS system
    logger.info('{func:s}: RINEX conversion for {gnss:s}'.format(func=cFuncName, gnss=colored(amc.dRTK['gnssSyst'], 'green')))
    # determine systems to exclude, adjust when COM is asked meaning use GAL+GPS
    typeNav = ''
    if amc.dRTK['gnssSyst'].lower() == 'com':
        excludeGNSSs = [key for key, value in dGnssSysts.items() if not (value.lower().startswith('gal') or value.lower().startswith('gps'))]
        typeNav = 'P'  # mixed (GAL+GPS) navigation file
    else:
        excludeGNSSs = [key for key, value in dGnssSysts.items() if not value.lower().startswith(amc.dRTK['gnssSyst'].lower())]
        if amc.dRTK['gnssSyst'].lower() == 'gps':
            typeNav = 'N'
        elif amc.dRTK['gnssSyst'].lower() == 'gal':
            typeNav = 'E'
    logger.info('{func:s}: excluding GNSS systems {excl!s}'.format(func=cFuncName, excl=excludeGNSSs))
    # arguments shared by the observation and navigation conversions
    commonArgs = [amc.dRTK['bin2rnx']['SBF2RIN'], '-f', amc.dRTK['binFile'], '-x', ''.join(excludeGNSSs), '-s', '-D', '-v']
    verArg = '-R3' if amc.dRTK['rinexVersion'] == 'R3' else '-R210'
    # create the output RINEX obs file name and convert
    amc.dRTK['obs'] = '{marker:s}{doy:s}0.{yy:s}O'.format(marker=amc.dRTK['marker'], doy=amc.dRTK['doy'], yy=amc.dRTK['yy'])
    amc.dRTK['obs'] = os.path.join(amc.dRTK['rinexDir'], amc.dRTK['obs'])
    run_sbf2rin(commonArgs + [verArg, '-o', amc.dRTK['obs']], 'observation')
    # create the output RINEX nav file name and convert
    amc.dRTK['nav'] = '{marker:s}{doy:s}0.{yy:s}{typenav:s}'.format(marker=amc.dRTK['marker'], doy=amc.dRTK['doy'], yy=amc.dRTK['yy'], typenav=typeNav)
    amc.dRTK['nav'] = os.path.join(amc.dRTK['rinexDir'], amc.dRTK['nav'])
    run_sbf2rin(commonArgs + ['-n', typeNav, verArg, '-o', amc.dRTK['nav']], 'navigation')
def ublox2rinex(logger: logging.Logger, dGnssSysts: dict):
    """
    ublox2rinex converts a uBLOX file to rinex according to the GNSS systems
    selected (the actual conversion step is not implemented yet).
    """
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')
    logger.info('{func:s}: RINEX conversion for {gnss:s}'.format(func=cFuncName, gnss=amc.dRTK['gnssSyst']))
    # work out which GNSS letters to drop; 'com' keeps both Galileo and GPS
    wanted = amc.dRTK['gnssSyst'].lower()
    keep = ('gal', 'gps') if wanted == 'com' else (wanted,)
    # str.startswith accepts a tuple of prefixes
    excludeGNSSs = [letter for letter, system in dGnssSysts.items() if not system.lower().startswith(keep)]
    logger.info('{func:s}: excluding GNSS systems {excl!s}'.format(func=cFuncName, excl=excludeGNSSs))
def main(argv):
    """
    pyconvbin converts raw data from SBF/UBlox to RINEX.

    Parses the command line, validates the arguments, locates the external
    converter binaries (sbf2rin / convbin) and dispatches to the proper
    converter based on the binary file type.  All run-time state is shared
    through the module-global dict ``amc.dRTK``.
    """
    amc.cBaseName = colored(os.path.basename(__file__), 'yellow')
    cFuncName = colored(os.path.basename(__file__), 'yellow') + ' - ' + colored(sys._getframe().f_code.co_name, 'green')
    # limit float precision
    encoder.FLOAT_REPR = lambda o: format(o, '.3f')
    # dictionary of GNSS systems (RINEX system letter -> system name)
    dGNSSSysts = {'G': 'GPS', 'R': 'Glonass', 'E': 'Galileo', 'S': 'SBAS', 'C': 'Beidou', 'J': 'QZSS', 'I': 'IRNSS'}
    # treat command line options
    rootDir, binFile, binType, rinexDir, rinexVersion, gnssSyst, rinexNaming, overwrite, logLevels = treatCmdOpts(argv)
    # create logging for better debugging
    logger = amc.createLoggers(os.path.basename(__file__), dir=rootDir, logLevels=logLevels)
    # store cli parameters in the shared state dict
    # NOTE(review): `overwrite` is parsed but never stored or used here -- confirm intent.
    amc.dRTK = {}
    amc.dRTK['rootDir'] = rootDir
    amc.dRTK['binFile'] = binFile
    amc.dRTK['binType'] = binType
    amc.dRTK['rinexDir'] = rinexDir
    amc.dRTK['rinexVersion'] = rinexVersion
    amc.dRTK['gnssSyst'] = gnssSyst
    amc.dRTK['rinexNaming'] = rinexNaming
    logger.info('{func:s}: arguments processed: amc.dRTK = {drtk!s}'.format(func=cFuncName, drtk=amc.dRTK))
    # check validity of passed arguments; abort with the returned code on failure
    retCode = checkValidityArgs(logger=logger)
    if retCode != amc.E_SUCCESS:
        logger.error('{func:s}: Program exits with code {error:s}'.format(func=cFuncName, error=colored('{!s}'.format(retCode), 'red')))
        sys.exit(retCode)
    # locate the conversion programs SBF2RIN and CONVBIN on this machine
    amc.dRTK['bin2rnx'] = {}
    amc.dRTK['bin2rnx']['CONVBIN'] = location.locateProg('convbin', logger)
    amc.dRTK['bin2rnx']['SBF2RIN'] = location.locateProg('sbf2rin', logger)
    # convert binary file to rinex, dispatching on the receiver type
    logger.info('{func:s}: convert binary file to rinex'.format(func=cFuncName))
    if amc.dRTK['binType'] == 'SBF':
        sbf2rinex(logger=logger, dGnssSysts=dGNSSSysts)
    else:
        ublox2rinex(logger=logger, dGnssSysts=dGNSSSysts)
    # report to the user
    logger.info('{func:s}: amc.dRTK =\n{json!s}'.format(func=cFuncName, json=json.dumps(amc.dRTK, sort_keys=False, indent=4)))


if __name__ == "__main__":  # Only run if this file is called directly
    main(sys.argv)
|
import json
import logging
import os
import random
import time
import requests
import pyfiglet
from pathlib import Path
from flask import (Flask, flash, jsonify, make_response, redirect,
render_template, request, url_for)
from flask_wtf import FlaskForm
# Flask app; templates and static assets live two levels up in resources/.
app = Flask(__name__, template_folder="../../resources/templates",
            static_folder="../../resources/static")
# Secret key used for session signing / CSRF protection (FlaskForm).
# NOTE(review): a hard-coded secret key should not be committed to source
# control -- consider loading it from the environment.
app.config['SECRET_KEY'] = '\xf0\x9524"C\xa2\xdd\xac\xc6\xa2O\t\xaf\x0bA\x96,5\xe5r\x96\x99\xc8'
@app.route("/")
def home():
'''
Endpoint serving about home page and form
'''
form = FlaskForm()
if form.validate_on_submit():
return redirect('/')
return render_template("home.html", title='Home', form=form)
@app.route('/asciiart', methods=['POST'])
def asciiart():
    """Render the POSTed ``text`` form field as ASCII art.

    Returns the generated art as plain text with HTTP 201, or a JSON
    error payload with HTTP 400 when no ``text`` field was supplied.
    """
    text = request.form.get('text')
    if text is None:
        # BUG FIX: the original built an error dict here but then fell
        # through to `return response['art']` with `response` undefined,
        # raising NameError (HTTP 500). Return the error to the client.
        return jsonify({'error': 'invalid input'}), 400
    response = generate_art_task(text)
    return response['art'], 201, {'Content-Type': 'text/plain'}
def generate_art_task(text):
    '''
    Background task that renders *text* as an ASCII-art banner.

    Returns a dict with keys "text" and "art"; on failure "art" is None
    and an extra "error" key describes the problem.
    '''
    # BUG FIX: the original only assigned `result` inside the try block,
    # so any exception from figlet_format left it unbound and the final
    # `return result` raised UnboundLocalError.
    result = {
        "text": text,
        "art": None
    }
    try:
        ascii_banner = pyfiglet.figlet_format(text)
        time.sleep(5)  # simulate a long-running job
        result["art"] = ascii_banner
    except Exception as e:
        print(e)
        result["error"] = str(e)
    print(' [generate_art_task] completed')
    return result
@app.route("/about")
def about():
'''
Endpoint serving about page
'''
return render_template("about.html") |
#necessary
import pygame, sys, time, random
from pygame.locals import *
#creating the 'Hoop' class
class Hoop(pygame.sprite.Sprite):
    """Basketball hoop sprite.

    Only the 1px-high top line of the hoop is used as the collision
    rectangle, so merely touching the rim does not count as a score.
    """

    def __init__(self, x):
        super().__init__()
        self.x = x
        # 45px-wide, 1px-high strip at the top of the hoop (y = 355)
        self.rect = pygame.Rect(self.x, 355, 45, 1)

    def return_x(self):
        """Current horizontal position of the hoop."""
        return self.rect.x

    def move_right(self):
        """Shift the hoop 5px to the right."""
        self.rect.x += 5

    def move_left(self):
        """Shift the hoop 5px to the left."""
        self.rect.x -= 5

    def reset(self):
        """Respawn the hoop at a random x in {5, 10, ..., 200} so rounds differ."""
        self.rect.x = random.randint(1, 40) * 5
|
import requests
from PyQt5.QtCore import QThread
from PyQt5.QtCore import pyqtSignal
from bs4 import BeautifulSoup
# Mobile user agent so the forum serves the lightweight mobile markup parsed below.
agent = 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.8 Mobile Safari/537.36'
# First page of the target thread (mobile view).
baseUrl = "http://club.sanguosha.com/forum.php?mod=viewthread&tid=206633&page=1&mobile=2"
headers = {
    'Connection': 'keep-alive',
    'Host': 'club.sanguosha.com',
    'User-Agent': agent,
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate, sdch',
    'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6'
}
r = requests.get(baseUrl, headers=headers)
msgList = []
soup = BeautifulSoup(r.text, 'lxml')
# Each 'plc cl' div is one reply post; 'display pi' marks real post bodies.
for replyContent in soup.find_all(class_='plc cl'):
    if replyContent.find(class_='display pi'):
        reply = {}
        # NOTE(review): 'avatar' and 'message' are stored as bs4 Tag objects,
        # not text (only 'author' calls get_text()) -- confirm this is intended.
        reply['avatar'] = replyContent.find(class_='avatar')
        reply['author'] = replyContent.find(class_='blue').get_text()
        reply['message'] = replyContent.find(class_='message')
        msgList.append(reply)
        print(reply)
|
import unittest
import pytest
from unittest.mock import patch, MagicMock
import resource_manager.src.util.common_test_utils as common_test_utils
import resource_manager.src.util.boto3_client_factory as client_factory
SERVICE_NAME = 'some_service'
@pytest.mark.unit_test
class TestCommonTestUtil(unittest.TestCase):
    """Unit tests for resource_manager.src.util.common_test_utils."""

    def setUp(self):
        # Patch boto3.client so no real AWS call is made; route 'ec2'
        # requests (from either boto3 directly or a session object) to
        # the same MagicMock so tests can stub describe_* responses.
        self.patcher = patch('boto3.client')
        self.client = self.patcher.start()
        self.mock_ec2 = MagicMock()
        self.session_mock = MagicMock()
        self.side_effect_map = {
            'ec2': self.mock_ec2
        }
        self.client.side_effect = lambda service_name, config=None: self.side_effect_map.get(service_name)
        self.session_mock.client.side_effect = lambda service_name, config=None: \
            self.side_effect_map.get(service_name)

    def tearDown(self):
        # Undo the boto3 patch and clear the factory caches between tests.
        self.patcher.stop()
        client_factory.clients = {}
        client_factory.resources = {}

    def test_put_to_ssm_test_cache(self):
        # Case 1: fresh cache gains {key: {property: value}}.
        cache = {}
        cache_key = "key"
        cache_property = "property"
        value = "value"
        common_test_utils.put_to_ssm_test_cache(cache, cache_key, cache_property, value)
        self.assertEqual({'key': {'property': 'value'}}, cache)
        # Case 2: a None key stores the property at the top level.
        cache = {}
        cache_key = None
        cache_property = "property"
        value = "value"
        common_test_utils.put_to_ssm_test_cache(cache, cache_key, cache_property, value)
        self.assertEqual({'property': 'value'}, cache)
        # Case 3: existing properties under the same key are preserved.
        cache = {'key': {'property2': 'value2'}}
        cache_key = "key"
        cache_property = "property"
        value = "value"
        common_test_utils.put_to_ssm_test_cache(cache, cache_key, cache_property, value)
        self.assertEqual({'key': {'property': 'value', 'property2': 'value2'}}, cache)

    def test_extract_all_from_input_parameters(self):
        # {{cfn-output:stack>key}} placeholders are resolved from cf_output.
        cache = {}
        alarms = {}
        input_parameters = """|a|b|
|{{cfn-output:test_stack>test_key}}|c|"""
        cf_output = {'test_stack': {'test_key': 'test_val'}}
        res = common_test_utils.extract_all_from_input_parameters(cf_output, cache, input_parameters, alarms)
        self.assertEqual({'a': 'test_val', 'b': 'c'}, res)

    def test_do_cache_by_method_of_service(self):
        # The Output-* jsonpaths select values from the mocked API response
        # and land in the cache under the given step key ('before').
        self.client.some_method.return_value = {'Key2': 'Key2Value', 'Key3': 'Key3Value1'}
        actual_ssm_test_cache = {}
        expected_ssm_test_cache = {'before': {'Key2': ['Key2Value'], 'Key3': ['Key3Value1']}}
        common_test_utils.do_cache_by_method_of_service("before", "some_method",
                                                        {'Input-Key1': 'Value1', 'Input-Key2': ['Value1'],
                                                         'Output-Key2': '$.Key2', 'Output-Key3': ['$.Key3']},
                                                        self.client, actual_ssm_test_cache)
        self.assertEqual(expected_ssm_test_cache, actual_ssm_test_cache)

    def test_do_cache_by_method_of_service_raise(self):
        # Multiple jsonpaths for a single output key must be rejected.
        self.assertRaises(AssertionError,
                          common_test_utils.do_cache_by_method_of_service, "before", "some_method",
                          {'Input-Key1': 'Value1', 'Input-Key2': ['Value1'],
                           'Output-Key2': '$.Key2', 'Output-Key3': ['$.Key3_1', '$.Key3_2']},
                          self.client, {})

    def test_extract_and_cache_param_values(self):
        # Resolved parameter values are cached under the given step key.
        cf_output = {'test_stack': {'test_key': 'test_val'}}
        input_parameters = """|a|b|
|{{cfn-output:test_stack>test_key}}|c|"""
        param_list = 'a,b'
        resource_manager = MagicMock()
        resource_manager.get_cfn_output_params.return_value = cf_output
        ssm_test_cache = {}
        common_test_utils.extract_and_cache_param_values(
            input_parameters,
            param_list,
            resource_manager,
            ssm_test_cache,
            'test'
        )
        self.assertEqual({'test': {'a': 'test_val', 'b': 'c'}}, ssm_test_cache)

    def test_check_security_group_exists_no_sg(self):
        # No matching security groups -> False.
        self.mock_ec2.describe_security_groups.return_value = {
            'SecurityGroups': []
        }
        res = common_test_utils.check_security_group_exists(self.session_mock, 'test')
        self.assertEqual(False, res)

    def test_check_security_group_exists(self):
        # At least one matching security group -> True.
        self.mock_ec2.describe_security_groups.return_value = {
            'SecurityGroups': [
                {
                    "GroupName": "test"
                }
            ]
        }
        res = common_test_utils.check_security_group_exists(self.session_mock, 'test')
        self.assertEqual(True, res)
|
#
# Copyright 2015-2020 Andrey Galkin <andrey@futoin.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import setup, find_packages
import os, sys
# Make the project root importable so the package version can be read
# straight from the source tree before installation.
project_path = os.path.dirname( __file__ )
sys.path.insert( 0, project_path )
from futoin.cid import __version__ as version

# Long description for PyPI comes straight from the README.
with open(os.path.join(project_path, 'README.rst'), 'r') as f:
    long_description = f.read()

# Declarative setuptools configuration, passed to setup() below.
config = {
    'name': 'futoin-cid',
    'version': version,
    'namespace_packages': ['futoin'],
    'description': 'FutoIn Continuous Integration & Delivery Tool',
    'long_description': long_description,
    'author': 'Andrey Galkin',
    'author_email': 'andrey@futoin.org',
    'url': 'https://github.com/futoin/cid-tool',
    'download_url': 'https://github.com/futoin/cid-tool/archive/master.zip',
    'install_requires': [
        'docopt',
        #'requests>=2.18.4',
        # be compatible with docker/docker-compose
        'requests>=2.5.2',
        'urllib3>=1.21.1',
        'distro',
    ],
    # temporary disabled due to py3 failures on setup of pylint
    #'setup_requires': ['autopep8', 'pylint'],
    'extras_require': {
        'test': ['nose'],
    },
    'python_requires': '>=3.3',
    'packages': find_packages(exclude=['bind', 'tests']),
    # Three console entry points; 'cid' and 'futoin-cid' are aliases.
    'entry_points': {
        'console_scripts': [
            'cid=futoin.cid.cli:run',
            'cte=futoin.cid.cte:run',
            'futoin-cid=futoin.cid.cli:run',
        ],
    },
    'classifiers': [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Programming Language :: C',
        'Programming Language :: C++',
        'Programming Language :: Java',
        'Programming Language :: JavaScript',
        'Programming Language :: Other',
        'Programming Language :: PHP',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Ruby',
        'Programming Language :: Unix Shell',
        'Topic :: Software Development :: Build Tools',
        'Topic :: System :: Installation/Setup',
        'Topic :: Utilities',
    ],
    'keywords': 'php ruby node nodejs npm gem rvm nvm grunt gulp bower \
puppet build deploy futoin cmake make gradle maven java composer bundler',
    'license': 'Apache 2.0',
}
setup(**config)
|
class Solution(object):
    def rob(self, nums):
        """
        House Robber II: houses form a circle, so the first and last house
        can never both be robbed.

        :type nums: List[int]
        :rtype: int

        Strategy: split the circle into two linear problems -- rob within
        nums[:-1] (may use house 0) or within nums[1:] (may use the last
        house) -- and take the better of the two.

        BUG FIX: the original DP updated dpRob0[i] with nums[i] + dpRob0[i]
        (its own, still-zero cell) instead of the best non-adjacent
        predecessor, so it never accumulated loot, and it crashed with
        IndexError on lists shorter than two elements.
        """
        if not nums:
            return 0
        if len(nums) == 1:
            return nums[0]

        def _rob_line(houses):
            # Classic linear house-robber: for each house either skip it
            # (keep previous best) or take it (best excluding neighbour + value).
            prev_best, best = 0, 0
            for value in houses:
                prev_best, best = best, max(best, prev_best + value)
            return best

        return max(_rob_line(nums[:-1]), _rob_line(nums[1:]))
|
import sqlite3
import argparse
from gensim.models.doc2vec import Doc2Vec
import numpy as np
from gensim import matutils
import itertools
from sklearn.preprocessing import normalize
from random import shuffle
def getItemReviews(item, db):
    """Return all non-test (user, rating, review) rows for *item*.

    Uses a parameterised query instead of string interpolation (SQL
    injection / quoting hazard) and closes the connection when done
    (the original leaked it).
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        c.execute("SELECT user,rating,review FROM reviews WHERE item = ? and not test", (item,))
        return c.fetchall()
    finally:
        con.close()
def getAllReviews(db, test=False):
    """Return every (item, user, rating) row from the test or train split.

    The connection is now closed on exit (the original leaked it).
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        if test:
            c.execute("SELECT item,user,rating FROM reviews WHERE test")
        else:
            c.execute("SELECT item,user,rating FROM reviews WHERE not test")
        return c.fetchall()
    finally:
        con.close()
def getReviewText(db, user, item):
    """Return the review text written by *user* for *item*.

    Parameterised query instead of string interpolation, and the
    connection is closed (the original leaked it).  Raises TypeError if
    no such review exists (fetchone() returns None), as before.
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        c.execute("SELECT review FROM reviews WHERE user = ? and item = ?", (user, item))
        return c.fetchone()[0]
    finally:
        con.close()
def getUserReviews(user, db):
    """Return all non-test (item, rating, review) rows written by *user*.

    Parameterised query instead of string interpolation, and the
    connection is closed (the original leaked it).
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        c.execute("SELECT item,rating,review FROM reviews WHERE user = ? and not test", (user,))
        return c.fetchall()
    finally:
        con.close()
def getSentAVG(user, db):
    """Return the average number of sentences per (user, item) review
    for *user*, or None when the user has no sentences.

    Parameterised query instead of string interpolation, and the
    connection is closed (the original leaked it).
    """
    con = sqlite3.connect(db)
    try:
        c = con.cursor()
        c.execute("select avg(cpt) from (select user,item, count(*) as cpt from sentences where user = ? group by user,item)", (user,))
        return c.fetchone()[0]
    finally:
        con.close()
def find_ngrams(input_list, n):
    """Return the set of space-joined n-grams of *input_list*."""
    shifted = (input_list[offset:] for offset in range(n))
    return {" ".join(gram) for gram in zip(*shifted)}


def rouge_1_metric(real, predicted):
    """Unigram ROUGE-1 recall between two raw strings.

    Periods are treated as word separators and comparison is
    case-insensitive.
    """
    real_words = set(real.replace(".", " ").lower().split(" "))
    pred_words = set(predicted.replace(".", " ").lower().split(" "))
    return len(real_words & pred_words) / float(len(real_words))


def rouge_1_2_3_metric(words_real, words_pred):
    """ROUGE-1/2/3 recall for two pre-tokenised word lists.

    Returns a (rouge1, rouge2, rouge3) tuple of floats.
    """
    scores = []
    for order in (1, 2, 3):
        real_ngrams = find_ngrams(words_real, order)
        pred_ngrams = find_ngrams(words_pred, order)
        scores.append(len(real_ngrams & pred_ngrams) / float(len(real_ngrams)))
    return tuple(scores)
def predict_text(model, vect, texts, num_sent):
    """Build an extractive summary from *texts*.

    Splits the texts into period-delimited sentences, embeds each as the
    sum of its word vectors (rows L1-normalised), ranks sentences by
    dot-product similarity to *vect*, and joins the top *num_sent* with
    periods.  Returns None when no usable sentence exists.

    NOTE(review): assumes *model* exposes the legacy gensim attributes
    layer1_size / vocab / __getitem__ -- confirm gensim version.
    """
    sents = list(itertools.chain.from_iterable([x.split(".") for x in texts]))
    sents = [x for x in sents if len(x) > 2]  # drop empty/trivial fragments
    if len(sents) < 1:
        return None
    # One row per sentence; bag-of-word-vectors representation.
    sentences = np.zeros((len(sents), model.layer1_size))
    for i, sent in enumerate(sents):
        for word in sent.split():
            if word in model.vocab:
                sentences[i] += model[word]
    sentences = normalize(sentences, axis=1, norm="l1")
    sim = np.dot(sentences, vect)
    order = np.argsort(sim)[::-1]  # most similar first
    text = ""  # NOTE(review): unused variable
    sents = [sents[a] for a in order[:num_sent]]
    return ".".join(sents)
def k_sim(model, db, neigh="item", n=None):
    """Evaluate extractive review summarisation via embedding similarity.

    For each (item, user, rating) in a random half of the test split, take
    the user's (neigh="user") or item's (neigh="item") embedding, rank
    candidate sentences from the complementary review set by similarity,
    and score the selection against the real review with ROUGE-1/2/3 recall.
    Progress is printed every 100 evaluated cases.

    NOTE(review): with the default n=None the comparison `n <= 1` raises
    TypeError on Python 3 -- callers must pass an int (the CLI below does).
    NOTE(review): an invalid `neigh` is only printed about here, execution
    continues until the `raise` below -- confirm intent.
    """
    if neigh not in {"user", "item"}:
        print("only {} as similarity".format(["user", "item"]))
    print("prepping data")
    test_data = [(item, user, rating) for item, user, rating in getAllReviews(db, test=True)]
    shuffle(test_data)
    print("test data ready")
    cpt_test = 0
    cpt_skipped = 0
    r1s, r2s, r3s = 0, 0, 0
    cpt_sent = 0
    for item, user, rating in test_data:
        if cpt_test >= len(test_data) / 2:  # we only evaluate on random 50%
            break
        if ("u_{}".format(user) not in model.vocab and neigh == "user") or ("i_{}".format(item) not in model.vocab and neigh == "item"):  # skip not in vocab
            cpt_skipped += 1
            continue
        # embedding of the query side (user or item)
        if neigh == "user":
            vect = model["u_{}".format(user)]
        elif neigh == "item":
            vect = model["i_{}".format(item)]
        else:
            raise Exception("Neigh not item nor user")
        vect = matutils.unitvec(vect)
        # candidate sentences come from the *other* side's reviews
        if neigh == "user":
            list_text = [stext for _, _, stext in getItemReviews(item, db)]
        elif neigh == "item":
            list_text = [stext for _, _, stext in getUserReviews(user, db)]
        else:
            raise Exception("Neigh not item nor user")
        rtext = getReviewText(db, user, item)
        if n <= 1:
            # single-sentence summary
            ptext = predict_text(model, vect, list_text, 1)
            cpt_sent += 1
        else:
            # match the user's average review length in sentences
            avg_sent = getSentAVG(user, db)
            if avg_sent is not None:
                ptext = predict_text(model, vect, list_text, round(avg_sent))
                cpt_sent += avg_sent
            else:
                ptext = None
        if ptext is None:
            cpt_skipped += 1
            continue
        rtext = rtext.replace(".", " ").lower().split(" ")
        ptext = ptext.replace(".", " ").lower().split(" ")
        if len(ptext) < 3 or len(rtext) < 3:
            # too short to score meaningfully
            cpt_skipped += 1
            continue
        r1, r2, r3 = rouge_1_2_3_metric(rtext, ptext)
        r1s += r1
        r2s += r2
        r3s += r3
        cpt_test += 1
        if cpt_test % 100 == 0:
            print("Tests is {}, rouge1 is {}, rouge2 is {}, rouge3 is {} - avg length : {} - {} test cases where skipped".format(cpt_test, r1s / (cpt_test + 0.0), r2s / (cpt_test + 0.0), r3s / (cpt_test + 0.0), cpt_sent / (cpt_test + 0.0), cpt_skipped))
    print("FINAL: Tests is {}, rouge1 is {}, rouge2 is {}, rouge3 is {} - avg length : {} - {} test cases where skipped".format(cpt_test, r1s / (cpt_test + 0.0), r2s / (cpt_test + 0.0), r3s / (cpt_test + 0.0), cpt_sent / (cpt_test + 0.0), cpt_skipped))
# --- command-line entry point ---
parser = argparse.ArgumentParser()
parser.add_argument("--n", default=1, type=int)  # sentences per summary (<=1: single sentence)
parser.add_argument("--neigh", default="item", type=str)  # similarity space: "user" or "item"
parser.add_argument("model", type=str)  # path to the binary embedding model
parser.add_argument("db", type=str)  # path to the sqlite reviews database
args = parser.parse_args()
db = args.db
# NOTE(review): load_word2vec_format on Doc2Vec is a legacy gensim API --
# confirm the pinned gensim version supports it.
model = Doc2Vec.load_word2vec_format(args.model, binary=True, norm_only=False)
k_sim(model, db, neigh=args.neigh, n=args.n)
|
import pandas as pd
from sklearn import preprocessing
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split
# Load data
data = pd.read_csv("MarvellousInfosystems_PlayPredictor.csv")
# print(data.head())
# data['Play'].unique()
# Clean, Prepare and manipulate data
# NOTE(review): the display list says 'Whether' but the CSV columns used
# below are 'Wether'/'Temperature' -- confirm against the actual file.
feature_nm = ['Whether', 'Temperature']
print("Feture name", feature_nm)
# Creating labelEncoder: encode the categorical columns as integers.
label_encoder = preprocessing.LabelEncoder()
data['Play'] = label_encoder.fit_transform(data['Play'])
data['Wether'] = label_encoder.fit_transform(data['Wether'])
data['Temperature'] = label_encoder.fit_transform(data['Temperature'])
#print(data['Play'].unique())
#print(data.head())
# Encode labels in column 'species'.
# df['species'] = label_encoder.fit_transform(df['species'])
# df['species'].unique()
# Combining weather and temp into single listof tuples
features = list(zip(data['Wether'], data['Temperature']))
#data_train, data_test, target_train, target_test = train_test_split(data, target, test_size=0.5)
# train data: 3-nearest-neighbours classifier fit on the full dataset
classifier = KNeighborsClassifier(n_neighbors=3)
classifier.fit(features, data['Play'])
# Test data data_test
# NOTE(review): [0, 2] assumes the encoded values 0=Overcast, 2=Mild --
# this depends on LabelEncoder's alphabetical ordering; verify.
predictions = classifier.predict([[0, 2]])  # 0:Overcast, 2: Mild
print(predictions)
#Accuracy = accuracy_score(target_test, predictions)
#print("Accuracy", Accuracy)
|
from datetime import datetime
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from dummy_task import dummyVal
def print_hello():
    """Python callable for the hello_task operator; returns a greeting."""
    greeting = 'Hello world!'
    return greeting
# DAG scheduled every 3 minutes; catchup=False so missed intervals are
# not backfilled on restart.
dag = DAG('hello_world', description='Simple tutorial DAG',
          schedule_interval='*/3 * * * *',
          start_date=datetime(2019, 10, 22), catchup=False)
dummy_operator = PythonOperator(task_id='task-is-dummy', python_callable=dummyVal, dag=dag)
hello_operator = PythonOperator(task_id='hello_task', python_callable=print_hello, dag=dag)
# task-is-dummy must finish before hello_task starts
dummy_operator >> hello_operator
|
class Solution(object):
    def hammingWeight(self, n):
        """Return the number of set bits (Hamming weight) of the
        non-negative integer *n*."""
        bits = 0
        while n:
            bits += n & 1  # add the lowest bit
            n >>= 1        # shift it away
        return bits
import matplotlib.pyplot as plt
import pandas as pd
from bs4 import BeautifulSoup
from requests import get
import seaborn as sns
sns.set(rc={'figure.figsize': (11.7, 8.27)})
fromyear = int(input("Enter From Year: "))
tillyear = int(input("Till Year: "))
# Redeclaring the lists to store data in
names = []
years = []
imdb_ratings = []
metascores = []
votes = []
pages = [str(i) for i in range(1, 5)]
years_url = [str(i) for i in range(fromyear, tillyear)]
Movieshowtime = []
Moviegenre = []
# Scrape up to 4 result pages per release year.
for year_url in years_url:
    for page in pages:
        # NOTE(review): the trailing '"Accept-Language"...' fragment is pasted
        # into the URL query string -- it looks like it was meant to be a
        # request header; confirm.
        response = get('http://www.imdb.com/search/title?release_date=' + year_url +
                       '&sort=num_votes,desc&page=' + page + '"Accept-Language": "en-US, en;q=0.5"')
        page_html = BeautifulSoup(response.text, 'html.parser')
        mv_containers = page_html.find_all('div', class_='lister-item mode-advanced')
        # For every movie of these 50
        for container in mv_containers:
            # Only keep entries that have a Metascore.
            if container.find('div', class_='ratings-metascore') is not None:
                name = container.h3.a.text
                names.append(name)
                year = container.h3.find('span', class_='lister-item-year').text
                years.append(year)
                imdb = float(container.strong.text)
                imdb_ratings.append(imdb)
                m_score = container.find('span', class_='metascore').text
                metascores.append(int(m_score))
                vote = container.find('span', attrs={'name': 'nv'})['data-value']
                votes.append(int(vote))
                showtime = container.p.find('span', class_='runtime').text
                Movieshowtime.append((int(showtime.replace(" min", ''))))
                genre = container.p.find('span', class_='genre').text
                genre = str(genre)
                genre = genre.replace("\n", '')
                Moviegenre.append((genre))
# Collect everything into one DataFrame and persist it.
movie_ratings = pd.DataFrame({'movie': names,
                              'year': years,
                              'imdb': imdb_ratings,
                              'metascore': metascores,
                              'votes': votes,
                              'runtime': Movieshowtime,
                              'genre': Moviegenre})
print(movie_ratings.info())
movie_ratings.head(10)
print(movie_ratings)
movie_ratings.to_csv('mov.csv', index=False)
# NOTE(review): hard-coded absolute Windows path -- the analysis below reads
# this CSV, not the 'mov.csv' just scraped; confirm which file is intended.
imdb_csv = r'C:\Users\sw\1.csv'
df = pd.read_csv(imdb_csv)
print("Average Run time of the movies are :", df['runtime'].mean())
# NOTE(review): sort_values returns a new frame; this result is discarded.
df.sort_values('imdb')
df.to_csv('123.csv', index=False)
df[['imdb', 'votes']].groupby('imdb').count().plot(kind='bar', title='IMDB ratings Visualization')
plt.xlabel('Votes')
plt.ylabel('')
plt.show()
# Split the comma-separated genre strings into the set of all categories.
categories = set([s for genre_list in df.genre.unique() for s in genre_list.split(",")])
# one-hot encode each movie's classification
for cat in categories:
    df[cat] = df.genre.transform(lambda s: int(cat in s))
# drop other columns
df = df[['genre', 'runtime'] + list(categories)]
# convert from wide to long format and remove null classificaitons
df = pd.melt(df,
             id_vars=['runtime'],
             value_vars=list(categories),
             var_name='Category',
             value_name='Count')
df = df.loc[df.Count > 0]
top_categories = df.groupby('Category').aggregate(sum).sort_values('Count', ascending=False).index
howmany = 8
# add an indicator whether a movie is short or long, split at 100 minutes runtime
df['duration'] = df.runtime.transform(lambda x: int(x > 100))
df = df.loc[df.Category.isin(top_categories[:howmany])]
p = sns.countplot(data=df, x='Category')
plt.show()
p = sns.countplot(data=df,
                  y='Category',
                  hue='duration')
plt.show()
|
import pygame, time, sys
pygame.init()
pygame.time.delay(100)
WIDTH = 600
HEIGHT = 400
# create the object (background and player sprite images)
bg = pygame.image.load("Images/courtBACKGROUND.jpg")
dROSE = pygame.image.load("Images/roseSPRITE.jpg")
## create object to open window
screen = pygame.display.set_mode((WIDTH, HEIGHT))
# RGB colour constants
white = (255, 255, 255)
black = (0, 0, 0)
red = (255, 0, 0)
purple = (171, 18, 222)
screen.fill(white)  # changes color of display
pygame.display.set_caption("My Game")  # Change title on the screen and also can change icon
pygame.display.update()  # what does this do? Updates the screen
# you must always... ALWAYS
check = True  # main-loop flag; cleared when a QUIT event arrives
x = 200  # sprite/circle anchor position
y = 200
rad = 30  # circle radius
hbox, wbox = 20, 20  # player rectangle size
rect = pygame.Rect(x, y, hbox, wbox)  # creates a rectangle (player 1, arrow keys)
rect1 = pygame.Rect(x, y, hbox, wbox)  # player 2 rectangle (IJKL keys)
jumpCHECK = False  # True while a jump is in progress
jump = 10  # jump velocity counter
# Main game loop: handle input, clamp positions, draw, repeat ~33 fps.
while check:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            check = False
    speed = 5
    keyboardPRESS = pygame.key.get_pressed()  # checking what key is pressed
    if keyboardPRESS[pygame.K_LEFT]:  # moving left on x (-)
        rect.x -= speed
        x -= speed
    if keyboardPRESS[pygame.K_RIGHT]:
        # NOTE(review): `x` is not updated here although K_LEFT updates it,
        # so the sprite/circle drawn at (x, y) desyncs when moving right.
        rect.x += speed
    # check if we are going to jump
    if not jumpCHECK:
        if keyboardPRESS[pygame.K_UP]:
            rect.y -= speed
            # NOTE(review): sign differs from rect.y above; likely should
            # be `y -= speed` -- confirm intended behaviour.
            y += speed
        if keyboardPRESS[pygame.K_DOWN]:
            rect.y += speed
        if keyboardPRESS[pygame.K_SPACE]:
            jumpCHECK = True
    else:
        # simple parabolic jump: decelerate going up, accelerate coming down
        if jump >= 0 - 10:
            rect.y -= (jump * abs(jump)) / 2
            jump -= 1
        else:
            jump = 10
            jumpCHECK = False
    # second player movement (IJKL keys)
    if keyboardPRESS[pygame.K_i]:
        rect1.y -= speed
    if keyboardPRESS[pygame.K_k]:
        rect1.y += speed
    if keyboardPRESS[pygame.K_l]:
        rect1.x += speed
    if keyboardPRESS[pygame.K_j]:
        rect1.x -= speed
    # keep player 1 and the circle inside the window
    if rect.x < 0: rect.x = 0
    if rect.x > WIDTH - wbox: rect.x = WIDTH - wbox
    if rect.y < 0: rect.y = 0
    if rect.y > HEIGHT - hbox: rect.y = HEIGHT - hbox
    if rad < 0: rad = 1
    if rad > WIDTH - x: rad = WIDTH - x
    # BUG FIX: these four checks tested rect1 but clamped rect, so the
    # second rectangle could leave the screen; clamp rect1 instead.
    if rect1.x < 0: rect1.x = 0
    if rect1.x > WIDTH - wbox: rect1.x = WIDTH - wbox
    if rect1.y < 0: rect1.y = 0
    if rect1.y > HEIGHT - hbox: rect1.y = HEIGHT - hbox
    # push the two rectangles apart on collision
    if rect.colliderect(rect1):
        rect.x -= 3
        rect1.x += 3
    # draw the frame
    screen.fill(white)
    screen.blit(bg, (0, 0))
    screen.blit(dROSE, (x, y))
    pygame.draw.rect(screen, (black), rect)
    pygame.draw.rect(screen, (black), rect1)
    pygame.draw.circle(screen, (black), (x + 50, y + 50), rad, 2)
    # W/S grow or shrink the circle
    if keyboardPRESS[pygame.K_w]:
        rad += speed
    if keyboardPRESS[pygame.K_s]:
        rad -= speed
    pygame.display.update()
    pygame.time.delay(30)
pygame.quit()
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# gen.py
# Diceware passphrase generator.
# More info: https://www.eff.org/dice
# Note: This uses secrets, has protection against shell injection attacks,
# and I have tried to keep things as secure as possible however the best way to
# generate diceware passphrases is by hand as specified on the page above.
#
# Uses pyperclip for adding the password to the clipboard which
# depends on xclip on linux (https://pypi.org/project/pyperclip/).
#
# ----
# Security Note from pyperclip's source code
# (https://github.com/asweigart/pyperclip/blob/master/src/pyperclip/__init__.py)
# This module (pyperclip) runs programs with these names:
# - which
# - where
# - pbcopy
# - pbpaste
# - xclip
# - xsel
# - wl-copy/wl-paste
# - klipper
# - qdbus
# A malicious user could rename or add programs with these names, tricking
# Pyperclip into running them with whatever permissions the Python process has.
# ----
#
# Deepraj Pandey
# 14 Feb, 2021
import os
from collections import OrderedDict
from platform import system
from pyperclip import copy, paste
from secrets import randbelow
from shlex import quote
from subprocess import Popen
def windowsss():
    """Opinionated exit sequence for Windows: warn that the clipboard will
    not be auto-cleared, then abort with exit status 1."""
    print("We noticed you are on Windows 🤮. ", end='')
    print("Please switch to something more decent. :p")
    # BUG FIX: the original literal contained "\ " -- an invalid escape
    # sequence (DeprecationWarning) that printed a stray backslash.
    print(
        "⚠️ Your password will be on the clipboard until you clear it manually!")
    raise SystemExit(1)
def unsupported():
    """Exit sequence on unsupported platforms: warn that the clipboard will
    not be auto-cleared, then abort with exit status 1."""
    print("Platform not supported for autoclearing clipboard.")
    # BUG FIX: the original literal contained "\ " -- an invalid escape
    # sequence (DeprecationWarning) that printed a stray backslash.
    print(
        "⚠️ Your password will be on the clipboard until you clear it manually!")
    raise SystemExit(1)
# Take length of password to be generated (phrase_len)
# TODO: set up argparse with flags and help menu
phrase_len = 6  # number of diceware words in the passphrase
clipboard_timeout = 15  # time to wait in seconds before clearing clipboard
# Roll dice 5x and generate index code - `phrase_len` times
# randbelow(n) [0,n), hence +1. We need 5 random numbers joined to
# create one index, and `phrase_len` such indices
indices = [("".join([str(randbelow(6) + 1) for _ in range(5)]))
           for _ in range(phrase_len)]
# Ordered Dict of the indices in original random order for storing words
# NOTE(review): duplicate rolls collapse into one dict key, yielding fewer
# than phrase_len words in the passphrase -- confirm whether re-rolling
# duplicates was intended.
search_space = OrderedDict((i, '') for i in indices)
# Sort for access in one pass of wordlist
# There is probably a beter way to do this.
# (All indices are 5-digit strings of digits 1-6, so lexicographic order
# matches the wordlist's numeric ordering.)
indices.sort()
# Index into wordlist and get word
filename = "./word.list"
with open(filename, 'r') as f:
    for line in f:
        # if no indices to search for, break
        if not len(indices):
            break
        contents = line.split()
        if contents[0] == indices[0]:
            # place the word in `search_space`
            search_space[indices[0]] = contents[1]
            # indices is sorted, so continuing from next line works
            indices.pop(0)
# Copy passphrase to clipboard
# NOTE(review): `id` shadows the builtin of the same name (harmless here).
copy("-".join([search_space[id] for id in search_space]))
# Clear clipboard (on MacOS) if it still has the passphrase
# `clipboard_timeout` seconds afer pasting.
# Since there is no copy of the password, we check for number of '-'
if paste().count('-') == phrase_len - 1:
    cmd = "sleep {} && ".format(
        quote(str(clipboard_timeout)))
    if system() == 'Darwin' or os.name == 'mac':
        cmd += "pbcopy < /dev/null"
    elif system() == 'Linux' and os.path.isfile('/proc/version'):
        # handle WSL
        with open('/proc/version', 'r') as f:
            if "microsoft" in f.read().lower():
                windowsss()  # quit!
        cmd += "xclip -selection c < /dev/null"
    else:
        if system() == 'Windows' or os.name == 'nt':
            windowsss()  # quit!
        else:
            unsupported()
    # Detached shell: sleep, then overwrite the clipboard with nothing.
    Popen(cmd, shell=True, start_new_session=True)
|
#!flask/bin/python
# ! /usr/bin/env python
# coding=utf-8
from flask import Flask, jsonify, abort, request, render_template, redirect, url_for, flash, send_from_directory, send_file
from werkzeug.utils import secure_filename
from app import app
import json
import os
from jtnltk import qacorpus, frequency, synonymy
from forms import myForm
@app.route('/')
@app.route('/index/')
def index():
    """Serve the landing page with the upload/processing form."""
    form = myForm()
    return render_template('index.html', form=form)
@app.route('/upload/', methods=['GET', 'POST'])
def upload():
    """Receive an uploaded file and route it by extension.

    Spreadsheets are stashed as tmp.xlsx and sent to /select_column/ so
    the user can pick the text column; XML and anything else are saved to
    the tmp folder and sent straight to /process/.
    """
    form = myForm()
    if request.method == 'GET':
        # direct GETs bounce back to the form
        return redirect('/index/')
    elif request.method == 'POST':
        fil = request.files['file']
        fname = secure_filename(fil.filename)
        # save_path = app.config['UPLOAD_FOLDER']
        if fname.strip() == '':
            flash("Please select file.")
            return redirect('/index/')
        if 'xlsx' in fname or 'xls' in fname:
            # Excel workbook: normalise the name before saving
            fname = 'tmp.xlsx'
            save_path = app.config['TMP_FOLDER']
            fil.save(os.path.join(save_path, fname))
            return redirect('/select_column/')
        elif '.xml' in fname:
            fname = 'tmp.xml'
            save_path = app.config['TMP_FOLDER']
            fil.save(os.path.join(save_path, fname))
            return redirect('/process/')
        else:
            # anything else is treated as plain text
            fname = 'rawtext.txt'
            save_path = app.config['TMP_FOLDER']
            fil.save(os.path.join(save_path, fname))
            return redirect('/process/')
@app.route('/select_column/', methods=['GET', 'POST'])
def select_column():
    """Let the user pick which spreadsheet column holds the raw text."""
    form = myForm()
    if request.method == 'POST':
        if form.colnum.data != '':
            # column numbers in the UI are 1-based; qacorpus expects 0-based
            qacorpus.getrawtext(columnnum=int(form.colnum.data) - 1)
            return redirect('/process')
        else:
            flash("Please select column." + form.colnum.data)
            return render_template('xls.html', form=form, )
    return render_template('xls.html', form=form, )
@app.route('/result/<filename>', methods=['GET'])
def download_file(filename):
    """Copy a generated result file out of the tmp folder and send it
    to the client as an attachment.

    SECURITY FIX: *filename* comes straight from the URL and was used to
    build filesystem paths unchanged, allowing path traversal
    (e.g. '../../...'); it is now sanitised with secure_filename.
    """
    filename = secure_filename(filename)
    # copy tmp/<filename> into app/result/<filename>; with-blocks close
    # both handles (the source handle was previously leaked)
    src = os.path.join(app.config['TMP_FOLDER'], filename)
    with open(src) as fin, open('app/result/' + filename, 'w') as fout:
        fout.write(fin.read())
    uploads = os.path.join(app.root_path, 'result')
    return send_from_directory(directory=uploads, filename=filename, as_attachment=True)
@app.route('/process/', methods=['GET', 'POST'])
def process():
    """Run the NLP step selected in the form over the uploaded raw text
    and show a download link for the result file."""
    form = myForm()
    link = ''
    label = ''
    if request.method == 'GET':
        return render_template('process.html', form=form, link='', label='')
    if request.method == 'POST':
        label = 'DownloadResult'
        rawtext = open(os.path.join(
            app.config['TMP_FOLDER'], 'rawtext.txt')).readlines()
        # form.fre.data selects the processing mode:
        # ex = raw text as-is, fr = word frequency, al = wiki aliases,
        # sy = HIT synonyms, xml = keyword-weight statistics
        if form.fre.data == 'ex':
            link = u'/result/rawtext.txt'
        elif form.fre.data == 'fr':
            qacorpus.frequency(rawtext)
            link = u'/result/frequency.txt'
        elif form.fre.data == 'al':
            synonymy.get_wiki_synonymy(rawtext)
            link = u'/result/aliases.txt'
        elif form.fre.data == 'sy':
            synonymy.get_hit_synonymy(rawtext)
            link = u'/result/synonymy.txt'
        elif form.fre.data == 'xml':
            frequency.statistics()
            link = u'/result/CfgKeywordWeight.properties'
        return render_template('process.html', form=form, link=link, label=label)
@app.route('/frequency', methods=['POST'])
def create_task():
    """JSON endpoint: word-frequency statistics for the posted 'content'."""
    # print json.dumps(request.json)
    if not request.json or not 'content' in request.json:
        abort(400)
    num, worddict = frequency.statistics(request.json['content'])
    # print len(worddict)
    task = {
        'Number': num,
        'Weight': worddict
    }
    # tasks.append(task)
    return jsonify({'result': task}), 201
@app.route('/uploadfile', methods=['POST'])
def uploadfile():
    """JSON/API upload endpoint: save the posted workbook to tmp/tmp.xlsx."""
    if request.files:
        f = request.files['file']
        f.save('tmp/tmp.xlsx')
        return 'upload file successfully!', 201
    else:
        abort(400)
@app.route('/getrawtext', methods=['POST'])
def getrawtext():
    """JSON endpoint: extract raw text lines from the uploaded workbook.

    Expects a JSON body with a 1-based 'colnum'; qacorpus is 0-based.
    """
    if not request.json:
        abort(400)
    lines = qacorpus.getrawtext(columnnum=request.json['colnum'] - 1)
    return jsonify({'result': {'lines': lines}}), 201
@app.route('/getfrequency', methods=['POST'])
def getfrequency():
    """JSON endpoint: word frequencies for one column of the uploaded workbook.

    Expects a JSON body with a 1-based 'colnum'; qacorpus is 0-based.
    """
    if not request.json:
        abort(400)
    lines = qacorpus.get_xlsx_frequency(
        columnnum=request.json['colnum'] - 1)
    return jsonify({'result': {'lines': lines}}), 201
@app.route('/getsynonymy', methods=['POST'])
def getsynonymy():
    """Return wiki-derived synonym lines for the posted word list."""
    if not request.json:
        abort(400)
    synonym_lines = synonymy.get_wiki_synonymy(request.json['wordlist'])
    return jsonify({'result': {'lines': synonym_lines}}), 201
|
import os
from importlib import util
from unittest import TestCase
import pandas as pd
from proxy_builder import RidgeCSRegression
class TestRidgeCSRegression(TestCase):
    """Unit tests for RidgeCSRegression: fitting, CV tuning, data shapes
    and reference predictions against a CSV fixture."""

    def setUp(self) -> None:
        # Locate the CSV fixture that ships inside the test package.
        spec = util.find_spec("proxy_builder_tests.test_files")
        test_files_dir = spec.submodule_search_locations[0]
        train_path = os.path.join(test_files_dir, "cds_spreads_data_all.csv")
        self.train_input = pd.read_csv(train_path, index_col=0)
        self.key_cols = ['region', 'sector', 'rating', 'tenor']
        self.y_column = "benchmark"
        self.reg_params = {"alpha": 0.1}
        self.params_to_tune = {'alpha': [0.0001, 0.001, 0.01, 0.1, 1]}
        # One model left unfitted, one fitted with the default parameters.
        self.reg_model_xsec_ridge = RidgeCSRegression(
            train_df=self.train_input, cols_x_dummy=self.key_cols,
            col_y=self.y_column)
        self.reg_model_xsec_ridge_2 = RidgeCSRegression(
            train_df=self.train_input, cols_x_dummy=self.key_cols,
            col_y=self.y_column)
        self.reg_model_xsec_ridge_2.fit(**self.reg_params)

    def test_fit_1(self):
        """fit() should attach a model object."""
        self.reg_model_xsec_ridge.fit(**self.reg_params)
        self.assertIsNotNone(self.reg_model_xsec_ridge.model)

    def test_fit_2(self):
        """fit() should build a ridge-type model."""
        self.reg_model_xsec_ridge.fit(**self.reg_params)
        self.assertEqual(self.reg_model_xsec_ridge.model.model, "ridge")

    def test_fit_cv_1(self):
        """Cross-validation should select alpha from the candidate grid."""
        self.reg_model_xsec_ridge.fit_cv(self.params_to_tune)
        chosen_alpha = self.reg_model_xsec_ridge.model.alpha
        self.assertIn(chosen_alpha, self.params_to_tune.get("alpha"))

    def test_rsquared(self):
        """In-sample R^2 must stay below 1."""
        self.assertLess(self.reg_model_xsec_ridge_2.rsquared, 1)

    def test_data(self):
        """Design matrix and target must have matching row counts."""
        x_df = self.reg_model_xsec_ridge_2.x_train
        y_df = self.reg_model_xsec_ridge_2.y_train
        self.assertEqual(x_df.shape[0], y_df.shape[0])

    def test_predictions(self):
        """First fitted value should match the stored reference value."""
        fitted = self.reg_model_xsec_ridge_2.predictions
        self.assertLess(abs(fitted[0] - -5.6549942406), 1e-06)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-01-25 08:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: drops the OrganizationAddress and
    PeriodInternshipPlaces models (removing their FK fields first) and adds
    a 'registered' timestamp to InternshipChoice.
    """

    dependencies = [
        ('internship', '0029_auto_20180122_2251'),
    ]
    operations = [
        # FK fields must be removed before the models they point at can be
        # deleted below.
        migrations.RemoveField(
            model_name='organizationaddress',
            name='organization',
        ),
        migrations.RemoveField(
            model_name='periodinternshipplaces',
            name='internship_offer',
        ),
        migrations.RemoveField(
            model_name='periodinternshipplaces',
            name='period',
        ),
        # auto_now: Django stamps the row on every save; null=True keeps
        # existing rows valid.
        migrations.AddField(
            model_name='internshipchoice',
            name='registered',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.DeleteModel(
            name='OrganizationAddress',
        ),
        migrations.DeleteModel(
            name='PeriodInternshipPlaces',
        ),
    ]
|
#!/usr/bin/env python
import math
def dist(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2)."""
    # math.hypot avoids the explicit squaring, which can overflow or lose
    # precision for large coordinate differences.
    return math.hypot(x1 - x2, y1 - y2)
def avg(gen):
    """Return the arithmetic mean of the values produced by *gen*.

    Raises ZeroDivisionError when *gen* yields nothing, matching the
    original running-total implementation.
    """
    values = list(gen)
    return float(sum(values)) / float(len(values))
|
# Read height/weight, compute BMI = mass (kg) / height (m) squared, and
# print the truncated result.
height = input("enter your height in m: ")
weight = input("enter your weight in kg: ")
height_new = float(height)
weight_new = float(weight)
bmi = weight_new / height_new**2
# Bug fix: print() returns None, so the old "bmi_final = print(int(bmi))"
# bound None to bmi_final. Compute the value first, then print it.
bmi_final = int(bmi)
print(bmi_final)
|
"""
this is the main script, that will take ages, for sure.
"""
from typing import List, Tuple, Optional, Callable
from identify_idioms.service import build_iip
from idiom2vec.cleaners import Cleaner, CocaSpokCleaner, CocaMagCleaner, OpenSubCleaner
from idiom2vec.paths import (
OPENSUB_ORIGIN_SPLITS_DIR,
OPENSUB_TRAIN_SPLITS_DIR,
COCA_SPOK_ORIGIN_SPLITS_DIR,
COCA_SPOK_TRAIN_SPLITS_DIR,
COCA_MAG_ORIGIN_SPLITS_DIR,
COCA_MAG_TRAIN_SPLITS_DIR
)
from joblib import Parallel, delayed
import argparse
import json
import os
# --- global vars --- #
# These are populated by main() according to --corpus_name and read by the
# worker functions below (joblib forks inherit them).
iip = build_iip()  # identify-idioms pipeline
cleaner: Optional[Cleaner] = None  # domain-specific cleaner, set in main()
train_splits_dir: Optional[str] = None  # to be used for progress tracking
origin_splits_dir: Optional[str] = None  # to be used for progress tracking
process_line_fn: Optional[Callable] = None  # per-corpus line processor, set in main()
def process_line_coca(line: str) -> List[List[str]]:
    """Clean a COCA line, split it on '.' into sentences and tokenise each.

    Idiom tokens are lemmatised with spaces replaced by underscores; all
    other tokens are kept verbatim. Single-character tokens, punctuation
    and number-like tokens are dropped.
    """
    global iip, cleaner

    def tokens_of(sentence: str) -> List[str]:
        kept = []
        for token in iip(sentence):
            if len(token.text) <= 1 or token.is_punct or token.like_num:
                continue
            if token._.is_idiom:
                kept.append(token.lemma_.replace(" ", "_"))
            else:
                kept.append(token.text)
        return kept

    return [tokens_of(sent) for sent in cleaner(line).split(".")]
def process_line_opensub(line: str) -> List[str]:
    """Clean an OpenSubtitles line and tokenise it into one flat token list.

    Same filtering as the COCA processor: idioms are lemmatised (spaces ->
    underscores); short, punctuation and number-like tokens are dropped.
    """
    global iip, cleaner
    result = []
    for token in iip(cleaner(line)):
        if len(token.text) <= 1 or token.is_punct or token.like_num:
            continue
        result.append(token.lemma_.replace(" ", "_")
                      if token._.is_idiom else token.text)
    return result
def process_split(paths: Tuple[str, str]):
    """Process one origin split file and write the result as ndjson.

    *paths* is ``(origin_path, train_path)``. Every input line is run
    through the module-level ``process_line_fn`` and written out as one
    JSON document per line; progress is reported from directory counts.
    """
    global train_splits_dir
    origin_path, train_path = paths
    with open(origin_path, 'r') as src:
        encoded = [json.dumps(process_line_fn(line)) for line in src]
    with open(train_path, 'w') as dst:
        dst.writelines(doc + "\n" for doc in encoded)
    # tracking progress here.
    print("Finished:" + str(origin_path))
    total = len(os.listdir(origin_splits_dir))
    done = len(os.listdir(train_splits_dir))
    print("Progress: {} / {}".format(done, total))
def load_read_paths():
    """Return origin-split paths (*.txt) that have no *.ndjson output yet.

    A split counts as done once a file with the same stem exists in
    ``train_splits_dir``, so an interrupted run can be resumed.
    """
    global origin_splits_dir, train_splits_dir
    # os.path.splitext strips only the final extension; the previous
    # str.replace(".txt", "") would also mangle stems that happen to
    # contain ".txt" in the middle of the name.
    todo_ids = {
        os.path.splitext(name)[0]
        for name in os.listdir(origin_splits_dir)
        if name.endswith(".txt")
    }
    done_ids = {
        os.path.splitext(name)[0]
        for name in os.listdir(train_splits_dir)
        if name.endswith(".ndjson")
    }
    pending_ids = todo_ids - done_ids
    print("pending size:" + str(len(pending_ids)))
    return [
        os.path.join(origin_splits_dir, pending_id + ".txt")
        for pending_id in pending_ids
    ]
def main():
    """Entry point: select the corpus-specific pipeline from the CLI args
    and process all pending splits in parallel."""
    global cleaner, origin_splits_dir, train_splits_dir, process_line_fn
    parser = argparse.ArgumentParser()
    parser.add_argument('--num_workers',
                        type=int,
                        default=3)
    parser.add_argument('--corpus_name',
                        type=str,
                        default="coca_spok")
    args = parser.parse_args()
    # --- init the cleaner, process function and paths --- #
    # corpus name -> (cleaner class, line processor, origin dir, train dir)
    corpus_configs = {
        "coca_spok": (CocaSpokCleaner, process_line_coca,
                      COCA_SPOK_ORIGIN_SPLITS_DIR, COCA_SPOK_TRAIN_SPLITS_DIR),
        "coca_mag": (CocaMagCleaner, process_line_coca,
                     COCA_MAG_ORIGIN_SPLITS_DIR, COCA_MAG_TRAIN_SPLITS_DIR),
        "opensub": (OpenSubCleaner, process_line_opensub,
                    OPENSUB_ORIGIN_SPLITS_DIR, OPENSUB_TRAIN_SPLITS_DIR),
    }
    if args.corpus_name not in corpus_configs:
        raise ValueError("Invalid corpus name:" + args.corpus_name)
    if args.corpus_name == "coca_spok":
        # matches the original behaviour of echoing only this corpus name
        print(args.corpus_name)
    cleaner_cls, process_line_fn, origin_splits_dir, train_splits_dir = \
        corpus_configs[args.corpus_name]
    cleaner = cleaner_cls()
    # --- prepare the read & write paths --- #
    read_paths = load_read_paths()
    read_write_paths = [
        (read_path,
         train_splits_dir + "/" + read_path.split("/")[-1].replace(".txt", ".ndjson"))
        for read_path in read_paths
    ]
    # --- execute the process with parallelism --- #
    runner = Parallel(n_jobs=args.num_workers)
    runner(delayed(process_split)(pair) for pair in read_write_paths)
if __name__ == '__main__':
    # Script entry point: run the full corpus-processing pipeline.
    main()
|
# Read `limit` numbers into a list; for each element print the sum of all
# the OTHER elements (i.e. total - element).
limit = int(input("enter limit"))
lst = list()
for i in range(0, limit):
    number = int(input("enter number"))
    lst.append(number)
# Computing the grand total once makes the pass O(n) instead of re-summing
# the list for every element.
total = sum(lst)
out = [total - num for num in lst]
print(out)
#!/usr/bin/env python
# Copyright 2019 Pascal Audet & Helen Janiszewski
#
# This file is part of OBStools.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Import modules and functions
import numpy as np
import os.path
import pickle
import stdb
from obspy.clients.fdsn import Client
from obspy.geodetics.base import gps2dist_azimuth as epi
from obspy.geodetics import kilometer2degrees as k2d
from obspy.core import Stream, UTCDateTime
from obstools.atacr import utils, EventStream
from pathlib import Path
from argparse import ArgumentParser
from os.path import exists as exist
from numpy import nan
def get_event_arguments(argv=None):
    """
    Parse and validate command-line options for `obs_download_event.py`.

    Parameters
    ----------
    argv : list of str, optional
        Argument list to parse; when None, :class:`~argparse.ArgumentParser`
        falls back to ``sys.argv[1:]``.

    Returns
    -------
    argparse.Namespace
        Parsed arguments with string fields post-processed: ``stkeys`` and
        ``channels`` split into lists, ``startT``/``endT`` converted to
        :class:`~obspy.core.UTCDateTime` (or None), ``UserAuth`` split into
        ``[user, password]`` (or []), and ``pre_filt`` turned into a sorted
        list of four corner frequencies.
    """
    parser = ArgumentParser(
        usage="%(prog)s [options] <indb>",
        description="Script used " +
        "to download and pre-process four-component " +
        "(H1, H2, Z and P), two-hour-long seismograms for " +
        "individual events on which to apply the de-noising " +
        "algorithms. Data are requested from the internet using " +
        "the client services framework for a given date range. " +
        "The stations are processed one by one and the data are " +
        "stored to disk.")
    parser.add_argument(
        "indb",
        help="Station Database to process from.",
        type=str)
    # General Settings
    parser.add_argument(
        "--keys",
        action="store",
        type=str,
        dest="stkeys",
        default="",
        help="Specify a comma separated list of station keys " +
        "for which to perform the analysis. These must be " +
        "contained within the station database. Partial keys " +
        "will be used to match against those in the "
        "dictionary. For instance, providing IU will match with " +
        "all stations in the IU network [Default processes " +
        "all stations in the database]")
    parser.add_argument(
        "-C", "--channels",
        action="store",
        type=str,
        dest="channels",
        default="",
        help="Specify a comma-separated list of channels for " +
        "which to perform the transfer function analysis. " +
        "Possible options are '12' (for horizontal channels) or 'P' " +
        "(for pressure channel). Specifying '12' allows " +
        "for tilt correction. Specifying 'P' allows for compliance " +
        "correction. [Default '12,P' looks for both horizontal and " +
        "pressure and allows for both tilt AND compliance corrections]")
    parser.add_argument(
        "-O", "--overwrite",
        action="store_true",
        dest="ovr",
        default=False,
        help="Force the overwriting of pre-existing data. " +
        "[Default False]")
    # Server Settings
    ServerGroup = parser.add_argument_group(
        title="Server Settings",
        description="Settings associated with which "
        "datacenter to log into.")
    ServerGroup.add_argument(
        "-S", "--Server",
        action="store",
        type=str,
        dest="Server",
        default="IRIS",
        help="Specify the server to connect to. Options include: BGR, " +
        "ETH, GEONET, GFZ, INGV, IPGP, IRIS, KOERI, LMU, NCEDC, NEIP, " +
        "NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. [Default IRIS]")
    ServerGroup.add_argument(
        "-U", "--User-Auth",
        action="store",
        type=str,
        dest="UserAuth",
        default="",
        help="Enter your IRIS Authentification Username and Password " +
        "(--User-Auth='username:authpassword') to access and download " +
        "restricted data. [Default no user and password]")
    # Constants Settings
    FreqGroup = parser.add_argument_group(
        title='Frequency Settings',
        description="Miscellaneous frequency settings")
    FreqGroup.add_argument(
        "--sampling-rate",
        action="store",
        type=float,
        dest="new_sampling_rate",
        default=5.,
        help="Specify new sampling rate (float, in Hz). " +
        "[Default 5.]")
    FreqGroup.add_argument(
        "--units",
        action="store",
        type=str,
        dest="units",
        default="DISP",
        help="Choose the output seismogram units. Options are: " +
        "'DISP', 'VEL', 'ACC'. [Default 'DISP']")
    FreqGroup.add_argument(
        "--pre-filt",
        action="store",
        type=str,
        dest="pre_filt",
        default=None,
        help="Specify four comma-separated corner " +
        "frequencies (float, in Hz) for deconvolution " +
        "pre-filter. [Default 0.001,0.005,45.,50.]")
    FreqGroup.add_argument(
        "--window",
        action="store",
        type=float,
        dest="window",
        default=7200.,
        help="Specify window length in seconds. " +
        "Default value is highly recommended. "
        "Program may not be stable for large deviations " +
        "from default value. [Default 7200. (or 2 hours)]")
    # Event Selection Criteria
    EventGroup = parser.add_argument_group(
        title="Event Settings",
        description="Settings associated with refining " +
        "the events to include in matching station " +
        "pairs")
    EventGroup.add_argument(
        "--start",
        action="store",
        type=str,
        dest="startT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the event " +
        "search. This will override any station start " +
        "times. [Default start date of each station in " +
        "database]")
    EventGroup.add_argument(
        "--end",
        action="store",
        type=str,
        dest="endT",
        default="",
        help="Specify a UTCDateTime compatible string " +
        "representing the start time for the event " +
        "search. This will override any station end times " +
        "[Default end date of each station in database]")
    EventGroup.add_argument(
        "--reverse-order", "-R",
        action="store_true",
        dest="reverse",
        default=False,
        help="Reverse order of events. Default behaviour " +
        "starts at oldest event and works towards most " +
        "recent. Specify reverse order and instead the " +
        "program will start with the most recent events " +
        "and work towards older")
    EventGroup.add_argument(
        "--min-mag",
        action="store",
        type=float,
        dest="minmag",
        default=5.5,
        help="Specify the minimum magnitude of event " +
        "for which to search. [Default 5.5]")
    EventGroup.add_argument(
        "--max-mag",
        action="store",
        type=float,
        dest="maxmag",
        default=None,
        help="Specify the maximum magnitude of event " +
        "for which to search. " +
        "[Default None, i.e. no limit]")
    # Geometry Settings
    GeomGroup = parser.add_argument_group(
        title="Geometry Settings",
        description="Settings associatd with the " +
        "event-station geometries")
    GeomGroup.add_argument(
        "--min-dist",
        action="store",
        type=float,
        dest="mindist",
        default=30.,
        help="Specify the minimum great circle distance " +
        "(degrees) between the station and event. " +
        "[Default 30]")
    GeomGroup.add_argument(
        "--max-dist",
        action="store",
        type=float,
        dest="maxdist",
        default=120.,
        help="Specify the maximum great circle distance " +
        "(degrees) between the station and event. " +
        "[Default 120]")
    args = parser.parse_args(argv)
    # Check inputs
    if not exist(args.indb):
        parser.error("Input file " + args.indb + " does not exist")
    # create station key list
    if len(args.stkeys) > 0:
        args.stkeys = args.stkeys.split(',')
    # create channel list
    if len(args.channels) > 0:
        args.channels = args.channels.split(',')
    else:
        args.channels = ['12', 'P']
    for cha in args.channels:
        if cha not in ['12', 'P']:
            parser.error("Error: Channel not recognized " + str(cha))
    # construct start time
    if len(args.startT) > 0:
        try:
            args.startT = UTCDateTime(args.startT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from start time: " +
                args.startT)
    else:
        args.startT = None
    # construct end time
    if len(args.endT) > 0:
        try:
            args.endT = UTCDateTime(args.endT)
        except Exception:
            parser.error(
                "Error: Cannot construct UTCDateTime from end time: " +
                args.endT)
    else:
        args.endT = None
    # Parse User Authentification
    if not len(args.UserAuth) == 0:
        tt = args.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error(
                "Error: Incorrect Username and Password Strings for User " +
                "Authentification")
        else:
            args.UserAuth = tt
    else:
        args.UserAuth = []
    # Deconvolution pre-filter: default corners, or the user-supplied four
    # values sorted into ascending order.
    if args.pre_filt is None:
        args.pre_filt = [0.001, 0.005, 45., 50.]
    else:
        args.pre_filt = [float(val) for val in args.pre_filt.split(',')]
        args.pre_filt = sorted(args.pre_filt)
        if (len(args.pre_filt)) != 4:
            raise(Exception(
                "Error: --pre-filt should contain 4 " +
                "comma-separated floats"))
    return args
def main(args=None):
    """
    Download and pre-process event seismograms for every station key.

    For each station in the database, query the FDSN client for events in
    the requested time/magnitude window, download (up to) four components
    (H1, H2, Z and optionally P), filter, resample, remove instrument
    responses and write the traces as SAC files under ``EVENTS/<stkey>``.

    Parameters
    ----------
    args : argparse.Namespace, optional
        Pre-parsed arguments; when None they are read from the command
        line via :func:`get_event_arguments`.
    """
    if args is None:
        # Run Input Parser
        args = get_event_arguments()
    # Load Database
    # stdb>0.1.3
    try:
        db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)
    # stdb=0.1.3
    except Exception:
        db = stdb.io.load_db(fname=args.indb)
    # Construct station key loop
    allkeys = db.keys()
    # NOTE(review): sorted() returns a new list and does not sort in place,
    # so this call (and the one below) has no effect on iteration order.
    sorted(allkeys)
    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = db.keys()
        sorted(stkeys)
    # Loop over station keys
    for stkey in list(stkeys):
        # Extract station information from dictionary
        sta = db[stkey]
        # Define path to see if it exists
        eventpath = Path('EVENTS') / Path(stkey)
        if not eventpath.is_dir():
            print('\nPath to '+str(eventpath)+' doesn`t exist - creating it')
            eventpath.mkdir(parents=True)
        # Establish client
        if len(args.UserAuth) == 0:
            client = Client(args.Server)
        else:
            client = Client(
                args.Server, user=args.UserAuth[0], password=args.UserAuth[1])
        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT
        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT
        # Skip stations whose deployment does not overlap the search window
        if tstart > sta.enddate or tend < sta.startdate:
            continue
        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs
        # Update Display
        print("\n|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")
        print("| Searching Possible events:                    |")
        print("|   Start: {0:19s}                  |".format(
            tstart.strftime("%Y-%m-%d %H:%M:%S")))
        print("|   End:   {0:19s}                  |".format(
            tend.strftime("%Y-%m-%d %H:%M:%S")))
        if args.maxmag is None:
            print("|   Mag:   >{0:3.1f}".format(
                args.minmag)+"                                 |")
        else:
            print(
                "|   Mag:   {0:3.1f} - {1:3.1f}".format(
                    args.minmag, args.maxmag)+"                            |")
        print("| ...                                           |")
        # Get catalogue using deployment start and end
        cat = client.get_events(
            starttime=tstart, endtime=tend,
            minmagnitude=args.minmag, maxmagnitude=args.maxmag)
        # Total number of events in Catalogue
        nevtT = len(cat)
        print(
            "|  Found {0:5d}".format(nevtT) +
            " possible events                  |")
        # Select order of processing
        ievs = range(0, nevtT)
        if not args.reverse:
            ievs = range(nevtT-1, -1, -1)
        # Read through catalogue
        for inum, iev in enumerate(ievs):
            # Extract event
            ev = cat[iev]
            time = ev.origins[0].time
            dep = ev.origins[0].depth
            lon = ev.origins[0].longitude
            lat = ev.origins[0].latitude
            # Epicentral distance (km -> degrees), azimuth and back-azimuth
            epi_dist, az, baz = epi(lat, lon, sta.latitude, sta.longitude)
            epi_dist /= 1000.
            gac = k2d(epi_dist)
            mag = ev.magnitudes[0].mag
            if mag is None:
                mag = -9.
            # Display Event Info
            print("\n"+"*"*60)
            print(
                "* #({0:d}/{1:d}):  {2:13s}".format(
                    inum+1, nevtT, time.strftime("%Y%m%d_%H%M%S")))
            print(
                "*   Origin Time: " + time.strftime("%Y-%m-%d %H:%M:%S"))
            print(
                "*   Lat: {0:6.2f};        Lon: {1:7.2f}".format(lat, lon))
            print(
                "*   Dep: {0:6.2f} km;     Mag: {1:3.1f}".format(dep/1000., mag))
            print(
                "*     {0:5s} -> Ev: {1:7.2f} km; {2:7.2f} deg; ".format(
                    sta.station, epi_dist, gac))
            # If distance outside of distance range:
            if not (gac > args.mindist and gac < args.maxdist):
                print(
                    "\n*   -> Event outside epicentral distance " +
                    "range - continuing")
                continue
            t1 = time
            t2 = t1 + args.window
            # Time stamp
            tstamp = str(time.year).zfill(4)+'.' + \
                str(time.julday).zfill(3)+'.'
            tstamp = tstamp + str(time.hour).zfill(2) + \
                '.'+str(time.minute).zfill(2)
            # Define file names (to check if files already exist)
            filename = eventpath / (tstamp+'.pkl')
            # Horizontal 1 channel
            file1 = eventpath / (tstamp+'.'+sta.channel+'1.SAC')
            # Horizontal 2 channel
            file2 = eventpath / (tstamp+'.'+sta.channel+'2.SAC')
            # Vertical channel
            fileZ = eventpath / (tstamp+'.'+sta.channel+'Z.SAC')
            # Pressure channel
            fileP = eventpath / (tstamp+'.'+sta.channel[0]+'DH.SAC')
            print("\n* Channels selected: " +
                  str(args.channels)+' and vertical')
            # If data file exists, continue
            if filename.exists():
                if not args.ovr:
                    print("*")
                    print("* "+str(filename))
                    print("* -> File already exists - continuing")
                    continue
            if "P" not in args.channels:
                # No pressure channel requested: 3-component download
                # Number of channels
                ncomp = 3
                # Comma-separated list of channels for Client
                channels = sta.channel.upper() + '1,' + \
                    sta.channel.upper() + '2,' + \
                    sta.channel.upper() + 'Z'
                # Get waveforms from client
                try:
                    print("* "+tstamp +
                          "                                     ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("*      ...done")
                except Exception:
                    print(
                        " Error: Unable to download ?H? components - " +
                        "continuing")
                    continue
                st = sth
            elif "12" not in args.channels:
                # Vertical + pressure only: 2-component download
                # Number of channels
                ncomp = 2
                # Comma-separated list of channels for Client
                channels = sta.channel.upper() + 'Z'
                # Get waveforms from client
                try:
                    print("* "+tstamp +
                          "                                     ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("*      ...done")
                except Exception:
                    print(
                        " Error: Unable to download ?H? components - " +
                        "continuing")
                    continue
                try:
                    print("* -> Downloading Pressure data...")
                    stp = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel='?DH',
                        starttime=t1, endtime=t2, attach_response=True)
                    print("*      ...done")
                    if len(stp) > 1:
                        print("WARNING: There are more than one ?DH trace")
                        print("*   -> Keeping the highest sampling rate")
                        print(
                            "*   -> Renaming channel to " +
                            sta.channel[0] + "DH")
                        if stp[0].stats.sampling_rate > \
                                stp[1].stats.sampling_rate:
                            stp = Stream(traces=stp[0])
                        else:
                            stp = Stream(traces=stp[1])
                except Exception:
                    print(" Error: Unable to download ?DH component - " +
                          "continuing")
                    continue
                st = sth + stp
            else:
                # Both horizontals and pressure: full 4-component download
                # Comma-separated list of channels for Client
                ncomp = 4
                # Comma-separated list of channels for Client
                channels = sta.channel.upper() + '1,' + \
                    sta.channel.upper() + '2,' + \
                    sta.channel.upper() + 'Z'
                # Get waveforms from client
                try:
                    print("* "+tstamp +
                          "                                     ")
                    print("* -> Downloading Seismic data... ")
                    sth = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel=channels,
                        starttime=t1, endtime=t2, attach_response=True)
                    print("*      ...done")
                except Exception:
                    print(
                        " Error: Unable to download ?H? components - " +
                        "continuing")
                    continue
                try:
                    print("* -> Downloading Pressure data...")
                    stp = client.get_waveforms(
                        network=sta.network, station=sta.station,
                        location=sta.location[0], channel='?DH',
                        starttime=t1, endtime=t2, attach_response=True)
                    print("     ...done")
                    if len(stp) > 1:
                        print("WARNING: There are more than one ?DH trace")
                        print("*   -> Keeping the highest sampling rate")
                        print(
                            "*   -> Renaming channel to " +
                            sta.channel[0] + "DH")
                        if stp[0].stats.sampling_rate > \
                                stp[1].stats.sampling_rate:
                            stp = Stream(traces=stp[0])
                        else:
                            stp = Stream(traces=stp[1])
                except Exception:
                    print(" Error: Unable to download ?DH component - " +
                          "continuing")
                    continue
                st = sth + stp
            # Detrend, filter
            # Anti-alias lowpass at the new Nyquist before resampling.
            st.detrend('demean')
            st.detrend('linear')
            st.filter('lowpass', freq=0.5*args.new_sampling_rate,
                      corners=2, zerophase=True)
            st.resample(args.new_sampling_rate)
            # Check streams
            is_ok, st = utils.QC_streams(t1, t2, st)
            if not is_ok:
                continue
            sth = st.select(component='1') + st.select(component='2') + \
                st.select(component='Z')
            # Remove responses
            print("* -> Removing responses - Seismic data")
            sth.remove_response(pre_filt=args.pre_filt, output=args.units)
            # Extract traces - Z
            trZ = sth.select(component='Z')[0]
            trZ = utils.update_stats(
                trZ, sta.latitude, sta.longitude, sta.elevation,
                sta.channel+'Z', evla=lat, evlo=lon)
            trZ.write(str(fileZ), format='SAC')
            # Extract traces and write out in SAC format
            # Seismic channels
            if "12" in args.channels:
                tr1 = sth.select(component='1')[0]
                tr2 = sth.select(component='2')[0]
                tr1 = utils.update_stats(
                    tr1, sta.latitude, sta.longitude, sta.elevation,
                    sta.channel+'1', evla=lat, evlo=lon)
                tr2 = utils.update_stats(
                    tr2, sta.latitude, sta.longitude, sta.elevation,
                    sta.channel+'2', evla=lat, evlo=lon)
                tr1.write(str(file1), format='SAC')
                tr2.write(str(file2), format='SAC')
            # Pressure channel
            if "P" in args.channels:
                stp = st.select(component='H')
                print("* -> Removing responses - Pressure data")
                stp.remove_response(pre_filt=args.pre_filt)
                trP = stp[0]
                trP = utils.update_stats(
                    trP, sta.latitude, sta.longitude, sta.elevation,
                    sta.channel[0]+'DH', evla=lat, evlo=lon)
                trP.write(str(fileP), format='SAC')
            else:
                stp = Stream()
            # NOTE(review): the EventStream pickle output is disabled; the
            # 'filename' existence check above therefore never triggers for
            # fresh runs.  Confirm whether this is intentional.
            # # Write out EventStream object
            # eventstream = EventStream(sta, sth, stp)
            # eventstream.save(filename)
if __name__ == "__main__":
    # Run main program when executed as a script
    main()
|
from flask import Flask, request, url_for, session
import mysql.connector
from mysql.connector import (connection)
import re
import requests
from flask_session import Session
from passlib.hash import sha256_crypt
app = Flask(__name__)
app.config["DEBUG"] = True
# NOTE(review): the secret key and DB credentials are hard-coded; they
# should come from environment variables or a config file (and DEBUG must
# be off) before any deployment.
app.secret_key = 'your secret key'
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = '@dmin@123'
app.config['MYSQL_DB'] = 'bookmyslot'
# Server-side sessions stored on the local filesystem.
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Single shared MySQL connection used by every request handler below.
mysql = connection.MySQLConnection(user=app.config['MYSQL_USER'], password=app.config['MYSQL_PASSWORD'],
                                   host=app.config['MYSQL_HOST'],
                                   database=app.config['MYSQL_DB'])
@app.route('/moviesbycity', methods=['GET','POST'])
def moviesbycity():
    """Return all movie schedules playing in the city named in the
    ``cityname`` form field."""
    if "cityname" in request.form:
        # Bug fix: cityname was never read from the form, so the old code
        # raised NameError on every request.
        cityname = request.form['cityname']
        cursor = mysql.cursor()
        # Parameterized query - the old string concatenation was open to
        # SQL injection.
        cursor.execute(
            'SELECT * FROM MovieSchedules ms,Theatres th,Location l,SHOWTIME sh,Movies m '
            'WHERE ms.Movies_ID=m.Movies_ID '
            'AND ms.Theatres_ID=th.Theatres_ID '
            'AND ms.SHOWTIME_ID=sh.SHOWTIME_ID '
            'AND th.Location_ID=l.Location_ID '
            'AND l.LocationName = %s', (cityname,))
        movies = cursor.fetchall()
        print(movies)
        return(movies)
    # NOTE(review): as in the original, a request without "cityname" falls
    # through and returns None, which Flask reports as an error.
@app.route('/checkmoviesshowtime', methods=['GET','POST'])
def checkmoviesshowtime():
    """Return every schedule/showtime for the movie named in the
    ``moviename`` form field."""
    if "moviename" in request.form:
        moviename = request.form['moviename']
        cursor = mysql.cursor()
        # Parameterized query - the old string concatenation was open to
        # SQL injection.
        cursor.execute(
            'SELECT * FROM MovieSchedules ms,Theatres th,Location l,SHOWTIME sh,Movies m '
            'WHERE ms.Movies_ID=m.Movies_ID '
            'AND ms.Theatres_ID=th.Theatres_ID '
            'AND ms.SHOWTIME_ID=sh.SHOWTIME_ID '
            'AND th.Location_ID=l.Location_ID '
            'AND m.Movies_Name = %s', (moviename,))
        movies = cursor.fetchall()
        print(movies)
        return(movies)
@app.route('/checkavailableseats', methods=['GET','POST'])
def checkavailableseats():
    """Return the remaining seat count for the schedule in the
    ``schedule_id`` form field."""
    if "schedule_id" in request.form:
        schedule_id = request.form['schedule_id']
        cursor = mysql.cursor()
        # Parameterized query (the old concatenation allowed SQL injection).
        # Also filter on ms.Schedule_id: the Movies alias (m) has no
        # Schedule_id column - bookticket's equivalent query uses ms.
        cursor.execute(
            'SELECT ms.seats FROM MovieSchedules ms,Theatres th,Location l,SHOWTIME sh,Movies m '
            'WHERE ms.Movies_ID=m.Movies_ID '
            'AND ms.Theatres_ID=th.Theatres_ID '
            'AND ms.SHOWTIME_ID=sh.SHOWTIME_ID '
            'AND th.Location_ID=l.Location_ID '
            'AND ms.Schedule_id = %s', (schedule_id,))
        seats = cursor.fetchall()[0]
        print(seats)
        return(seats)
@app.route('/signup', methods=['POST'])
def signup():
    """Register a new user with a hashed password.

    Expects ``username`` and ``password`` form fields; rejects duplicate
    or invalid usernames and stores a sha256_crypt hash of the password.
    """
    msg = ''
    if request.method == 'POST' and "username" in request.form and "password" in request.form:
        username = request.form['username']
        # passlib: .hash() is the supported method; .encrypt() is a
        # deprecated alias that warns on modern passlib versions.
        password = sha256_crypt.hash(request.form['password'])
        cursor = mysql.cursor()
        # Parameterized query - string concatenation allowed SQL injection.
        cursor.execute('SELECT * FROM Users WHERE UserName = %s', (username,))
        account = cursor.fetchone()
        if account:
            msg = 'Account already exists !'
        elif not re.match(r'[A-Za-z0-9]+', username):
            msg = 'Username must contain only characters and numbers !'
        elif not username:
            msg = 'Please fill out the form !'
        else:
            cursor.execute('INSERT INTO Users(Username,Password) VALUES (%s, %s)', (username, password))
            mysql.commit()
            msg = 'You have successfully registered !'
    elif request.method == 'POST':
        msg = 'Please fill out the form !'
    print(msg)
    return "<h1>"+msg+"</h1>"
@app.route('/login', methods=['GET','POST'])
def login():
    """Authenticate a user and populate the session on success.

    Bug fixes: the old code read request.form['username'] before checking
    the method/keys (causing a 400 on GET), and indexed the fetched row
    before checking it existed (TypeError for unknown usernames).
    """
    msg = ''
    if request.method == 'POST' and 'username' in request.form and 'password' in request.form:
        username = request.form['username']
        password = request.form['password']
        cursor = mysql.cursor()
        cursor.execute('SELECT * FROM Users WHERE UserName = %s', (username,))
        user_row = cursor.fetchone()
        if user_row and sha256_crypt.verify(password, user_row[2]):
            session["loggedin"] = True
            session["id"] = user_row[0]
            session["username"] = user_row[1]
            msg = 'Logged in successfully !'
        else:
            msg = 'Incorrect username or password !'
    print(msg)
    return "<h1>"+msg+"</h1>"
@app.route('/logout', methods=['GET','POST'])
def logout():
    """Clear the authenticated session keys and confirm logout."""
    for key in ('loggedin', 'id', 'username'):
        session.pop(key, None)
    msg = 'Logged out successfully !'
    print(msg)
    return "<h1>"+msg+"</h1>"
@app.route('/bookticket', methods=['GET'])
def bookticket():
    """Book ``count`` seats on a schedule for the logged-in user.

    Validates availability, then inserts the booking and decrements the
    free-seat count, rolling back on any failure.

    Fixes over the original: the original did not parse (broken line
    continuation, missing colon after ``else``, missing ``+`` in the SQL
    concatenation), used ``null`` instead of ``None``, compared a row tuple
    against an int, inverted the availability check (``seats > count``
    rejected satisfiable bookings), used invalid ``Update INTO`` SQL, and
    built queries by string concatenation (SQL injection).
    """
    msg = "<h1>Invalid request</h1>"  # default when required fields are absent
    if ("schedule_id" in request.form and "count" in request.form
            and "username" in request.form and session.get("id") is not None):
        schedule_id = request.form['schedule_id']
        count = int(request.form['count'])
        cursor = mysql.cursor()
        cursor.execute(
            'SELECT ms.seats FROM MovieSchedules ms,Theatres th,Location l,SHOWTIME sh,Movies m '
            'WHERE ms.Movies_ID=m.Movies_ID '
            'AND ms.Theatres_ID=th.Theatres_ID '
            'AND ms.SHOWTIME_ID=sh.SHOWTIME_ID '
            'AND th.Location_ID=l.Location_ID '
            'AND ms.Schedule_id = %s', (schedule_id,))
        seats = cursor.fetchall()[0][0]
        if seats < count:
            msg = "<h1> The number of seats available are less than the count provided</h1>"
        elif count <= 0:
            msg = "<h1>Enter a valid number of seats</h1>"
        else:
            try:
                cursor.execute(
                    'INSERT INTO BookTicket(Users_ID,Schedule_id,count) VALUES (%s, %s, %s)',
                    (session["id"], schedule_id, count))
                cursor.execute(
                    'UPDATE MovieSchedules ms SET ms.seats = ms.seats - %s '
                    'WHERE ms.Schedule_id = %s', (count, schedule_id))
                mysql.commit()
                msg = "<h1>Booking is successfull</h1>"
            except Exception:
                mysql.rollback()
                msg = " <h1>Error occured rollbacking</h1>"
    elif session.get("id") is None:
        msg = " <h1>Please login to continue!</h1>"
    return(msg)
# Start the Flask development server (DEBUG is enabled in the config above).
app.run()
|
from django.urls import path
from.import views
# URL routes for the app: each path maps to its view function in views.py.
urlpatterns=[
    path('',views.home,name="home"),
    path('register',views.register,name='register'),
    path('login',views.login,name='login'),
    path('logout',views.logout,name='logout'),
    path('arts',views.arts,name='arts'),
]
from Persons import *
from random import *
# from endings import *
from FUFS import *
def getStatus(self):
    """Print a full status report for player one."""
    player = self.player1
    player.showName()
    player.showHappyStatus()
    player.showParentStatus()
    player.showMarriedStatus()
    player.showAliveStatus()
    player.showGood()
    player.showBad()
def getDivorced(self):
    """Handle a divorce: reset stats, with a small chance of a good outcome."""
    player = self.player1
    player.setHappyStatus(randint(1, 10))
    player.setParentStatus(1)
    player.setMarriedStatus(0)
    player.setAliveStatus(1)
    print("You have gotten a divorce...")
    # Roughly a 3% roll for the favorable branch.
    if randint(0, 9800) > 9500:
        print("It's not that bad...\nYou keep your child...")
        player.setHappyStatus(randint(50, 80))
        player.setParentStatus(1)
        player.setMarriedStatus(0)
        player.setAliveStatus(1)
    else:
        self.rockBottom()
    self.refreshStats()
def rockBottom(self):
    """Zero out the player's life stats; random chance of the worst ending."""
    player = self.player1
    player.setHappyStatus(0)
    player.setParentStatus(0)
    player.setMarriedStatus(0)
    player.setAliveStatus(1)
    print("You have hit rock bottom...")
    if randint(0, 25) < 10:
        self.blowBrains()
    else:
        print("You will start over...")
        player.setHappyStatus(randint(5, 65))
        player.setParentStatus(0)
        player.setMarriedStatus(0)
        player.setAliveStatus(1)
    self.refreshStats()
def getSeparated(self):
    """Mark the couple as separated and send the case to the courts."""
    player = self.player1
    player.setHappyStatus(25)
    player.setParentStatus(0)
    player.setMarriedStatus(1)
    player.setAliveStatus(1)
    print("You have separated...\n")
    self.decideFate()
    self.refreshStats()
def getHelp(self):
    """Attempt marriage therapy; it fails more often than it succeeds."""
    player = self.player1
    player.setHappyStatus(75)
    player.setParentStatus(1)
    player.setMarriedStatus(1)
    player.setAliveStatus(1)
    print("You have sought help...\n")
    if randint(0, 25) < 15:
        print("Marriage therapy failed...\n")
        self.getSeparated()
    else:
        print("Marriage therapy was successful!\n")
    self.refreshStats()
def decideFate(self):
    """Let the courts rule: divorce (~60%) or court-ordered therapy."""
    print("The courts will decide...\n")
    if randint(0, 100) < 60:
        print("Shit did not go in your favor...")
        self.getDivorced()
    else:
        print("The courts order you to marriage therapy...\n")
        self.getHelp()
    self.refreshStats()
# Might be buggy
def happyEnding(self):
    """Pick an ending from the player's alive/parent/happiness state."""
    if self.player1.getAliveStatus() == 1 and self.player1.getParentStatus() == 1 and \
            self.player1.getHappyStatus() >= 30:
        print("Rejoice, everything will be A OK!!!!\n")
    elif self.player1.getAliveStatus() == 1 and self.player1.getParentStatus() == 0 and \
            self.player1.getHappyStatus() < 30 and randint(0, 25) > 1:
        print("Why even try anymore...")
        self.blowBrains()
        print("You Died... The End\n")
    else:
        print("It will be ok.\n")  # Bug ID: Does even if dead. Create con stmt in main()
    self.refreshStats()
|
import argparse
import logging
import os
import sys
from core.logger import LOGGER_NAME, setup_logging
from vincere import VincereAPI
from vincere.candidate import CandidateAPI
logger = logging.getLogger(LOGGER_NAME)
class MyParser(argparse.ArgumentParser):
    """ArgumentParser that prints full help on error and wires up the CLI."""
    def error(self, message):
        # Print the error plus full usage, then exit with status 2
        # (argparse's conventional error exit code).
        sys.stderr.write('error: %s\n' % message)
        self.print_help()
        sys.exit(2)
    def initialize(self):
        """Register all command-line options; exit with help if no args given."""
        group = self.add_argument_group('Requests')
        group.add_argument('-m', '--master', action="store_true", help="Generate Master Records", default=False)
        group.add_argument('-c', '--candidate', action="store", help="Candidate ID", dest="candidate")
        group.add_argument('-d', '--delete', help="Space separated candidate list.", dest="delete",
                           nargs='+')
        group.add_argument('-r', '--reason', action="store", help="Reason to delete candidates.", dest="reason")
        group.add_argument('-i', '--industries', action="store", help="Space separated industries list.",
                           dest="industries", nargs='+')
        group.add_argument('-e', '--expertise', action="store", help="Space separated function expertise list.",
                           dest="expertise", nargs='+')
        group.add_argument('-s', '--sub', action="store", help="Space separated Sub function expertise list.",
                           dest="sub", nargs='+')
        group.add_argument('-f', '--functional', action="store", help="Functional Expertise ID.", dest="functional_id")
        group.add_argument('-l', '--company_count', action="store", help="Company count.", dest="company_count")
        self.add_argument('-n', '--note', action="store_true", default=False, help="Get Candidate note")
        self.add_argument('-v', '--verbose', action="store_true", default=False, help="Print verbose logging on screen")
        # No arguments at all: show help instead of doing nothing.
        if len(sys.argv) == 1:
            self.print_help()
            sys.exit(1)
    def validate(self):
        """Parse args and enforce cross-option requirements; return the namespace."""
        result = self.parse_args()
        if result.master:
            # Master-record generation needs no other options.
            return result
        if not result.delete and not result.candidate:
            self.error("Missing Candidate ID")
        if result.delete and not result.reason:
            self.error("Missing Reason")
        if result.sub and not result.functional_id:
            self.error("Missing Functional Expertise ID")
        return result
def main1():
    """Ad-hoc manual test driver for CandidateAPI calls.

    Hard-coded IDs below are sample data; the commented-out lines are
    alternative calls kept for quick manual experiments.
    """
    p = CandidateAPI()
    industries = ['28735', '28885', '28886']
    candidate_id = 79255
    # p.set_industries(candidate_id=candidate_id, industries=industries)
    # p.set_company_count(candidate_id=candidate_id, company_counts=3)
    expertises = ['2992', '2995', '2996']
    # p.set_functional_expertise(candidate_id=candidate_id, expertises=expertises)
    functional_expertise_id = 2992
    sub_expertises = ['200', '198']
    p.set_sub_functional_expertise(candidate_id=candidate_id, functional_expertise_id=functional_expertise_id,
                                   expertises=sub_expertises)
    # v = VincereAPI(server_url="https://headhuntr.vincere.io/")
    # masters = ['industries', 'functionalexpertises']
    # sub_masters = {
    #     'functionalexpertises': {
    #         'key': 'functionalexpertise',
    #         'sub_key': 'subfunctionalexpertises',
    #     }
    # }
    # v.generate_master_data(masters=masters, sub_masters=sub_masters)
def generate_master():
    """Generate master records for industries and functional expertises."""
    logger.info("generating Master records")
    client = VincereAPI()
    client.generate_master_data(
        masters=['industries', 'functionalexpertises'],
        sub_masters={
            'functionalexpertises': {
                'key': 'functionalexpertise',
                'sub_key': 'subfunctionalexpertises',
            }
        },
    )
    return True
def delete_candidates(candidates=None, reason=''):
    """Bulk-delete the given candidate IDs, recording an audit reason.

    Args:
        candidates: iterable of candidate ID strings; must be non-empty.
        reason: free-text reason stored with the deletion.

    Raises:
        Exception: if no candidates were supplied.
    """
    # Default changed from a mutable [] literal (shared across calls) to None;
    # both are falsy, so the guard below behaves identically.
    if not candidates:
        raise Exception("no candidates specified to delete")
    logger.info("Deleting '%s' candidates", ",".join(candidates))
    api_candidate = CandidateAPI()
    api_candidate.bulk_delete_candidates(candidate_ids=candidates, reason=reason)
def main(result):
    """Dispatch the parsed CLI namespace to the matching API action."""
    if result.master:
        return generate_master()
    if result.delete:
        return delete_candidates(candidates=result.delete, reason=result.reason)
    if not result.candidate:
        # Nothing actionable was requested.
        return None
    api = CandidateAPI()
    if result.note:
        return {"note": api.get_note_on(candidate_id=result.candidate)}
    if result.company_count:
        return api.set_company_count(candidate_id=result.candidate,
                                     company_counts=result.company_count)
    if result.industries:
        return api.set_industries(candidate_id=result.candidate,
                                  industries=result.industries)
    if result.expertise:
        return api.set_functional_expertise(candidate_id=result.candidate,
                                            expertises=result.expertise)
    if result.sub:
        return api.set_sub_functional_expertise(candidate_id=result.candidate,
                                                functional_expertise_id=result.functional_id,
                                                expertises=result.sub)
if __name__ == '__main__':
    # Run from the script's own directory so relative paths resolve consistently.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    parser = MyParser(add_help=True)
    parser.initialize()
    result = parser.validate()
    # Mirror log output to the screen only when -v/--verbose was passed.
    setup_logging(scrnlog=result.verbose)
    print(result)
    print(main(result=result))
|
#! /usr/bin/env python
# Demonstrates `continue`: the second print is dead code on every iteration.
# Converted from Python 2 print statements (a SyntaxError under Python 3,
# which the bare `python` shebang resolves to on modern systems).
for i in range(2):
    print("this is before continue")
    continue
    print("this is after the countinue statement ")  # never reached
|
import pandas as pd
import matplotlib.image as mpimg
from matplotlib import pyplot as plt
import numpy as np
from io import BytesIO
from PIL import Image
import base64
import keras
from keras.models import Sequential
from keras.layers.normalization import BatchNormalization
from keras.layers.core import Dense, Activation, Flatten
from keras.layers import Convolution2D
from keras.layers.pooling import MaxPooling2D
from keras.layers.convolutional import Cropping2D
from keras.layers import Lambda
from keras.layers.core import Dropout
from keras.optimizers import Adam
# Folder with the training data
simulator_output_folder = "training_data_final"
# Reading driving log into a pandas data frame for processing
# (header=None: the simulator CSV has no header row, so columns are 0..N)
driving_log = pd.read_csv(simulator_output_folder + '/driving_log.csv', header=None)
def read_image(file):
    """Return the image data from `file` as a numpy array.

    Opens the image inside a context manager so the underlying file
    handle is closed promptly (the original left it open until GC,
    leaking descriptors across thousands of training images).
    """
    with Image.open(file) as image:
        return np.asarray(image)
# Changing image file paths to simulator_output_folder
# Useful when the model is trained on a remote machine
# Columns 0/1/2 appear to hold center/left/right camera image paths
# (see the generators below) — confirm against the simulator's CSV format.
driving_log[0] = driving_log[0].apply(lambda x: simulator_output_folder + '/IMG/' + x.split('/')[-1])
driving_log[1] = driving_log[1].apply(lambda x: simulator_output_folder + '/IMG/' + x.split('/')[-1])
driving_log[2] = driving_log[2].apply(lambda x: simulator_output_folder + '/IMG/' + x.split('/')[-1])
#Splitting data into training and validation set
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# newer environments need `from sklearn.model_selection import train_test_split`.
from sklearn.cross_validation import train_test_split
data_train, data_test = train_test_split(driving_log, test_size=0.20, random_state=42)
# Defining the model for the task
# NOTE(review): the 24/36/48/64/64 conv stack with a 1164/100/50/10 dense head
# resembles the NVIDIA end-to-end driving architecture — confirm before citing.
model = Sequential()
# Crop top and bottom sections of the image
# (removes 50px of sky and 20px of hood; input frames are 160x320 RGB)
model.add(Cropping2D(cropping=((50,20), (0,0)), input_shape=(160,320,3)))
# Normalize the image (pixel values from [0, 255] to [-0.5, 0.5])
model.add(Lambda(lambda x: (x / 255.0) - 0.5))
# First convolution layer
model.add(Convolution2D(24, 5, 5,
                        border_mode='valid',
                        subsample=(2, 2)))
model.add(Activation('relu'))
# Second convolution layer
model.add(Convolution2D(36, 5, 5,
                        border_mode='valid',
                        subsample=(2, 2)))
model.add(Activation('relu'))
# Third convolution layer
model.add(Convolution2D(48, 5, 5,
                        border_mode='valid',
                        subsample=(2, 2)))
model.add(Activation('relu'))
# Fourth convolution layer
model.add(Convolution2D(64, 3, 3,
                        border_mode='valid'))
model.add(Activation('relu'))
# Fifth convolution layer
model.add(Convolution2D(64, 3, 3,
                        border_mode='valid'))
model.add(Activation('relu'))
# Flatten output of previous layer to 1D array
model.add(Flatten())
# Dropout for reducing overfitting
model.add(Dropout(0.25))
# First fully connected layer
model.add(Dense(1164, activation="relu"))
# Second fully connected layer
model.add(Dense(100))
model.add(Activation('relu'))
# Third fully connected layer
model.add(Dense(50))
model.add(Activation('relu'))
# Fourth fully connected layer
model.add(Dense(10))
model.add(Activation('relu'))
# Output layer (single regression value: the steering angle fed to fit below)
model.add(Dense(1))
def input_generator(driving_log):
    """Yield (images, steerings) training batches of 129 samples forever.

    Each CSV row contributes three samples: center camera with the recorded
    steering angle, left camera with +0.2 correction, right with -0.2.
    The dataframe is reshuffled at the start of every pass.
    """
    while 1:
        driving_log = driving_log.sample(frac=1)
        count = 0
        images = []
        steerings = []
        for row in driving_log.itertuples():
            # Center camera image
            images.append(read_image(row[1]))
            steerings.append(row[4])
            # Left camera image
            images.append(read_image(row[2]))
            steerings.append(row[4] + 0.2)
            # Right camera image
            images.append(read_image(row[3]))
            steerings.append(row[4] - 0.2)
            count += 3
            if count == 129:
                yield (np.asarray(images), np.asarray(steerings))
                count = 0
                images = []
                steerings = []
        # Flush the final partial batch, but never an empty one: the original
        # unconditionally yielded here, emitting a zero-length batch whenever
        # the row count was an exact multiple of 43 (43 rows * 3 = 129).
        if images:
            yield (np.asarray(images), np.asarray(steerings))
def validation_generator(driving_log):
    """Yield (images, steerings) validation batches of 128 samples forever.

    Uses only the center-camera image and the recorded steering angle —
    no correction offsets, matching how the model is evaluated.
    """
    while 1:
        driving_log = driving_log.sample(frac=1)
        count = 0
        images = []
        steerings = []
        for row in driving_log.itertuples():
            # Center camera only
            images.append(read_image(row[1]))
            steerings.append(row[4])
            count += 1
            if count == 128:
                yield (np.asarray(images), np.asarray(steerings))
                count = 0
                images = []
                steerings = []
        # Avoid yielding an empty trailing batch (possible in the original
        # whenever the row count was an exact multiple of 128).
        if images:
            yield (np.asarray(images), np.asarray(steerings))
# Train model using Adam optimizer and MSE loss
model.compile(Adam(lr=0.0001), 'mse', ['mean_squared_error'])
# Each training row yields 3 samples (center/left/right), hence len*3 per epoch.
history = model.fit_generator(input_generator(data_train), samples_per_epoch=len(data_train)*3, nb_epoch=5, validation_data=validation_generator(data_test), nb_val_samples=len(data_test))
# Saving trained model for simulator
model.save('model.h5')
|
import pandas as pd
import matplotlib.pyplot as plt
# Load the names workbook; rows carry at least 'Plec' (sex) and 'Liczba' (count).
xlsx = pd.ExcelFile(r'D:\isi7\Python\1cwiczenia\wd_cw8\imiona.xlsx')
df = pd.read_excel(xlsx)
# Total counts for women ('K') and men ('M').
Kob = df[df['Plec']==('K')].agg({'Liczba':['sum']})
Men = df[df['Plec']==('M')].agg({'Liczba':['sum']})
# NOTE(review): `lista` is computed but never used below.
lista = df.groupby(['Plec']).agg({'Liczba':['sum']})
etykiety = ['K','M']
wartosci = [Kob.iat[0,0],Men.iat[0,0]]
plt.subplot(1,3,1)
plt.bar(etykiety,wartosci)
# NOTE(review): subplots 2 and 3 are created but nothing is drawn in them —
# possibly unfinished; confirm intended content.
plt.subplot(1,3,2)
plt.subplot(1,3,3)
plt.show()
|
'''
.. py:module:: tinyurl
:platform: Unix
Shorten URL by querying Tinyurl.com
'''
import urllib
base_url = 'http://tinyurl.com/api-create.php?url='
def get(url):
    '''Shorten given URL.

    :param url: URL to shorten
    :type url: str
    :returns: str -- URL shortened by querying tinyurl.com
    '''
    # Python 3 port: urllib.urlopen no longer exists; use urllib.request.
    # Local imports keep this module importable without touching the
    # top-level `import urllib` the rest of the file may rely on.
    from urllib.parse import quote
    from urllib.request import urlopen
    # Percent-encode the URL so query strings and fragments survive
    # being embedded as a single query parameter.
    compound_url = base_url + quote(url, safe='')
    return urlopen(compound_url).read().decode()
|
from __future__ import annotations
from prettyqt import core
from prettyqt.qt import QtCharts
class AbstractSeriesMixin(core.ObjectMixin):
    """Mixin combining prettyqt's ObjectMixin behavior for chart series types."""
    pass
class AbstractSeries(AbstractSeriesMixin, QtCharts.QAbstractSeries):
    """prettyqt wrapper around QtCharts.QAbstractSeries."""
    pass
|
# Demo of try/except/finally around file I/O.
f = open('email.txt')  # opened read-only; a missing file raises here, before the try
try:
    f.write()  # raise Exception() — fails: no argument, and the handle is read-only
except Exception:
    pass  # error deliberately swallowed for the demo
finally:
    f.close()  # always closed here (original comment wrongly said "never closed")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.