blob_id stringlengths 40 40 | language stringclasses 1
value | repo_name stringlengths 5 133 | path stringlengths 2 333 | src_encoding stringclasses 30
values | length_bytes int64 18 5.47M | score float64 2.52 5.81 | int_score int64 3 5 | detected_licenses listlengths 0 67 | license_type stringclasses 2
values | text stringlengths 12 5.47M | download_success bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
e247ca653220cf7b2dad17f8abbc0229044d6334 | Python | shuoGG1239/ui_shell | /QLineEditPro.py | UTF-8 | 851 | 2.640625 | 3 | [] | no_license | from PyQt5.QtWidgets import QLineEdit
from PyQt5.QtCore import QFileInfo
class QLineEditPro(QLineEdit):
    """QLineEdit that accepts file drag-and-drop.

    When a file is dropped onto the widget, its local filesystem path
    becomes the line edit's text.
    """

    def __init__(self, *__args):
        super(QLineEditPro, self).__init__(*__args)
        # Opt in to drag-and-drop events; QLineEdit rejects them by default.
        self.setAcceptDrops(True)

    def dragEnterEvent(self, event):
        # Only accept drags that carry URLs (i.e. files).
        if event.mimeData().hasUrls():
            event.acceptProposedAction()

    def dragMoveEvent(self, event):
        if event.mimeData().hasUrls():
            event.acceptProposedAction()

    def dropEvent(self, event):
        mime = event.mimeData()
        if not mime.hasUrls():
            return
        # Use only the first dropped file; convert its URL to a local path.
        first_url = mime.urls()[0]
        info = QFileInfo(first_url.toLocalFile())
        self.setText(info.filePath())
        event.acceptProposedAction()
| true |
# Read an integer N from stdin and print N-1 down to 0, one per line.
a = int(input())
for x in range(a - 1, -1, -1):
    print(x)
| true |
77c0575ad68d541962b5c0bc53e7a25c7beee7b4 | Python | camwin/bots | /TurtleBot.py | UTF-8 | 10,546 | 2.828125 | 3 | [] | no_license | import rg, random
#TODO
# Figure out way to prevent more than one bot from locking up with an enemy. Derive way for
# bots to gang up effectively.
# a. Make an 'engaged' list, that contains list of bots currently 'attacking' a square
# i. Have bots not close to enemies simply reference the closest friendly in engaged list then move to that closest enemy
#TODO
# DEBUG THIS --> Add check for Friendly's blocking Friendly's SpawnCheck (and other Friendly blocks)
#TODO
# DEBUG THIS --> Last resort SUICIDE for those completely blocked in Spawn
#TODO
# Print stats summary at end of each turn 99 or end game
# Is enemy in the center?
def IsCenterAvailable(self, game):
    """Return False if any enemy robot occupies the board center, else True.

    BUGFIX: the original compared `self.location` (our own square) against
    CENTER_POINT and returned after looking at only the first enemy robot,
    so the answer depended on dict iteration order, not on the center square.
    """
    for loc, bot in game.robots.items():
        if bot.player_id != self.player_id and loc == rg.CENTER_POINT:
            return False
    return True
# find closest enemy - Method from space-cadet
def GetClosestEnemy(self):
    """Scan every robot and return the location of the nearest enemy.

    Relies on Robot.act resetting self.closestEnemy to the (1000, 1000)
    sentinel each turn before this is called; the result is also cached
    on self.closestEnemy.
    """
    for loc, bot in self.game.get('robots').items():
        if bot.player_id != self.player_id:
            # <= keeps the most recently seen enemy among equally-close ones.
            if rg.wdist(loc, self.location) <= rg.wdist(self.location, self.closestEnemy):
                self.closestEnemy = loc
    # print "Enemy = %d, %d" % self.closestEnemy
    return self.closestEnemy
# find closest friend - Method from space-cadet
def GetClosestFriendly(self):
    """Scan every robot and return the location of the nearest friendly bot.

    NOTE(review): the scan includes this bot itself (player_id matches), so
    the "closest friendly" is usually our own square -- confirm whether
    callers intend that.
    """
    for loc, bot in self.game.get('robots').items():
        if bot.player_id == self.player_id:
            if rg.wdist(loc, self.location) <= rg.wdist(self.location, self.closestFriend):
                self.closestFriend = loc
    # print "Friend = %d" % self.closestFriend
    return self.closestFriend
# Returns list of all enemy locations - Hat tip towards Khal Robo
def EnemyLocations(self, game):
    """List the board locations of every robot not owned by this player."""
    return [loc for loc, bot in game.robots.items()
            if bot.player_id != self.player_id]
# Returns list of all Friendly locations
def FriendlyLocations(self, game):
    """List the board locations of every robot owned by this player (self included)."""
    return [loc for loc, bot in game.robots.items()
            if bot.player_id == self.player_id]
# Suicide, but only if its worth it, bro - Method borrowed from Khal Robo, borrowed from ExSpace
def HonorableDeath(self, game):
    """Decide whether suiciding now trades our hp for enough enemy damage.

    Returns True (and bumps the suicide tally) when surrounded by 3+ enemies
    on <41 hp, or by exactly 2 enemies on <21 hp; otherwise falls through
    and implicitly returns None (falsy).
    """
    # if there are 3+ enemies around, suicide! (code stolen from the ExSpace robot because this is my first day using python)
    aroundE = 0
    for loc, bot in game.robots.items():
        if bot.player_id != self.player_id:
            if rg.wdist(loc, self.location) <= 1:
                aroundE += 1
    if aroundE >= 3 and self.hp < 41:
        print "kaboom (3+)"
        self.botSuicide += 1
        return True
    # if health is low, suicide for fewer enemies
    if aroundE == 2 and self.hp < 21:
        self.botSuicide += 1
        print "kaboom (2)"
        return True
# Run away!!!
def ItsNotWorthItBro(self, game):
    """True when this bot is weak (<=15 hp) and an enemy is within 2 squares.

    Implicitly returns None (falsy) otherwise.
    """
    # for loc,bot in game.robots.items():
    #     if bot.player_id == self.player_id:
    if self.hp <= 15:
        if rg.wdist(self.location, GetClosestEnemy(self)) <= 2:
            # if rg.wdist(self.location, GetClosestFriendly(self)) > 2:
            print "Just walk away bro, it's not worth it (%d, %d)" % self.location
            return True
# Returns a list of adjacent areas
def listOfGoodMoves(loc):
    """Return the walkable squares adjacent to *loc* (invalid/obstacle filtered)."""
    GoodLocsAround = []
    GoodLocsAround = rg.locs_around(loc, filter_out=('invalid', 'obstacle'))
    # print "List of good locations to move to: ", GoodLocsAround
    return GoodLocsAround
# Just prints suicide stats
def SuicideStats(self, stat):
    """Print the end-of-game count of honorable suicides."""
    print "%d bots died honorably" % stat
# Check to see if bot is in spawn and spawn-turn coming up.
def SpawnKillCheck(self, game):
    """True if a spawn wave is imminent (turn 7-0 mod 10) and we sit on a spawn square.

    Implicitly returns None (falsy) otherwise.
    """
    if game.turn % 10 in [7, 8, 9, 0] and 'spawn' in rg.loc_types(self.location):
        return True
# Method that converts a bad move into a good one (think spin-move around defender in football/basketball)
def SpinMove(self, loc):
    """Sidestep a blocked square: shift *loc* by one along the axis we are NOT on.

    If *loc* shares our column, jiggle its x by +/-1; otherwise jiggle its y.
    """
    offset = random.choice((-1, 1))
    if self.location[0] == loc[0]:
        return (loc[0] + offset, loc[1])
    return (loc[0], loc[1] + offset)
# Prediction method, returns a location that would be the shortest path between your bot and the closestEnemy
def TheForce(self, game, myLoc, enemyLoc):
    """Predict the enemy's next square: the walkable neighbour of *enemyLoc*
    closest (by walking distance) to this bot.

    BUGFIX: the original never updated bestMoveSoFar, so every candidate
    under the initial 100 overwrote the "best" and the LAST neighbour was
    returned instead of the closest one.

    Returns 0 if the enemy has no walkable neighbours (original sentinel).
    """
    bestPrediction = 0
    bestDistSoFar = 100
    for loc in listOfGoodMoves(enemyLoc):
        d = rg.wdist(self.location, loc)
        if d < bestDistSoFar:
            bestDistSoFar = d
            bestPrediction = loc
    return bestPrediction
# I love being a turtle!
def TurtleMode(self, game):
    """Guard-mode check: weak (<=15 hp), enemy adjacent, and no friendly nearby.

    Implicitly returns None (falsy) otherwise. Currently unused -- the call
    site in Robot.act is commented out.
    """
    # for loc,bot in game.robots.items():
    #     if bot.player_id == self.player_id:
    if self.hp <= 15:
        if rg.wdist(self.location, GetClosestEnemy(self)) == 1:
            if rg.wdist(self.location, GetClosestFriendly(self)) > 1:
                print "Bot at %d %d entered turtle mode" % self.location
                return True
class Robot:
botSuicide = 0
def act(self, game):
#init vars
self.game = game
#Create list of enemy locations
self.enemyLocations = EnemyLocations(self,game)
self.friendlyLocations = FriendlyLocations(self,game)
#Initial vars for closestEnemy/Friend
self.closestEnemy = (1000, 1000)
self.closestFriend = (1000, 1000)
#print "Enemy List: " , self.enemyLocations
#print "Friendly List: " , self.friendlyLocations
####### Print Stats #######
if game.turn == 99:
SuicideStats(self,self.botSuicide)
####### Actions Prioritized Highest to Lowest #######
# If spawn turn coming up, check if in spawn and make a good move toward center
#(Old) If spawn turn coming up, try to go to Center
if SpawnKillCheck(self,game):
# first, check if move toward center is where an enemy is standing, SpinMove
if rg.toward(self.location, rg.CENTER_POINT) in self.enemyLocations:
jukeAroundEnemyFromSpawn = rg.toward(self.location,SpinMove(self,rg.toward(self.location, rg.CENTER_POINT)))
print "Enemy in the way, trying to juke to ", jukeAroundEnemyFromSpawn
if jukeAroundEnemyFromSpawn in self.enemyLocations:
print "GET OFF ME BRO! Boxed in, attacking or suiciding", jukeAroundEnemyFromSpawn
if self.hp <= 12:
self.botSuicide += 1
return ['suicide']
return ['attack', jukeAroundEnemyFromSpawn]
return ['move', rg.toward(self.location,jukeAroundEnemyFromSpawn)]
# next, check if move toward center is where a Friendly is standing, SpinMove
if rg.toward(self.location, rg.CENTER_POINT) in self.friendlyLocations:
jukeAroundFriendlyFromSpawn = rg.toward(self.location,SpinMove(self,rg.toward(self.location, rg.CENTER_POINT)))
print "Friendly in the way, trying to juke to ", jukeAroundFriendlyFromSpawn
return ['move', rg.toward(self.location,jukeAroundFriendlyFromSpawn)]
# if move is clear, just do it
print "Spawn coming up, dipping toward center from (%d, %d)" %self.location
return ['move', rg.toward(self.location, rg.CENTER_POINT)]
#If low on health and close to enemy, suicide
if HonorableDeath(self, game):
return ['suicide']
# if no buddy is near and health is below 10 (changed from 15 5-14-14), go to guarding
#if TurtleMode(self,game):
# print "Don't hurt me, bro! (%d, %d)" %self.location
# return ['guard']
#If an enemy is close, Attack. Otherwise, juke to center
#print "self.location, GetClosestEnemy", self.location, " ", GetClosestEnemy(self)
if rg.wdist(self.location, GetClosestEnemy(self)) == 1:
print "Attacking: ", GetClosestEnemy(self)
return ['attack', GetClosestEnemy(self)]
if ItsNotWorthItBro(self,game):
if rg.toward(self.location, rg.CENTER_POINT) in self.friendlyLocations:
jukeAroundFriendly = rg.toward(self.location,SpinMove(self,rg.toward(self.location, rg.CENTER_POINT)))
print "Friendly in the way of escape to center, trying to juke to ", jukeAroundFriendly
return ['move', jukeAroundFriendly]
if rg.toward(self.location, rg.CENTER_POINT) in self.enemyLocations:
jukeAroundEnemy = rg.toward(self.location,SpinMove(rg.toward(self.location, rg.CENTER_POINT)))
print "Enemy in the way of escape to center, trying to juke to ", jukeAroundEnemy
return ['move', jukeAroundEnemy]
# Get 1 square away from enemy, then attack square that enemy may move to.
# Determine if enemy is 2 paces away (i.e., one pace before striking distance)
# space-cadet, with prediction - 15-5, 12-6 | Without 10-3, 10-3
# StarBot with - 18-8, 18-8| Without 11-4, 17-2
if rg.wdist(self.location, GetClosestEnemy(self)) == 2:
#determine most probable move enemy will take toward you
#print "Predicting enemy will move to (%d, %d)" %TheForce(self, game, self.location, GetClosestEnemy(self))
return ['attack', rg.toward(self.location, TheForce(self, game, self.location, GetClosestEnemy(self)))]
#If an enemy is not close, move towards one
#check if enemy is more than one pace away
if rg.wdist(self.location, GetClosestEnemy(self)) > 1:
#if step towards enemy is unblocked by friendly, make the move
if rg.toward(self.location, GetClosestEnemy(self)) not in self.friendlyLocations:
print "Moving toward nearest enemy via (%d, %d)" %rg.toward(self.location, GetClosestEnemy(self))
return ['move', rg.toward(self.location, GetClosestEnemy(self))]
#if step is blocked by friendly, SpinMove will modify it one way or the other.
else:
print "Friendly in the way, SpinMove around him at (%d, %d)" %rg.toward(self.location, GetClosestEnemy(self))
return ['move', rg.toward(self.location,SpinMove(self,rg.toward(self.location, GetClosestEnemy(self))))]
#old return ['move', rg.toward(self.location, rg.CENTER_POINT)]
print "Nothing to do, guarding at ", self.location
return ['guard'] | true |
4888a595b4b8e352faa2ff5385d1e46623732b4b | Python | TWItachi/python- | /pa r 4.py | UTF-8 | 377 | 2.59375 | 3 | [] | no_license | import requests
import re
# Desktop Chrome User-Agent so Zhihu serves the regular HTML page.
headers = {
    'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
}
# Fetch the "explore" page and scrape question titles out of the raw HTML.
r = requests.get("https://www.zhihu.com/explore", headers=headers)
# NOTE(review): the greedy inner group (.*>) can over-capture up to the last
# '>' before </a> on a line -- verify against the live page markup.
pattern = re.compile('explore-feed.*?question_link.*?>(.*>)</a>', re.S)
titles = re.findall(pattern, r.text)
print(titles)
9ccbe53b25e47ae695b5e8bc3302943b63295574 | Python | codebendercc/compiler | /Symfony/app/Resources/autocompletion/request.py | UTF-8 | 3,113 | 2.609375 | 3 | [
"BSD-3-Clause",
"MIT",
"BSD-2-Clause"
] | permissive | import json
from complete import Completer, CodeCompletionResults
from response import Response
from logger import *
def _read_json_file(p):
try:
with open(p, 'r') as f:
s = f.read()
except IOError as e:
log_error(REQ_IO_ERROR)
return s
def _load_json_string(s):
try:
d = json.loads(s)
except:
log_error(REQ_JSON_LOADS_ERROR)
return d
def _parse_json_data(d):
    """Extract (file, row, column-prefix_len, prefix, command argv) from a request dict.

    Python 2 code: the `unicode` type checks only exist there. Validation
    failures are logged, not raised; a missing key still reaches the final
    return with unbound locals -- NOTE(review): confirm callers rely on the
    resulting NameError as the failure mode.
    """
    try:
        fname = d['file']
        line = d['row'];
        column = d['column'];
        prefix = d['prefix'];
        cmd = d['command'].split()
        valid = (isinstance(fname, str) or isinstance(fname, unicode)) and \
                (isinstance(cmd[0], str) or isinstance(cmd[0], unicode)) and \
                (isinstance(prefix, str) or isinstance(prefix, unicode)) and \
                isinstance(line, int) and (isinstance(column, int))
        if not valid:
            log_error(REQ_INV_TYPES)
    except KeyError as e:
        log_error(REQ_KEY_ERROR)
    except AttributeError as e:
        log_error(REQ_ATTRIB_ERROR)
    # Remove single quotes in filenames and update column position
    # base on the prefix's length
    return (fname.replace("'", ""), line, column - len(prefix), prefix,
            [str(x.replace("'", "")) for x in cmd])
def correct_clang_arguments(fname, args):
    """Keep only include (-I) and define (-D) flags, prefixed by the compile target."""
    kept = [arg for arg in args if arg.startswith('-I') or arg.startswith('-D')]
    return ['-c ' + fname] + kept
def file_len(fname):
    """Return the number of lines in *fname*.

    BUGFIX: the original returned `i + 1` where `i` was the enumerate index,
    which raised UnboundLocalError for an empty file; this returns 0 instead.
    """
    with open(fname) as f:
        return sum(1 for _ in f)
def has_ino_origin(fname):
    """True if *fname* is a .cpp/.c file generated from a sibling .ino sketch.

    BUGFIX: the original stripped a fixed 4 characters (`fname[:-4]`), which
    is correct for ".cpp" but mangles the stem of ".c" files, so C sources
    generated from sketches were never recognised.
    """
    import os.path
    for ext in ('.cpp', '.c'):
        if fname.endswith(ext):
            return os.path.isfile(fname[: -len(ext)] + '.ino')
    return False
def calculate_line_diff(fname):
    """Line-count delta between a generated .cpp/.c file and its source .ino.

    Arduino preprocessing prepends declarations, so completion line numbers
    from the editor must be shifted by this amount. Returns 0 for files
    that were not generated from a sketch.
    """
    if not has_ino_origin(fname):
        return 0
    return file_len(fname) - file_len(fname[:-4] + '.ino')
class Request(object):
    """A clang code-completion request parsed from a JSON file on disk."""

    def __init__(self, path):
        # Load and validate the JSON request, then normalise the clang flags.
        s = _read_json_file(path)
        d = _load_json_string(s)
        self.fname, self.line, self.column, self.prefix, cmd = _parse_json_data(d)
        self.args = correct_clang_arguments(self.fname, cmd)

    def get_response(self):
        """Run clang completion at this request's (line, column) and wrap the results."""
        # Shift the line number when the .cpp was generated from an .ino sketch.
        self.line = self.line + calculate_line_diff(self.fname)
        completer = Completer(self.fname, self.line, self.column, self.args)
        code_completion = CodeCompletionResults(completer.code_completion)
        return Response(code_completion, self.prefix);

    def __str__(self):
        # Human-readable dump for debugging/logging.
        ret = ''
        ret = ret + 'file name: ' + self.fname + '\n'
        ret = ret + 'line: ' + str(self.line) + '\n'
        ret = ret + 'column: ' + str(self.column) + '\n'
        ret = ret + 'prefix: ' + str(self.prefix) + '\n'
        ret = ret + 'args:\n'
        for arg in self.args:
            ret = ret + '\t' + arg + '\n'
        return ret
| true |
28473a37d38a5cc2bed22d2325ae3d3f2746b9cb | Python | kesav21/project-euler | /python/p027 | UTF-8 | 832 | 3.375 | 3 | [] | no_license | #!/usr/bin/env python
from tools import sieve
from timeit import timeit
def main():
    """Project Euler 27: report quadratics n^2 + a*n + b with long prime runs.

    b ranges over primes below 1000; a ranges around +/-2*sqrt(b). The prime
    set is sieved up to f(upper_b, upper_a, upper_b), the largest value a
    candidate quadratic can reach in the checked range.
    """
    upper_a = 1000
    upper_b = 1000
    brange = set(sieve(upper_b))
    primes = set(sieve(f(upper_b, upper_a, upper_b)))
    p = permutations(brange)
    for (a, b) in p:
        # Only print candidates whose first 39 values are all prime.
        if prime_check(a, b, primes):
            print(a, b, prime_count(a, b, primes))
def permutations(brange):
    """Yield (a, b) coefficient pairs: b from *brange*, a in [-2*sqrt(b), 2*sqrt(b))."""
    for b in brange:
        half = 2 * int(b ** 0.5)
        for a in range(-half, half):
            yield (a, b)
def prime_count(a, b, primes):
    """First n in 1..b for which f(n, a, b) is not prime (None if all are)."""
    return next(
        (n for n in range(1, b + 1) if f(n, a, b) not in primes),
        None,
    )
def prime_check(a, b, primes):
    """True when f(n, a, b) is prime for every n in 1..39."""
    return all(f(n, a, b) in primes for n in range(1, 40))
def f(n, a, b):
    """Evaluate the quadratic n^2 + a*n + b."""
    return (n + a) * n + b
if __name__ == "__main__":
main()
| true |
a02674f0fb121fd4a07dd4b2e169647f8811af89 | Python | 173647085/practice | /homework2/2_8.py | UTF-8 | 478 | 4.0625 | 4 | [] | no_license | #8 请用Python定义一个函数,给定一个字符串,找出该字符串中出现次数最多的那个字符,并打印出该字符及其出现的次数。
import string
# Input string to analyse (fixed sample for the exercise).
str1="hrtftdghed"
str2=string.ascii_letters  # all ASCII letters -- the alphabet we count over
def count():
    """Print the most frequent character of the global string `str1` as "char:count".

    BUGFIX: the original printed a debug dump of the whole counter dict and
    then every character with a nonzero count, instead of the single most
    frequent character the exercise asks for; it also raised KeyError on any
    character of `str1` outside `str2`.
    """
    counts = {}
    for ch in str1:
        counts[ch] = counts.get(ch, 0) + 1
    best = max(counts, key=counts.get)
    print("%s:%d" % (best, counts[best]))
count() | true |
7d1b8706a981ac865d79235e4f5acbc4751c91f7 | Python | Phimos/Comments-Mining-System-for-Scholar-Citations | /citeminer/utils/__init__.py | UTF-8 | 12,218 | 2.609375 | 3 | [] | no_license | import json
import os
from copy import deepcopy
from functools import partial
from multiprocessing import Pool
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Sized, Tuple
import filetype
import pdfx
import requests
from citeminer.crawlers.aminer import AMinerCrawler
from citeminer.crawlers.downloader import BaseDownlaoder
from citeminer.pdfparser.pdf2txt import extract_text
from citeminer.utils.markdown_writer import CitingDocument
from fuzzywuzzy import fuzz, process
from tqdm import tqdm
def apply_func(
    func: Callable,
    iterator: Iterable,
    parallel: bool = False,
    processes: int = 4,
) -> List[Any]:
    """Apply function on all items in an iterator

    It's simply a warpped map function, provide a progress bar to track real-time
    progress and estimated completion time and support parallel processing. (Serial
    mode is used by default)

    Args:
        func: A callable processing function
        iterator: An iterable variable (list in citeminer) containing tasks need
            to be processed
        parallel: A boolean indicating whether to use parallel processing
        processes: An integer used to indicate the number of concurrent processes,
            only valid when parallel is True

    Returns:
        A list containing all return values
    """
    result = []
    if parallel:
        pool = Pool(processes)
        # NOTE(review): len(iterator) requires a Sized iterable (a list in
        # practice), despite the Iterable annotation -- confirm callers.
        result = list(
            tqdm(
                pool.imap(func, iterator),
                total=len(iterator),
            )
        )
        pool.close()
    else:
        for item in tqdm(iterator):
            result.append(func(item))
    return result
# Json Load & Dump
def dump_json(obj: Any, file_path: str) -> None:
    """Serialize *obj* as pretty-printed, key-sorted JSON, creating parent dirs."""
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, "w") as fp:
        json.dump(obj, fp, sort_keys=True, indent=4, separators=(",", ":"))
def load_json(file_path: str) -> Dict[str, Any]:
assert file_path.endswith(".json")
fp = open(file_path)
result = json.load(fp)
fp.close()
return result
# Fuzzy Match
def fuzzy_match(str1: str, str2: str, threshold: int = 85) -> bool:
    """Case-insensitive fuzzy string comparison: ratio >= *threshold*."""
    return fuzz.ratio(str1.lower(), str2.lower()) >= threshold
def fuzzy_extract_one(
    query: str,
    choices: List[str],
    scorer: Callable = fuzz.token_set_ratio,
    threshold: int = 85,
) -> Tuple[bool, str]:
    """Best fuzzy match of *query* among *choices*.

    Returns (True, match) when some choice scores >= *threshold*,
    otherwise (False, "").
    """
    best = process.extractOne(query, choices, scorer=scorer, score_cutoff=threshold)
    if best is None:
        return False, ""
    return True, best[0]
# Config Control
def search_metadata_dir(root_dir: str) -> Dict[str, Dict[str, List]]:
    """Index the on-disk metadata tree root/author/publications/pub/cited/*.

    Returns {author: {publication: [citing-pub basenames (extension stripped)]}}.
    """
    result: Dict = {}
    for author in os.listdir(root_dir):
        pubs_root = os.path.join(root_dir, author, "publications")
        author_map: Dict[str, List] = {}
        for pub in os.listdir(pubs_root):
            cited_root = os.path.join(pubs_root, pub, "cited")
            author_map[pub] = [os.path.splitext(c)[0] for c in os.listdir(cited_root)]
        result[author] = author_map
    return result
def merge_local_user_data(
    local_data: Dict[str, Dict[str, List]], user_guide: List[Dict[str, Any]]
) -> Dict[str, Dict[str, List]]:
    """Filter the local metadata index down to the authors/publications the user asked for.

    Author and publication names in *user_guide* are resolved against the
    local index by fuzzy matching; unmatched entries are silently skipped.
    Entries without a "publications" key select all of that author's work.
    Returned data is deep-copied so callers may mutate it freely.
    """
    result: Dict = {}
    for author_info in user_guide:
        ok, author = fuzzy_extract_one(author_info["name"], list(local_data.keys()))
        if not ok:
            continue
        if "publications" not in author_info.keys():
            result[author] = deepcopy(local_data[author])
        else:
            result[author] = {}
            for pub_name in author_info["publications"]:
                ok, pub = fuzzy_extract_one(pub_name, list(local_data[author].keys()))
                if ok:
                    result[author][pub] = deepcopy(local_data[author][pub])
    return result
def generate_tasks(
    root_dir: str,
    task_type: str = "cpub",
    user_guide_info: Optional[List] = None,
) -> List[Any]:
    """Build the work list at author / publication / citing-publication granularity.

    Scans the metadata tree under *root_dir*, optionally restricted by
    *user_guide_info* (see merge_local_user_data).
    """
    tasks: List[Any] = []
    data = search_metadata_dir(root_dir)
    if user_guide_info is not None:
        data = merge_local_user_data(data, user_guide_info)
    assert task_type in ["author", "pub", "cpub"]
    if task_type == "author":
        for author in data.keys():
            # NOTE(review): `(author)` is just a parenthesised string, not a
            # 1-tuple -- "author" tasks are plain strings while the other
            # task types are tuples. Confirm whether (author,) was intended.
            tasks.append((author))
    elif task_type == "pub":
        for author in data.keys():
            for pub in data[author].keys():
                tasks.append((author, pub))
    elif task_type == "cpub":
        for author in data.keys():
            for pub in data[author].keys():
                for cpub in data[author][pub]:
                    tasks.append((author, pub, cpub))
    else:
        # Unreachable: the assert above already restricts task_type.
        raise ValueError
    return tasks
def get_cpub_path(root_dir: str, author: str, pub: str, cpub: str, postfix: str) -> str:
return os.path.join(root_dir, author, "publications", pub, "cited", cpub + postfix)
def makepardirs(file_path: str) -> None:
    """Ensure the parent directory of *file_path* exists (mkdir -p semantics)."""
    parent = os.path.dirname(file_path)
    os.makedirs(parent, exist_ok=True)
def convert2txt(task: Tuple, pdf_dir: str, txt_dir: str) -> None:
    """Convert pdf documents to txt format

    (CPub Level Task)

    Obtain the corresponding paper information from the Task, convert it from
    the PDF document format to txt, and save it in the txt_dir path.

    Args:
        task: A tuple containing basic information, (Author, Publication, Citing
            Publication)
        pdf_dir: A string representing the storage path of the pdf documents
        txt_dir: A string representing the storage path of the txt documents
    """
    author, pub, cpub = task
    pdf_path = get_cpub_path(pdf_dir, author, pub, cpub, ".pdf")
    txt_path = get_cpub_path(txt_dir, author, pub, cpub, ".txt")
    # Skip when there is no source PDF or the conversion is already cached.
    if not os.path.exists(pdf_path) or os.path.exists(txt_path):
        return
    try:
        os.makedirs(os.path.dirname(txt_path), exist_ok=True)
        extract_text(files=[pdf_path], outfile=txt_path)
    except:
        # Best-effort: corrupt/unparseable PDFs are silently skipped.
        # NOTE(review): bare except also hides KeyboardInterrupt.
        pass
def count_txt_files(task: Tuple, pdf_dir: str, txt_dir: str) -> int:
    """Return whether the converted .txt for this citing publication exists.

    Returns a bool (int-compatible, presumably summed by callers);
    `pdf_dir` is unused but kept for a uniform task signature.
    """
    author, pub, cpub = task
    txt_path = get_cpub_path(txt_dir, author, pub, cpub, ".txt")
    return os.path.exists(txt_path)
def count_pdf_files(task: Tuple, pdf_dir: str, txt_dir: str) -> int:
    """Return whether the downloaded .pdf for this citing publication exists.

    Returns a bool (int-compatible, presumably summed by callers);
    `txt_dir` is unused but kept for a uniform task signature.
    """
    author, pub, cpub = task
    pdf_path = get_cpub_path(pdf_dir, author, pub, cpub, ".pdf")
    return os.path.exists(pdf_path)
def convert2txt_pdfx(task: Tuple, pdf_dir: str, txt_dir: str) -> None:
    """PDF-to-text conversion for one citing publication using the pdfx backend.

    Alternative to convert2txt (which uses pdfminer's extract_text); same
    skip-if-missing/skip-if-cached semantics.
    """
    author, pub, cpub = task
    pdf_path = get_cpub_path(pdf_dir, author, pub, cpub, ".pdf")
    txt_path = get_cpub_path(txt_dir, author, pub, cpub, ".txt")
    if not os.path.exists(pdf_path) or os.path.exists(txt_path):
        return
    try:
        os.makedirs(os.path.dirname(txt_path), exist_ok=True)
        pdf = pdfx.PDFx(pdf_path)
        out = pdf.get_text()
        with open(txt_path, "w") as f:
            f.write(out)
    except:
        # Best-effort: unparseable PDFs are silently skipped.
        pass
def generate_summary(
    task: Tuple, metadata_dir: str, pdf_dir: str, aminer_dir: str, txt_dir: str, parser
) -> None:
    """
    pub level task

    Build a markdown summary of all publications citing one publication:
    enrich each citing entry with cached AMiner data, link local PDFs, run
    *parser* over the converted text to extract citation comments, and write
    summary.md next to the publication's PDFs.
    """
    author, pub = task
    markdown_path = os.path.join(pdf_dir, author, "publications", pub, "summary.md")
    json_path = os.path.join(metadata_dir, author, "publications", pub, pub + ".json")
    makepardirs(markdown_path)
    makepardirs(json_path)
    cited_dir = os.path.join(metadata_dir, author, "publications", pub, "cited")
    pub_info = load_json(json_path)
    cpubs = []
    for cpub in os.listdir(cited_dir):
        cpub_info = load_json(os.path.join(cited_dir, cpub))
        # Strip the .json extension to get the citing publication's key.
        cpub, *_ = os.path.splitext(cpub)
        aminer_path = get_cpub_path(aminer_dir, author, pub, cpub, ".json")
        pdf_path = get_cpub_path(pdf_dir, author, pub, cpub, ".pdf")
        txt_path = get_cpub_path(txt_dir, author, pub, cpub, ".txt")
        # Fill missing bibliographic fields from the cached AMiner record,
        # but only when the titles fuzzy-match (guards against bad lookups).
        if not cpub_info["filled"] and os.path.exists(aminer_path):
            aminer_info = load_json(aminer_path)
            if (
                fuzzy_match(cpub_info["bib"]["title"], aminer_info["paper"]["title"])
                and "authors" in aminer_info["paper"].keys()
            ):
                cpub_info["bib"]["author"] = [
                    a["name"] for a in aminer_info["paper"]["authors"]
                ]
                if (
                    "venue" in aminer_info["paper"].keys()
                    and "info" in aminer_info["paper"]["venue"].keys()
                    and "name" in aminer_info["paper"]["venue"]["info"].keys()
                ):
                    cpub_info["bib"]["journal"] = aminer_info["paper"]["venue"]["info"][
                        "name"
                    ]
                # print(cpub_info["bib"]["journal"])
                if "abstract" in aminer_info["paper"].keys():
                    cpub_info["bib"]["abstract"] = aminer_info["paper"]["abstract"]
        # Point pub_url at the local PDF when we have one.
        if os.path.exists(pdf_path):
            cpub_info["pub_url"] = os.path.abspath(pdf_path)
        comments = []
        if os.path.exists(txt_path):
            comments = parser.parse(txt_path, pub_info)
        cpubs.append({"publication": cpub_info, "comments": comments})
    try:
        CitingDocument(pub_info["bib"]["title"], cpubs, markdown_path).save()
    except:
        # Best-effort: a malformed entry must not abort the whole batch.
        pass
def fill_aminer_info(task: Tuple, metadata_dir: str, aminer_dir: str) -> None:
    """
    cpub level task

    Query AMiner for one citing publication's metadata (by title) and cache
    the raw result as JSON under *aminer_dir*. Skips when the source
    metadata is missing or the cache entry already exists.
    """
    author, pub, cpub = task
    metadata_path = get_cpub_path(metadata_dir, author, pub, cpub, ".json")
    aminer_path = get_cpub_path(aminer_dir, author, pub, cpub, ".json")
    if not os.path.exists(metadata_path) or os.path.exists(aminer_path):
        return
    makepardirs(aminer_path)
    info = load_json(metadata_path)
    try:
        # Crawler goes through a local HTTP proxy (port 24000).
        aminer = AMinerCrawler(proxy="http://127.0.0.1:24000")
        out = aminer.search_publication(info["bib"]["title"])
        aminer.driver.quit()
        dump_json(out, aminer_path)
    except:
        # Best-effort: any crawler/webdriver failure leaves the cache absent.
        pass
def simple_download(url: str, path: str) -> bool:
    """Fetch *url* and save the body to *path* if it looks like a document.

    Accepts application/pdf, octet-stream and x-download content types.
    Returns True on success, False otherwise.

    BUGFIX: the original indexed res.headers["Content-Type"] directly (a
    KeyError for header-less responses, masked by a bare `except:` that also
    swallowed KeyboardInterrupt/SystemExit). The lookup is now safe and the
    handler narrowed to Exception.
    """
    try:
        res = requests.get(url)
        content_type = res.headers.get("Content-Type", "")
        if (
            "application/pdf" in content_type
            or "application/octet-stream" in content_type
            or "application/x-download" in content_type
        ):
            with open(path, "wb") as f:
                f.write(res.content)
            return True
        return False
    except Exception:
        return False
def scihub_download(scihub_crawler, url, path):
    # Thin adapter so Sci-Hub downloads share the (url, path) call shape
    # used by the other download helpers.
    return scihub_crawler.download(url, path=path)
def download_pdf(
    task: Tuple,
    metadata_dir: str,
    pdf_dir: str,
    downloader: BaseDownlaoder,
) -> None:
    """
    cpub level task

    Download one citing publication's PDF via *downloader*, using the
    eprint_url and title from its metadata JSON. Skips when the metadata is
    missing or the PDF already exists. The dead string block below is a
    retired scihub/simple_download strategy kept for reference.
    """
    author, pub, cpub = task
    json_path = get_cpub_path(metadata_dir, author, pub, cpub, ".json")
    pdf_path = get_cpub_path(pdf_dir, author, pub, cpub, ".pdf")
    if not os.path.exists(json_path) or os.path.exists(pdf_path):
        return
    os.makedirs(os.path.dirname(pdf_path), exist_ok=True)
    info = load_json(json_path)
    """
    if "pub_url" in info.keys():
        if "saved" in info.keys():
            if info["saved"] == "success":
                print("[&success]:", info["bib"]["title"])
                return
            else:
                print("[&failed]:", info["bib"]["title"])
        ok = False
        if "eprint_url" in info.keys():
            ok = simple_download(info["eprint_url"], path=pdf_path)
        if not ok:
            pass
            # ok = scihub_download(scihub_crawler, info["pub_url"], path=pdf_path)
        status = "success" if ok else "failed"
        info["saved"] = status
        print("[%s]: %s" % (status, info["bib"]["title"]))
        dump_json(info, json_path)
        return
    """
    # Result is currently ignored; the success/failure prints are disabled.
    if downloader.download(
        url=info.get("eprint_url", ""), path=pdf_path, title=info["bib"]["title"]
    ):
        # print("[success]", info["bib"]["title"], info.get("eprint_url", ""))
        pass
    else:
        # print("[failed]", info["bib"]["title"], info.get("eprint_url", ""))
        pass
def valid_pdf(path: str) -> bool:
    """True when the file at *path* is detected as a PDF by content sniffing."""
    kind = filetype.guess(path)
    if kind is None:
        return False
    return kind.mime == "application/pdf"
| true |
a8f4420d7d56d59f4a132958b1c2a506b347a12b | Python | Terobero/THY | /kiosk.py | UTF-8 | 1,506 | 2.609375 | 3 | [] | no_license | from flask import Flask, render_template, request
app = Flask(__name__)

# Base address of this kiosk's Raspberry Pi web service (currently unused).
raspi_id = "127.0.0.1:5000/"
@app.route('/')
def index():
    # Smoke-test endpoint.
    return "hello world"
@app.route('/profile/<user>')
def profile(user):
    # Render the profile page for the user name taken from the URL.
    return render_template("profile.html", user=user)
@app.route('/main')
def main():
    # Kiosk landing screen.
    return render_template("main.html")
@app.route('/welcome', methods = ['GET', 'POST'])
def welcome():
    # Credentials submitted by the login form; currently read but unchecked.
    card_no = request.values.get("card_no")
    password = request.values.get("password")
    # TODO: validate card_no/password against the database
    # (original note in Turkish: "databaseden card no password check edilecek")
    return render_template("payment.html")
@app.route('/help')
def help():
    # NOTE: shadows the builtin `help`; harmless here since Flask dispatches
    # by route, but worth renaming eventually.
    # Original comment "led yakma": turn on the kiosk's help LED.
    return "LED YAK"
@app.route('/payment/<type>')
def payment(type):
    # NOTE: `type` shadows the builtin; kept because the route captures it.
    # Hardware payment branches are stubs and currently return None,
    # which Flask turns into a 500 -- TODO implement.
    if type == "coin":
        # coin payment (original comment: "bozuk para")
        pass
    elif type == "paper":
        # banknote payment (original comment: "nakit para")
        pass
    elif type == "credit":
        # credit card payment (original comment: "kredi karti")
        pass
    elif type == "none":
        return render_template("spend.html")
    else:
        return "Wrong page."
@app.route('/spend')
def spend():
    # Screen listing ways to spend the loaded balance.
    return render_template("spend.html")
@app.route('/spend/<type>')
def spend_type(type):
    # NOTE: `type` shadows the builtin; kept because the route captures it.
    # All spend options except "more" are stubs returning None (HTTP 500).
    if type == "shopnmiles":
        # redirect to the THY Shop&Miles site (original comment: "thynin sitesine")
        pass
    elif type == "upgrade":
        # ticket upgrade (original comment: "bilet yukseltme")
        pass
    elif type == "donate":
        # donate the balance (original comment: "para bagislama")
        pass
    elif type == "more":
        return render_template("payment.html")
    else:
        return "Wrong page."
if __name__ == '__main__':
app.run()
'''
Python/HTML/CSS Reference Sheet:
@app.route('/')
@app.route('/<username>') #Username is changing
@app.route('<int:number>') #Input is integer
@app.route('/', methods = ['GET', 'POST'])
''' | true |
326372dfbbd47f88f02b275d72a595ba9402064b | Python | yanshixiao/swyjjtj | /gen_dir_file.py | UTF-8 | 864 | 2.71875 | 3 | [] | no_license | # -*- encoding:utf8 -*-
# Generate 100k files, each containing one random number.
import os
import random
import multiprocessing
import time

base_path = "E:/testdir/rantxt/"

# Delete any files left over in the target directory from earlier runs
# (directories themselves are kept).
for root, dirs, files in os.walk(base_path, topdown=False):
    for name in files:
        os.remove(os.path.join(root, name))
    # for name in dirs:
    #     os.rmdir(os.path.join(root, name))

folder = os.path.exists(base_path)
if not folder:
    os.makedirs(base_path)
else:
    print("已存在")  # prints "already exists"
os.chdir(base_path)  # all generated files go into base_path
def mktxt(file_num):
    """Create 1.txt .. file_num.txt in the cwd, each holding one random float."""
    for idx in range(1, file_num + 1):
        with open("%d.txt" % idx, 'w') as handle:
            handle.write(str(random.random()))
if __name__ == "__main__":
start = time.time()
p = multiprocessing.Process(target=mktxt, args=(100000,))
p.start()
end = time.time()
print(end-start)
| true |
50230d14d58373a560e40dfca198cbaee0d36873 | Python | wangyimo/web | /robot_car_wash_web/base_libarary/utils/captcha.py | UTF-8 | 7,025 | 2.515625 | 3 | [] | no_license | from io import BytesIO
import itertools
import os
import time
import requests
from PIL import Image
class CaptchaOCRException(Exception):
    # Raised when CaptchaOCR is constructed from an unsupported image source.
    pass
class CaptchaOCR(object):
    """
    Captcha recognition. (original docstring was in Chinese)

    Before use, install Google's Tesseract-OCR Engine
    (https://github.com/tesseract-ocr/tesseract) and place the
    `captcha.traineddata` file in the tessdata directory of the install.

    Pipeline: remove the background colour, de-noise, remove the
    interference line, split the image into per-character crops, then OCR
    each crop through the remote /ocr/captcha service.
    """
    def __init__(self, image):
        # Accept a PIL image, raw bytes, or a filesystem path.
        if isinstance(image, Image.Image):
            image = image
        elif isinstance(image, bytes):
            image = Image.open(BytesIO(image))
        elif isinstance(image, str):
            image = Image.open(image)
        else:
            raise CaptchaOCRException()
        self.ocr_url = 'https://ocr-d.parkone.cn/ocr/captcha'
        self.origin_image = image
        # Work on a copy so the original image stays untouched.
        self.image = image.copy()
        self._verify_code = ''

    @classmethod
    def get_num(cls):
        # Class-wide monotonically increasing counter.
        _num = getattr(cls, '_num', 0)
        cls._num = _num + 1
        return cls._num

    @property
    def width(self):
        return self.image.width

    @property
    def height(self):
        return self.image.height

    @property
    def verify_code(self):
        # Lazily run the full detection pipeline on first access.
        if self._verify_code == '':
            self._verify_code = self.detect()
        return self._verify_code

    @property
    def pixels(self):
        # PIL pixel-access object for the working image.
        return self.image.load()

    def image_to_string(self):
        """OCR each character crop via the remote service; concatenate the results."""
        verify_code = ''
        for im in self._cut():
            buf = BytesIO()
            im.save(buf, 'png')
            buf.seek(0)
            resp = requests.post(
                url=self.ocr_url,
                data={'lang': 'captcha', 'psm': '10'},
                files=[
                    ('captcha', ('captcha.png', buf.getvalue(), 'image/png'))]
            )
            verify_code += resp.json().get('text', '')
        # Retired local-pytesseract variants kept for reference:
        # for im in self._cut():
        #     im = im.convert('L')
        #     # im.show()
        #     code = pytesseract.image_to_string(
        #         im, lang='captcha', config='--psm 10')
        #     verify_code += code
        # return verify_code
        # self.image = self.image.convert('L')
        # self.image.show()
        # verify_code = pytesseract.image_to_string(self.image, lang='captcha')
        return verify_code

    def detect(self):
        """Run the full clean-up pipeline and return the OCR'd text."""
        self._remove_background()
        # image.show()
        self._remove_noise()
        # image.show()
        self._remove_line()
        self._remove_noise()
        return self.image_to_string()

    def _remove_background(self):
        """Whiten the dominant colour (first colour seen >=100 times)."""
        pixels, width, height = self.pixels, self.width, self.height
        obj, back_pix = {}, None
        for i, j in itertools.product(range(width), range(height)):
            if obj.get(pixels[i, j], 0) >= 100:
                back_pix = pixels[i, j]
                break
            else:
                obj[pixels[i, j]] = obj.get(pixels[i, j], 0) + 1
        if back_pix is not None:
            for i, j in itertools.product(range(width), range(height)):
                if pixels[i, j] == back_pix:
                    pixels[i, j] = (255, 255, 255)

    def _remove_noise(self):
        """Whiten isolated black pixels based on their 3x3 neighbourhood."""
        pixels, width, height = self.pixels, self.width, self.height
        for i, j in itertools.product(range(width), range(height)):
            if pixels[i, j] != (0, 0, 0):
                continue
            colorful, black = 0, 0
            for _i, _j in itertools.product(
                    range(i - 1, i + 2), range(j - 1, j + 2)):
                if min([_i,
                        _j]) < 0 or _i >= width or _j >= height:
                    continue
                if pixels[_i, _j] == (0, 0, 0):
                    black += 1
                elif pixels[_i, _j] != (255, 255, 255):
                    colorful += 1
            # A real stroke pixel should have enough coloured/black neighbours.
            if colorful <= 2 or black < 2:
                pixels[i, j] = (255, 255, 255)
                continue

    def _remove_line(self):
        """Find the interference line's colour and whiten every pixel of it."""
        pixels, width, height = self.pixels, self.width, self.height
        p = None
        for i, j in itertools.product(range(width), range(height)):
            if pixels[i, j] == (255, 255, 255):
                continue
            status, points = self._check(i, j)
            if status:
                # The line is thin: its same-colour neighbours must chain on.
                for _ in points:
                    _s, _ps = self._check(*_)
                    status &= _s
            if status:
                p = pixels[i, j]
                break
        if p is not None:
            for i, j in itertools.product(range(width), range(height)):
                if pixels[i, j] == p:
                    pixels[i, j] = (255, 255, 255)

    def _check(self, x, y):
        """Inspect the 4 forward neighbours of (x, y).

        Returns (False, []) if any neighbour has a different non-white colour;
        otherwise (has_same_colour_neighbour, same_colour_points).
        """
        pixels, width, height = self.pixels, self.width, self.height
        points = ((x + 1, y), (x + 1, y + 1), (x, y + 1), (x - 1, y + 1))
        points = [_ for _ in points if
                  0 <= _[0] < width and 0 <= _[1] < height]
        p, n, arr = pixels[x, y], 0, []
        for _ in points:
            if pixels[_] != p and pixels[_] != (255, 255, 255):
                return False, []
            elif pixels[_] == p:
                n += 1
                arr.append(_)
        return n > 0, arr

    def _cut(self):
        """Split the image into per-character crops at all-white columns.

        Returns [] unless more than 3 crops are found (a sanity check for
        4-character captchas).
        """
        pixels, width, height = self.pixels, self.width, self.height
        whites = []
        for i in range(width):
            m = -1
            for j in range(height):
                if pixels[i, j] != (255, 255, 255):
                    m = i
                    break
            if m < 0:
                whites.append(i)
        regions = []
        # Gaps in the run of white columns delimit character regions.
        for i in range(1, len(whites)):
            if whites[i] != whites[i - 1] + 1:
                regions.append((whites[i - 1], whites[i]))
        crops = []
        for region in regions:
            box = (region[0], 0, region[1] + 1, height)
            crop = self.image.crop(box)
            # crop.save(BytesIO(), 'PNG')
            crops.append(crop)
        return crops if len(crops) > 3 else []

    def save(self, file_path):
        """Save the (greyscaled) working image; a directory gets a timestamped name."""
        image = self.image.convert('L')
        if os.path.isdir(file_path):
            file_path = '%s/%s.png' % (file_path, time.time())
        image.save(file_path, 'png')
if __name__ == '__main__':
    def func():
        """Fetch one captcha, OCR it, and attempt an admin login.

        Returns True when the login endpoint answers 204 (success).
        """
        verify_key = int(time.time() * 10000)  # client-chosen key tying captcha to login
        url = "https://car-wash-server-d.parkone.cn/admin/get_verify_code"
        resp = requests.get(url, params={'vk': 'qhj0xvs0sp', 'num': '4'})
        verify_code = CaptchaOCR(resp.content).verify_code
        print(verify_code)
        if len(verify_code) != 4:
            return False  # OCR failed to isolate all four characters
        url = "https://car-wash-server-d.parkone.cn/admin/login"
        resp = requests.post(url, data={
            'username': 'admin',
            'password': 'admin123',
            'verify_key': verify_key,
            'verify_code': verify_code,
        })
        return resp.status_code == 204

    def main():
        """Run func() 10 times and print the OCR login success rate."""
        m, n = 10, 0
        for _ in range(m):
            n += 1 if func() else 0
        print('>>>> %s, %s, %.4f' % (m, n, n / m))

    main()
| true |
8b363a631191d2c77c8daa42a26ce8e4e600c17d | Python | ShonnyAIO/Code-In-Place-Stanford-Univeristy-2021 | /Lectures/Lecture 2 - Control Flow/marsweight.py | UTF-8 | 279 | 3.703125 | 4 | [] | no_license | """
Prompts the user for a weight on Earth
and prints the equivalent weight on Mars.
"""
def main():
    """Read an Earth weight from the user and print its Mars equivalent (37.8%)."""
    earth_weight = int(input('Enter a weight on Earth: '))
    print('The equivalent on Mars:', earth_weight * 37.8 / 100)


if __name__ == "__main__":
    main()
| true |
0e69e32fe1f78841c3e9e907a966d6ae7ce03810 | Python | brouhahaha/programming_1 | /домашка каникулы/verse.py | UTF-8 | 2,875 | 3.046875 | 3 | [] | no_license | import random
def _random_line(path):
    """Return one random stripped line from the UTF-8 word list at *path*.

    Shared helper replacing sixteen near-identical open/strip/choice blocks;
    the 'with' statement also closes each file (the originals never did).
    """
    with open(path, 'r', encoding='utf-8') as source:
        lines = [line.strip() for line in source]
    return random.choice(lines)


def qws():
    """Random word from words1.txt."""
    return _random_line('words1.txt')


def wws():
    """Random word from words2.txt."""
    return _random_line('words2.txt')


def ews():
    """Random word from words3.txt."""
    return _random_line('words3.txt')


def verb():
    """Random verb from words2v.txt."""
    return _random_line('words2v.txt')


def p51():
    """5-syllable line, variant 1: qws/wws/verb in one of three random orders."""
    pros = random.choice([1, 2, 3])
    if pros == 1:
        return qws() + ' ' + wws() + ' ' + verb()
    elif pros == 2:
        return verb() + ' ' + wws() + ' ' + qws()
    else:
        return wws() + ' ' + qws() + ' ' + verb()


def p52():
    """5-syllable line, variant 2: ews and verb in random order."""
    pr = random.choice([1, 2])
    if pr == 1:
        return ews() + ' ' + verb()
    else:
        return verb() + ' ' + ews()


def p5():
    """A 5-syllable line: randomly one of the two variants."""
    prost = random.choice([1, 2])
    if prost == 1:
        return p51()
    else:
        return p52()


def very():
    """Random intensifier from ochen.txt."""
    return _random_line('ochen.txt')


def red():
    """Random adjective from adjn.txt."""
    return _random_line('adjn.txt')


def plat():
    """Random noun from pla.txt."""
    return _random_line('pla.txt')


def znak():
    """Random terminal punctuation mark."""
    return random.choice([".", "!", "..."])


def p7():
    """A 7-syllable line: intensifier + adjective + noun + punctuation."""
    return very() + ' ' + red() + ' ' + plat() + znak()


def maybe():
    """Random opening phrase from maybe.txt."""
    return _random_line('maybe.txt')


def sun():
    """Random noun from pla.txt (same list as plat)."""
    return _random_line('pla.txt')


def fin():
    """Random closing phrase from fin.txt."""
    return _random_line('fin.txt')


def last():
    """Closing line: opener, noun and final phrase."""
    return maybe() + ', ' + sun() + ' ' + fin()


def poem():
    """Print a five-line poem (5-7-5-7-closing)."""
    print(p5())
    print(p7())
    print(p5())
    print(p7())
    print(last())


poem()
| true |
53353f51e287ed5cab44679e9042745928d67647 | Python | guoweifeng216/python | /python_design/pythonprogram_design/Ch4/4-1-E32.py | UTF-8 | 597 | 3.296875 | 3 | [] | no_license | months = []
def main():
## display months containing the letter r.
global months
fillList()
months = deleteNoRs()
displayMonths()
def fillList():
global months
infile = open("Months.txt", 'r')
months = [line.rstrip() for line in infile]
infile.close
def deleteNoRs():
reducedList = []
for i in range(12):
if 'r' in months[i].lower():
reducedList.append(months[i])
return reducedList
def displayMonths():
print("The R months are:")
print((", ").join(months))
main()
| true |
aa9d95cf4216b639eea52f870d31513eb35a2179 | Python | Alexico1969/AI-tictactoe | /core_phase02.py | UTF-8 | 355 | 4.0625 | 4 | [] | no_license | board = [[0,0,0],[0,0,0],[0,0,0]]
symbol = [".","X","O"]
# Step 2.
#
# - create function to draw the board:
# - call the function
def draw_board():
print()
for row in board:
print("-------------")
print('|', symbol[row[0]], '|', symbol[row[1]], '|', symbol[row[2]], "|")
print("-------------")
print()
draw_board() | true |
5dfba01ba1a4c480f380bbdc5b15551d98bf2817 | Python | WATER-Monster/flask_framework | /middleware/josnWebToken/josn_web_token.py | UTF-8 | 1,770 | 2.796875 | 3 | [] | no_license | import functools
import jwt
import time
from flask import request, jsonify, g
from jwt import ExpiredSignatureError
from config.constans import SECRET_KEY, TOKEN_EXPIRE_TIME, TOKEN_ALGORITHM, TOKEN_HEADERS, TOKEN_ENCODE_TYPE, \
PAYLOAD_PARAM, TOKEN_NAME
class JWT:
    """
    Generate and validate JSON Web Tokens (singleton helper).
    (Original docstring: "JWT生成以及check" — JWT generation and checking.)
    """
    def __new__(cls, *args, **kwargs):
        # Classic singleton: every construction returns one shared instance.
        if not hasattr(cls, '_instance'):
            cls._instance = object.__new__(cls)
        return cls._instance

    @staticmethod
    def create_token(param):
        """Build a signed token carrying *param* under PAYLOAD_PARAM plus an expiry."""
        exp = int(time.time() + TOKEN_EXPIRE_TIME)  # absolute expiry, unix seconds
        payload = {
            PAYLOAD_PARAM: param,
            "exp": exp
        }
        # .decode() implies jwt.encode() returns bytes here (PyJWT 1.x behaviour)
        # — confirm the installed PyJWT version.
        token = jwt.encode(payload=payload, key=SECRET_KEY, algorithm=TOKEN_ALGORITHM, headers=TOKEN_HEADERS).decode(TOKEN_ENCODE_TYPE)
        return token

    @staticmethod
    def check_token(token, param):
        """Return 1 if *token* verifies and matches *param*, 0 if expired, -1 otherwise.

        NOTE(review): jwt.decode expects ``algorithms=[...]`` (a list);
        the ``algorithm=`` keyword used here is ignored on PyJWT 1.x and
        rejected on 2.x — confirm against the pinned dependency.
        Non-expiry decode errors (e.g. malformed token) are not caught here.
        """
        try:
            info = jwt.decode(token, SECRET_KEY, True, algorithm=TOKEN_ALGORITHM)
        except ExpiredSignatureError:  # token expired
            return 0
        if info.get(PAYLOAD_PARAM) == param:  # verification passed
            return 1
        else:
            return -1
def jwt_wrapper(func):
    """
    Decorator validating the request's JWT before invoking the wrapped view.
    (Original docstring: "jwt 装饰器" — JWT decorator.)

    Reads the token from the TOKEN_NAME request header and the expected
    payload value from flask.g, then dispatches on JWT.check_token's
    1 / 0 / -1 result.
    :return: the wrapped view function
    """
    @functools.wraps(func)
    def wrapper(*args,**kwargs):
        token = request.headers.get(TOKEN_NAME)  # may be None if header absent
        param_value = g.get(PAYLOAD_PARAM)       # value stashed in flask.g upstream
        jwt_instance = JWT()                     # singleton, cheap to construct
        result = jwt_instance.check_token(token, param_value)
        if result == -1:
            # message: "token verification failed"
            return jsonify({'code': 400, 'msg': 'token验证未通过'})
        elif result == 0:
            # message: "token expired, please log in again"
            return jsonify({'code': 400, 'msg': 'token过期,请重新登陆'})
        else:
            res = func(*args,**kwargs)
            return res
    return wrapper
35952051556a2ceb9e2c869409e1218cb054dacf | Python | fabiano-teichmann/top_apps_apple_store | /core/controller.py | UTF-8 | 3,452 | 2.8125 | 3 | [] | no_license | import os
import pandas as pd
from django.utils.datetime_safe import datetime
from pymongo import MongoClient
from pymongo.errors import BulkWriteError
class Controller(object):
    """Load the Apple Store CSV, extract top-rated apps, and persist
    reports to a CSV file and a local MongoDB collection."""

    def __init__(self):
        # Working directory used as the base for the media/ output path.
        self.path = os.getcwd()
        now = datetime.now()
        # NOTE(review): ':' in the file name is invalid on Windows
        # filesystems — confirm the deployment OS.
        self.csv_file = f"report_apple_store-{now.day}-{now.month}-{now.year}:{now.hour}:{now.minute}.csv"
        client = MongoClient('localhost', 27017)
        db = client['apple_store']
        self.collection = db['apps']

    def load_csv(self, file_):
        """
        Args:
            file_ (str): path to the CSV file

        Returns:
            DataFrame: parsed CSV contents
        """
        return pd.read_csv(file_)

    def get_top_apps(self, df):
        """Pick the top 10 apps of the News, Book and Music genres.
        (Translated from the original Portuguese docstring.)

        Args:
            df (DataFrame): full dataset with 'prime_genre' and
                'rating_count_tot' columns

        Returns:
            DataFrame: 10 most-rated News apps.
            DataFrame: 10 most-rated Book apps.
            DataFrame: 10 most-rated Music apps.
        """
        # Top 10 News apps by total rating count
        news = df[df.prime_genre == 'News']
        top_news = news.sort_values(by='rating_count_tot', ascending=False)
        # Top 10 Music apps by total rating count
        musics = df[df.prime_genre == 'Music']
        top_musics = musics.sort_values(by='rating_count_tot', ascending=False)
        # Top 10 Book apps by total rating count
        books = df[df.prime_genre == 'Book']
        top_books = books.sort_values(by='rating_count_tot', ascending=False)
        return top_news[0:10], top_books[0:10], top_musics[0:10]

    @staticmethod
    def generate_report(top_news, top_books, top_musics):
        """Concatenate the three top-10 frames into the final report.
        (Translated from the original Portuguese docstring.)

        Args:
            top_news (DataFrame):
            top_books (DataFrame):
            top_musics (DataFrame):

        Returns:
            DataFrame: selected columns, with 'rating_count_tot'
            renamed to 'n_citacoes'.
        """
        # Concatenate the dataframes
        df_concat = [top_news, top_books, top_musics]
        # Keep only the needed columns and rename rating_count_tot
        report = pd.concat(df_concat)
        report = report[['id', 'track_name', 'rating_count_tot', 'size_bytes', 'price', 'prime_genre']]
        report = report.rename(columns={'rating_count_tot': 'n_citacoes'})
        return report

    def generate_csv(self, report):
        """Write *report* under media/ using the timestamped file name.

        Args:
            report (DataFrame):

        Returns:
            str: the generated CSV file name (not the full path).
        """
        path_file = os.path.join(self.path, 'media', self.csv_file)
        report.to_csv(path_file, index=False)
        return self.csv_file

    def save_db(self, df):
        """Insert the report rows into MongoDB ('id' becomes '_id').
        (Translated from the original Portuguese docstring.)

        Args:
            df (DataFrame)

        Returns:
            bool: False when a bulk-write error occurs (e.g. duplicate _id).
        """
        report = df[['id', 'track_name', 'size_bytes', 'currency', 'price', 'rating_count_tot', 'rating_count_ver',
                     'user_rating', 'user_rating_ver', 'ver', 'cont_rating', 'prime_genre']]
        report = report.rename(columns={'rating_count_tot': 'n_citacoes', 'id': '_id'})
        reports = report.to_dict(orient='records')
        try:
            self.collection.insert_many(reports)
            return True
        except BulkWriteError:
            return False

    def get_all_values(self):
        """Return every document stored in the 'apps' collection as a list."""
        data = self.collection.find({})
        return [x for x in data]
| true |
44c550dfec6ee3cf97038611f86180da6533382a | Python | retarf/snake | /snake.py | UTF-8 | 5,018 | 3.546875 | 4 | [] | no_license | #!/bin/env python
from curses import wrapper
import curses
from game_over import game_over
class Snake:
    """Base curses snake: position, body bookkeeping, drawing and input.

    Coordinates are (y, x) curses-style; the playfield excludes the
    1-cell border, so valid cells run from (min_y, min_x) to
    (max_y, max_x). Movement methods themselves are supplied by the
    FreeSnake / CageSnake subclasses.
    """

    def __init__(self, screen, pos_y=-1, pos_x=-1, length=3, speed=100):
        """
        screen: curses window to draw on.
        pos_y/pos_x: starting cell; -1 means centre of the playfield.
        length: initial body length (cells).
        speed: movement delay in ms.
        """
        self.screen = screen
        self.min_y = 1
        self.min_x = 1
        self.max_y = self.screen.getmaxyx()[0] - 2  # max y without borders
        self.max_x = self.screen.getmaxyx()[1] - 2  # max x without borders
        self.x = pos_x
        self.y = pos_y
        # if position isn't set, set it on center
        if self.x == -1:
            self.x = int(self.max_x / 2)
        if self.y == -1:
            self.y = int(self.max_y / 2)
        self.pos = (self.y, self.x)           # head coordinate
        self.length = length
        self.first_length = length            # length on the start
        self.last = None                      # tail cell removed on last step
        self.speed = speed                    # speed in ms
        self.direction = self.move_right      # bound method of current heading
        self.look = "@"                       # character used to draw the body
        self.body = []                        # list of (y, x) cells, tail first
        self.body.append((self.y, self.x))
        self.score = 0

    def last_pos(self):
        '''Return coordinate of last pice of body'''
        # Pops the tail once the body exceeds its target length and
        # remembers it in self.last so show() can erase it.
        if len(self.body) > self.length:
            last = self.body.pop(0)
            self.last = last

    def show(self):
        '''Show snake on the screen'''
        curses.init_pair(1, 1, 0)
        # self.last_pos()
        last = self.last
        # first, delete last pice
        if last:
            (last_y, last_x) = last
            self.screen.addch(last_y, last_x, ' ')
        # then, draw snake on the screen
        for pice in self.body:
            (pice_y, pice_x) = pice
            self.screen.addstr(pice_y, pice_x, self.look, curses.color_pair(1))

    def move(self):
        '''Move snake after sec second, takes keys to change direction and save first element actual position'''
        c = self.screen.getch()
        curses.flushinp()  # drop queued key presses so input doesn't lag
        # Arrow keys change heading unless it would reverse into the body.
        if c == curses.KEY_UP and self.direction != self.move_down:
            self.move_up()
            self.direction = self.move_up
        elif c == curses.KEY_DOWN and self.direction != self.move_up:
            self.move_down()
            self.direction = self.move_down
        elif c == curses.KEY_RIGHT and self.direction != self.move_left:
            self.move_right()
            self.direction = self.move_right
        elif c == curses.KEY_LEFT and self.direction != self.move_right:
            self.move_left()
            self.direction = self.move_left
        else:
            self.direction()  # no (valid) key: keep going the same way
        # save first element position
        self.pos = self.body[len(self.body) - 1]
        self.last_pos()

    def eat(self, snack):
        ''' Snake eat meal.snacks '''
        if self.pos == snack:
            self.length += 1
            self.score += 1
        # Game over if body takes all board
        if self.length > (self.max_y * self.max_x):
            self.show()
            self.screen.refresh()
            game_over(self.screen, self.score)

    def body_check(self):
        '''Check if snake eat him self'''
        # Head cell appearing twice in the body means self-collision.
        if self.body.count(self.pos) > 1:
            game_over(self.screen, self.score)
class FreeSnake(Snake):
    '''This snake will go through the walls'''

    def move_left(self):
        '''Step one cell left, wrapping to the right edge past the border.'''
        nxt = self.x - 1
        self.x = self.max_x if nxt < self.min_x else nxt
        self.body.append((self.y, self.x))

    def move_right(self):
        '''Step one cell right, wrapping to the left edge past the border.'''
        nxt = self.x + 1
        self.x = self.min_x if nxt > self.max_x else nxt
        self.body.append((self.y, self.x))

    def move_up(self):
        '''Step one cell up, wrapping to the bottom edge past the border.'''
        nxt = self.y - 1
        self.y = self.max_y if nxt < self.min_y else nxt
        self.body.append((self.y, self.x))

    def move_down(self):
        '''Step one cell down, wrapping to the top edge past the border.'''
        nxt = self.y + 1
        self.y = self.min_y if nxt > self.max_y else nxt
        self.body.append((self.y, self.x))
class CageSnake(Snake):
    '''This snake won't go through the walls'''

    def move_left(self):
        '''Step one cell left; hitting the left wall ends the game.'''
        self.x -= 1
        if self.x < 1:
            game_over(self.screen, self.score)
            return
        self.body.append((self.y, self.x))

    def move_right(self):
        '''Step one cell right; hitting the right wall ends the game.'''
        self.x += 1
        if self.x > self.max_x:
            game_over(self.screen, self.score)
            return
        self.body.append((self.y, self.x))

    def move_up(self):
        '''Step one cell up; hitting the top wall ends the game.'''
        self.y -= 1
        if self.y < 1:
            game_over(self.screen, self.score)
            return
        self.body.append((self.y, self.x))

    def move_down(self):
        '''Step one cell down; hitting the bottom wall ends the game.'''
        self.y += 1
        if self.y > self.max_y:
            game_over(self.screen, self.score)
            return
        self.body.append((self.y, self.x))
| true |
1c8cbb5370e17b4a39940442464e4f1a8f86a3a3 | Python | Yufeng-L/Tweets-Analysis | / code/Google_keyword.py | UTF-8 | 687 | 2.53125 | 3 | [
"MIT"
] | permissive | from google.cloud import language
from google.cloud.language import enums
from google.cloud.language import types
client = language.LanguageServiceClient()
document = language.types.Document(
content='Michelangelo Caravaggio, Italian painter, is known for "The Calling of Saint Matthew".',
type=language.enums.Document.Type.PLAIN_TEXT,
)
response = client.analyze_entities(
document=document,
encoding_type='UTF32',
)
for entity in response.entities:
print('=' * 20)
print(' name: {0}'.format(entity.name))
print(' type: {0}'.format(entity.type))
print(' metadata: {0}'.format(entity.metadata))
print(' salience: {0}'.format(entity.salience)) | true |
0120c7dc1a27d8d83082436ba4ace38d391b7b86 | Python | Rup-Royofficial/Codechef_solutions | /Smart Phone.py | UTF-8 | 405 | 3.59375 | 4 | [] | no_license | n = int(input())
# Read n integers (n itself is read just above), then print the median
# multiplied by the count of elements from the median to the end of the
# sorted list. For even n the lower median y[n//2 - 1] is used.
# The original recomputed the count in a redundant loop and carried an
# unreachable final 'else' branch; both are removed here.
y = []
for _ in range(n):
    y.append(int(input()))
y.sort()

z = len(y) // 2
if len(y) % 2 == 0:
    count_1 = len(y[z - 1:])
    print(count_1 * y[z - 1])
else:
    count_2 = len(y[z:])
    print(count_2 * y[z])
e8ffa9517eb85b9d08d9cde6e207a8a70aad94b0 | Python | prabal-007/Polynomial-Regression | /Poly.py | UTF-8 | 456 | 3.109375 | 3 | [] | no_license | def PlotPolly(model, independent_variable, dependent_variabble, x_label, y_label):
x_new = np.linspace(15, 55, 100)
y_new = model(x_new)
plt.plot(independent_variable, dependent_variabble, '.', x_new, y_new, '-')
plt.title('Polynomial Fit with Matplotlib for Price ~ Length')
ax = plt.gca()
ax.set_facecolor((0.898, 0.898, 0.898))
fig = plt.gcf()
plt.xlabel(x_label)
plt.ylabel(y_label)
plt.show()
plt.close()
| true |
21cb0cd77a5c1ec0fe20478ee6bc64e2c5524a4d | Python | nwam/fradio-server | /server/send_broadcasts.py | UTF-8 | 1,550 | 2.9375 | 3 | [] | no_license | import socket
#########################################
# Functions for sending broadcasts to clients
# Replaced by a hack in fradio-client
#########################################
def send_message_to_listeners(host_spotify_username, message):
    """Send *message* over TCP to every client listening to the given host.

    NOTE(review): ``fradiodb`` and ``CLIENT_PORT`` are not defined or
    imported in this file (only ``socket`` is) — confirm they are provided
    elsewhere before re-enabling this path; the module header says this
    code was replaced by a client-side hack.
    """
    # Get list of listeners
    get_listener_ips = """SELECT ipAddress FROM user WHERE listening = %s"""
    get_listener_ips_args = (host_spotify_username,)
    listner_ips = fradiodb.query_all(get_listener_ips, get_listener_ips_args)
    # Send message to list of listeners
    for listener_ip in listner_ips:
        listener_ip = listener_ip[0] # because listener_ips is a tuple of tuples
        send_tcp_message(listener_ip, CLIENT_PORT, message)
def send_tcp_message(ip, port, message):
    """Open a TCP connection, send the length-prefixed *message*, close.

    Always returns None; a failed connection is reported by _connect_tcp
    and the send is skipped.

    NOTE(review): as written, ``_connect_tcp`` never returns the socket
    (it returns None on every path), so the send below cannot execute —
    and ``ENCODING`` is not defined in this file. Confirm before use.
    """
    sock = _connect_tcp((ip,port))
    print("Sending message to {}:{}:\n{}".format(ip,port,message))
    if sock is None:
        return None
    sock.send(prepend_message_size(bytes(message, ENCODING)))
    sock.close()
    return None
def _connect_tcp(connectinfo):
"""
Connect to the device using the given IP, port pairing
:return: The created TCP socket.
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(10)
try:
sock.connect(connectinfo)
except OSError as e:
print('Connection error')
print(str(e))
return None
except (EOFError, KeyboardInterrupt):
print('Connect cancelled')
return None
def prepend_message_size(message):
    """Frame *message* (bytes) by prefixing its byte length and a newline.

    The original formatted the bytes payload into a str, which embeds the
    ``b'...'`` repr in the frame and produces a str that ``socket.send``
    rejects; the caller in send_tcp_message passes bytes and sends the
    result, so the frame must stay bytes end to end.
    """
    return str(len(message)).encode() + b"\n" + message
| true |
758d148f069b5bfa9b61716a087477e76fa24ce0 | Python | vickyjr7/PM-Speech-and-Trend-Analysis | /Count word from txt file new.py | UTF-8 | 1,643 | 3.03125 | 3 | [] | no_license | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 7 21:05:45 2020
@author: VICKY JUNGHARE
"""
import pandas as pd
from collections import Counter

file = "C:/Users/VICKY JUNGHARE/Documents/Speech/31 may 2020.txt"

# Load the transcript; quoting=3 (csv.QUOTE_NONE) keeps quote characters literal.
dataset = pd.read_csv(file, delimiter='\t', quoting=3)
dataset.columns.values[0] = "new_name"

# Normalise the text: strip commas and full stops, lowercase everything.
# regex=False makes '.' a literal dot — with the pandas 1.x default
# (regex=True) '.' matched every character and blanked each line.
df = dataset.new_name.str.replace(',', '', regex=False).str.replace('.', '', regex=False).str.lower()

# One pass over every word instead of one full scan per keyword.
words = Counter(word for line in df for word in line.split(" "))

count1 = words["economy"]
count2 = words["aatmanirbhar"]
count3 = words["education"]
count4 = words["nep"]
count5 = words["doctors"]
count6 = words["farmers"]
count7 = words["china"]
count8 = words["trump"]

print(file, "Economy=", count1, "Aatmanirbhar=", count2, "Education(NEP)=", count3 + count4,
      "Doctors=", count5, "Farmers=", count6, "China=", count7, "Trump=", count8)
| true |
685b9a59890f041b679e2ff48ea5a0c6be49b698 | Python | voberto/WSN_Temp_GUI | /client/venv001/src/gui_plots.py | UTF-8 | 3,478 | 2.734375 | 3 | [] | no_license | ### ------------------------------------ ###
### Name: gui_plots.py
### Author: voberto
### ------------------------------------ ###
# Imports section
# 1.1 - General imports
from kivy.garden.matplotlib.backend_kivyagg import FigureCanvasKivyAgg
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import matplotlib
from datetime import datetime
from matplotlib.dates import DateFormatter
from matplotlib.ticker import MaxNLocator
import matplotlib.dates as md
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.widget import Widget
import time
# 1.2 - Project-specific imports
import gui_vars as gui_vars
import gui_db as gui_db
# 2 - Variables
fig_temp = plt.figure()                  # figure hosting the live temperature plot
ax1_temp = fig_temp.add_subplot(1,1,1)   # single axes redrawn by the animation
xs_temp = []                             # plotted timestamps (matplotlib date numbers)
ys_temp = []                             # plotted temperature samples (floats)
temp_xaxis_max_ticks = 5                 # max tick labels shown on the time axis
var_temp_last = 0                        # NOTE(review): declared global in func_animate_temp but never used there — confirm
temp_x_vec_db = []                       # timestamps persisted via gui_db (HH:MM:SS strings)
temp_y_vec_db = []                       # temperature values persisted via gui_db
var_time_last = 0                        # NOTE(review): declared global in func_animate_temp but never used there — confirm
# 3 - Functions
# 3.1 - Create animation for the plot with received temperature data
def func_animate_temp(i):
    """Matplotlib animation callback: append the newest temperature sample,
    persist it, and redraw the rolling plot.

    i: frame index supplied by FuncAnimation (unused).
    Triggered state lives in gui_vars: new_temp_flag marks fresh data,
    last_temp_var holds the latest reading.
    """
    # If new data has arrived
    if gui_vars.TempNewArrived_Flag.new_temp_flag == True:
        global var_time_last
        global var_temp_last
        global temp_xaxis_max_ticks
        # Record current timestamp
        x_var = datetime.now()
        # Convert timestamp for each vector (plot and database)
        x_var_plot = matplotlib.dates.datestr2num(str(x_var))
        x_var_db = str(x_var.strftime("%H:%M:%S"))
        # Append data to each axis vector
        xs_temp.append(x_var_plot)
        ys_temp.append(float(gui_vars.TempLast_Value.last_temp_var))
        temp_x_vec_db.append(x_var_db)
        # Save data to database
        temp_y_vec_db.append(gui_vars.TempLast_Value.last_temp_var)
        gui_db.db1_csv_savedata(temp_x_vec_db, temp_y_vec_db)
        # Limit figure size by deleting vector elements when
        # vector size is higher than "var_tempfig_maxlength"
        var_tempfig_maxlength = 30
        if len(xs_temp) > var_tempfig_maxlength:
            xs_temp.pop(0)
            ys_temp.pop(0)
        ax1_temp.clear()  # full redraw: clear() drops titles/labels, so reset them below
        # Specify maximum number of ticks on X axis
        locator = plt.MaxNLocator(temp_xaxis_max_ticks)
        ax1_temp.xaxis.set_major_locator(locator)
        # Specify legend format for X axis labels
        temp_xlabel_format = md.DateFormatter('%H:%M:%S')
        ax1_temp.xaxis.set_major_formatter(temp_xlabel_format)
        ax1_temp.grid(which='both')
        ax1_temp.set_title("Temperature")
        ax1_temp.set_ylabel("\u00b0C")
        ax1_temp.set_xlabel("Time")
        # Plot data
        ax1_temp.plot_date(xs_temp, ys_temp, marker='o', linestyle='-')
        gui_vars.TempNewArrived_Flag.new_temp_flag = False  # consume the flag
# 4 - Classes
class PlotBox_temp(BoxLayout):
    """Kivy layout embedding the matplotlib temperature figure and its animation."""

    def __init__(self, **kwargs):
        super(PlotBox_temp, self).__init__(**kwargs)
        # Set the animation update interval
        temp_anim_interval_ms = 200
        # Add figure to canvas
        self.add_widget(FigureCanvasKivyAgg(figure=fig_temp))
        # Add animation to figure; kept on self so it is not garbage-collected.
        self.anim = animation.FuncAnimation(fig_temp, func_animate_temp, interval=temp_anim_interval_ms)
        # Set axis variables
        ax1_temp.set_title("Temperature")
        ax1_temp.set_ylabel("\u00b0C")
        ax1_temp.set_xlabel("Time")
        ax1_temp.grid(which='both')
        # Hide x-axis labels at plot startup
        ax1_temp.set_xticklabels([])
        # First empty plot
        ax1_temp.plot_date(xs_temp, ys_temp, marker='o', linestyle='-')
| true |
7cdd4dfdfaeb96b63c562867c6f7f1ea59c37b6d | Python | noothanprem/python | /datastructureprograms/binarysearchtree_utility.py | UTF-8 | 740 | 4.125 | 4 | [] | no_license | #Function to find the factorial of a number
def fact(num):
    """Return the factorial of *num*; any value below 2 yields 1."""
    result = 1
    for factor in range(2, num + 1):
        result *= factor
    return result


# Number of distinct binary search trees on n nodes.
def find(n):
    """Print and return the Catalan number C(n) modulo 100000007,
    i.e. the count of binary search trees with *n* nodes, or return
    False when *n* is not a non-negative integer-like value."""
    if not str(n).isdigit():
        return False
    val = fact(2 * n) // (fact(n + 1) * fact(n)) % 100000007
    print(val)
    return val
af8b791f4bdb4ac6e7f5600e811cc7f7b9111b8f | Python | EnergyWork/create-clone-delete-remote-repos | /application.py | UTF-8 | 8,699 | 2.71875 | 3 | [] | no_license | import os
import re
import requests
import tkinter as tk
import tkinter.filedialog as fd
import github
import pygit2
from tkinter import messagebox
from tkinter.ttk import *
from github import Github
# from git import Repo
class Application:
    """
    Main class of application
    """
    # Shared state set up by the menu/auth handlers.
    choosed_programm = 0   # 0 = create/clone mode, 1 = delete mode (see __doit)
    github_account = None  # authenticated Github client, set by __auth
    root = None            # Tk root window, created in run()

    def __init__(self):
        pass

    def __center_window(self, window):
        """Center *window* on the screen based on its current geometry string.

        NOTE(review): ``sh`` is computed but never used — confirm intent.
        """
        w, h, sx, sy = map(int, re.split(r'x|\+', window.winfo_geometry()))
        sw = (window.winfo_rootx() - sx) * 2 + w
        sh = (window.winfo_rooty() - sy) + (window.winfo_rootx() - sx) + h
        sx = (window.winfo_screenwidth() - sw) // 2
        sy = (window.winfo_screenheight() - sh) // 2
        window.wm_geometry('+%d+%d' % (sx, sy))

    def __ccrr(self):
        """Create the remote repository named in the combobox; optionally clone it.

        On a 422 (repository already exists) the user is offered a
        clone-only fallback via an askyesno dialog.
        """
        repo_name = self.cbox_repository.get()
        if not (repo_name and not repo_name.isspace()):
            messagebox.showerror('ERROR', 'Repository name field is empty!')
            return
        clone = self.bl_clone_it.get()
        try:
            repo = self.github_account.get_user().create_repo(name=repo_name, homepage='https://github.com')
            if clone:
                clone_path = self.ent_clone_to.get()
                clone_path = os.path.join(clone_path, repo_name)
                cloned = pygit2.clone_repository(repo.git_url, clone_path)
        except github.GithubException as e:
            if e.status == 422 and clone:
                # 422: repo already exists — offer to clone the existing one.
                ans = messagebox.askyesno('WARNING', f"Code: {e.status}, {e.data['message']}\n{e.data['errors'][0]['message']}\nClone?")
                if ans:
                    repo = self.github_account.get_user().get_repo(name=repo_name)
                    clone_path = self.ent_clone_to.get()
                    clone_path = os.path.join(clone_path, repo_name)
                    cloned = pygit2.clone_repository(repo.git_url, clone_path)
                    messagebox.showinfo('Info', f'Cloned to {clone_path}')
            else:
                messagebox.showerror('ERROR', f"Code: {e.status}, {e.data['message']}\n{e.data['errors'][0]['message']}")

    def __drr(self):
        """Delete the remote repository named in the combobox (no confirmation)."""
        repo_name = self.cbox_repository.get()
        repo = self.github_account.get_user().get_repo(repo_name)
        repo.delete()

    def __doit(self):
        """Main-button handler: validate state and dispatch on choosed_programm.

        NOTE(review): ``== None`` should conventionally be ``is None``.
        """
        if self.github_account == None:
            messagebox.showerror('ERROR', "You're not authenticated!\nMenu > Auth > insert you auth token")
            return
        if self.bl_clone_it.get():
            if not (self.ent_clone_to.get() and not self.ent_clone_to.get().isspace()):
                messagebox.showerror('ERROR', "Choose a directory to clone")
                return
        if self.choosed_programm == 0:
            self.__ccrr()
        elif self.choosed_programm == 1:
            self.__drr()
        else:
            self.root.destroy()

    def __get_reposities(self):
        """Fill the repository combobox with the authenticated user's repo names."""
        repos_list = []
        for repo in self.github_account.get_user().get_repos():
            repos_list.append(repo.name)
        self.cbox_repository['values'] = repos_list

    def __clone_it(self):
        """Checkbox handler: toggle the clone-destination widgets and button label."""
        if self.bl_clone_it.get():
            self.chkbtn_clone_it['text'] = 'Clone to:'
            self.ent_clone_to['state'] = tk.NORMAL
            self.btn_done['text'] = 'Create and clone repository'
            self.btn_choose_dir.grid(column=1, row=0)
        else:
            self.chkbtn_clone_it['text'] = 'Clone it?'
            self.ent_clone_to['state'] = tk.DISABLED
            self.btn_done['text'] = 'Create repository'
            self.btn_choose_dir.grid_forget()

    def __on_enter(self, e):
        """Hover-in effect for the 'choose a directory' pseudo-link."""
        self.btn_choose_dir['foreground'] = '#3a5ae8'

    def __on_leave(self, e):
        """Hover-out effect for the 'choose a directory' pseudo-link."""
        self.btn_choose_dir['foreground'] = '#122faa'

    def __choose_dir(self, e):
        """Open a directory picker and put the chosen path into the clone entry."""
        directory = fd.askdirectory(title='Выбирете директорию', initialdir='/')
        self.ent_clone_to.delete(0, last=tk.END)
        self.ent_clone_to.insert(0, directory)

    def __auth(self, userlogin, window):
        """Authenticate with the given token, load repos, and close *window*."""
        try:
            self.github_account = Github(login_or_token=userlogin)
            u = self.github_account.get_user().login  # raises on bad credentials
            messagebox.showinfo('Authenticated', f'Hi {u}')
            self.lbl_who['text'] = u
            self.__get_reposities()
            window.destroy()
        except github.BadCredentialsException:
            messagebox.showerror("ERROR", "Authentication error")

    def __auth_window_via_token(self):
        """Show a modal dialog asking for the GitHub authentication token."""
        self.choosed_auth_method = 0
        window = tk.Toplevel(self.root)
        window.iconbitmap(self.path_to_label_image)
        window.resizable(0, 0)
        lbl_auth_token = Label(master=window, text='Authentication token')
        lbl_auth_token.grid(column=0, row=0, sticky='w', padx=15, pady=5)
        token = tk.StringVar()
        ent_auth_token = Entry(master=window, textvariable=token, show='*', width=50)
        ent_auth_token.grid(column=0, row=1, sticky='w', padx=15, pady=5)
        btn_done = Button(master=window, text='Auth', command=lambda:self.__auth(token.get(), window))
        btn_done.grid(column=0, row=2, padx=5, pady=5)
        self.__center_window(window)
        #window.transient()
        window.grab_set()  # make the dialog modal
        #window.focus_set()
        window.wait_window()
        #window.mainloop()

    def __set_window_ccrr(self):
        """Switch the UI into create/clone mode."""
        self.choosed_programm = 0
        self.btn_done['text'] = 'Create repository'
        self.layout_h.grid(column=0, row=5, sticky='w', padx=5, pady=5)
        self.ent_clone_to.grid(column=0, row=6, sticky='w', padx=5, pady=5)

    def __set_window_drr(self):
        """Switch the UI into delete mode (hides the clone widgets)."""
        self.choosed_programm = 1
        self.btn_done['text'] = 'Delete'
        self.layout_h.grid_forget()
        self.ent_clone_to.grid_forget()

    def __main_window(self, parent):
        """Build the main form widgets inside *parent*."""
        self.lbl_repos_name = Label(master=parent, text='Repository name')
        self.lbl_repos_name.grid(column=0, row=2, sticky='w', padx=5, pady=5)
        self.cbox_repository = Combobox(master=parent,width=47)
        self.cbox_repository.grid(column=0, row=3, sticky='w', padx=5, pady=5)
        self.layout_h = tk.Frame(master=parent)
        self.bl_clone_it = tk.BooleanVar()
        self.chkbtn_clone_it = Checkbutton(master=self.layout_h, text='Clone it?', variable=self.bl_clone_it, onvalue=True, offvalue=False, command=self.__clone_it)
        self.chkbtn_clone_it.grid(column=0, row=0)
        # A Label styled as a hyperlink acts as the directory-picker button.
        self.btn_choose_dir = Label(master=self.layout_h, text='choose a directory', font="Verdana 8 underline", foreground='blue', cursor='hand2')
        self.btn_choose_dir.bind('<Button-1>', self.__choose_dir)
        self.btn_choose_dir.bind('<Enter>', self.__on_enter)
        self.btn_choose_dir.bind('<Leave>', self.__on_leave)
        self.layout_h.grid(column=0, row=5, sticky='w', padx=5, pady=5)
        self.ent_clone_to= Entry(master=parent, width=50, state=tk.DISABLED)
        self.ent_clone_to.grid(column=0, row=6, sticky='w', padx=5, pady=5)
        self.btn_done = Button(master=parent, text='Create repository', command=self.__doit)
        self.btn_done.grid(column=0, row=7, padx=5, pady=5)
        self.lbl_who = Label(master=parent, text='Not authenticated', foreground='gray')
        self.lbl_who.grid(column=0, row=8, padx=0, pady=0)

    def __menu(self):
        """Build and return the application's menu bar."""
        #
        choose_action = tk.Menu(tearoff=0)
        choose_action.add_cascade(label='Create/Clone remote repos', command=self.__set_window_ccrr)
        choose_action.add_cascade(label='Delete remote repos', command=self.__set_window_drr)
        #
        menu_menu = tk.Menu(tearoff=0)
        menu_menu.add_cascade(label='Auth', command=self.__auth_window_via_token)
        menu_menu.add_cascade(label='Programm', menu=choose_action)
        menu_menu.add_separator()
        menu_menu.add_cascade(label='Exit', command=(lambda: exit(0)))
        #
        main_menu = tk.Menu(tearoff=0)
        main_menu.add_cascade(label='Menu', menu=menu_menu)
        main_menu.add_cascade(label='About') #TODO add command
        return main_menu

    def run(self):
        """Create the root window, wire up the UI, and start the Tk main loop."""
        self.root = tk.Tk()
        self.root.title('Simple repository manipulation')
        self.path_to_label_image = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'resourses\\angleicon.ico')
        self.root.iconbitmap(self.path_to_label_image)
        self.root.config(menu=self.__menu())
        #root.geometry('380x270')
        self.root.resizable(0, 0)
        main = Frame(self.root)
        self.__main_window(main)
        main.grid(column=0, row=0, padx=25, pady=20)
        self.__center_window(self.root)
        self.root.mainloop()


Application().run()
72f185e4625809d4e37ccb832a4abe24ca9d8a07 | Python | kotsky/quadcopter-matlab | /py-drone-vision/project.py | ISO-8859-1 | 29,845 | 2.5625 | 3 | [
"MIT"
] | permissive | import cv2
import numpy as np
import os
import RPi.GPIO as gpio
import time
###################################################################################################
def adjust_gamma(image, gamma):
    """Apply gamma correction to *image* through a 256-entry lookup table."""
    inverse = 1.0 / gamma
    lut = np.array([((level / 255.0) ** inverse) * 255
                    for level in np.arange(0, 256)]).astype("uint8")
    return cv2.LUT(image, lut)
# GPIO initialisation: four PWM output channels (two per axis),
# all at 50 Hz and started at 0% duty cycle (idle).
gpio.setmode(gpio.BCM)  # use Broadcom pin numbering
pin_x_l = 12   # X axis, left channel
pin_x_r = 16   # X axis, right channel
pin_y_up = 20  # Y axis, up channel
pin_y_d = 21   # Y axis, down channel
freq_x = 50    # PWM frequency (Hz), X channels
freq_y = 50    # PWM frequency (Hz), Y channels
gpio.setup(pin_x_l, gpio.OUT)
gpio.setup(pin_x_r, gpio.OUT)
pwmObject_x_l = gpio.PWM(pin_x_l, freq_x)
pwmObject_x_r = gpio.PWM(pin_x_r, freq_x)
pwmObject_x_l.start(0)
pwmObject_x_r.start(0)
gpio.setup(pin_y_up, gpio.OUT)
gpio.setup(pin_y_d, gpio.OUT)
pwmObject_y_up = gpio.PWM(pin_y_up, freq_y)
pwmObject_y_d = gpio.PWM(pin_y_d, freq_y)
pwmObject_y_up.start(0)
pwmObject_y_d.start(0)
###################################################################################################
def moved_x(AK_x,Turn_x_l,Turn_x_r):
    """Drive the X axis: AK_x == 1 applies Turn_x_l to the left channel,
    any other value applies Turn_x_r to the right channel; the opposite
    channel is zeroed first."""
    if AK_x == 1:
        pwmObject_x_r.ChangeDutyCycle(0)
        pwmObject_x_l.ChangeDutyCycle(int(Turn_x_l))
    else:
        pwmObject_x_l.ChangeDutyCycle(0)
        pwmObject_x_r.ChangeDutyCycle(int(Turn_x_r))
def moved_y(AK_y,Turn_y_up,Turn_y_d):
    """Drive the Y axis: AK_y == 1 applies Turn_y_d to the down channel,
    any other value applies Turn_y_up to the up channel; the opposite
    channel is zeroed first."""
    if AK_y == 1:
        pwmObject_y_up.ChangeDutyCycle(0)
        pwmObject_y_d.ChangeDutyCycle(int(Turn_y_d))
    else:
        pwmObject_y_d.ChangeDutyCycle(0)
        pwmObject_y_up.ChangeDutyCycle(int(Turn_y_up))
###################################################################################################
def main():
    """Track an orange/yellow ball with the webcam and steer the motors.

    Loop: grab a 320x240 frame, gamma-correct it based on overall
    brightness, threshold in HSV for two hue bands, clean the mask with
    morphology, find the ball's bounding box, and convert its offset from
    frame centre into PWM duty cycles via moved_x()/moved_y().
    Exits when ESC (key code 27) is pressed or the camera closes.
    """
    DELAY = 0.001  # NOTE(review): assigned but never used in this function
    diap_x = int(20)  # horizontal dead-band half-width around frame centre (px)
    diap_y = int(10)  # vertical dead-band half-width around frame centre (px)
    AK_x = 1
    AK_y = 1
    Angle_x = 1
    Koeff_x = 1
    Angle_y = 1
    Koeff_y = 1
###################################################################################################
    capWebcam = cv2.VideoCapture(0)
    print("default resolution = " + str(capWebcam.get(cv2.CAP_PROP_FRAME_WIDTH)) + "x" + str(capWebcam.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    # Force a small frame so per-frame processing keeps up on the Pi.
    capWebcam.set(cv2.CAP_PROP_FRAME_WIDTH, 320.0)
    capWebcam.set(cv2.CAP_PROP_FRAME_HEIGHT, 240.0)
    intXFrameCenter = int(float(capWebcam.get(cv2.CAP_PROP_FRAME_WIDTH)) / 2.0)
    intYFrameCenter = int(120)
    # NOTE(review): Position_x / Position_y are initialised here and never
    # updated anywhere below, so the >= 140 / <= -140 branches in the
    # except-handler can never fire.  Looks like unfinished odometry.
    Position_x = 0
    Position_y = 0
    if capWebcam.isOpened() == False:
        print("error: capWebcam not accessed successfully\n\n")
        os.system("pause")
        return
    # end if
###################################################################################################
    while cv2.waitKey(1) != 27 and capWebcam.isOpened():
        blnFrameReadSuccessfully, imgOriginal = capWebcam.read()
        if not blnFrameReadSuccessfully or imgOriginal is None:
            print("error: frame not read from webcam\n")
            os.system("pause")
            break
        # end if
        gray = cv2.cvtColor(imgOriginal, cv2.COLOR_BGR2GRAY)
        gray = cv2.bilateralFilter(gray, 7, 12, 12)
        # Pick a gamma from the median brightness: darker frames get a
        # stronger boost.  (The med < 130 -> 2 step breaks the otherwise
        # monotonic ladder; presumably intentional tuning -- confirm.)
        med = np.median(gray)
        if med < 11:
            gamma = 2
        elif med < 31:
            gamma = 1.6
        elif med < 51:
            gamma = 1.4
        elif med < 130:
            gamma = 2
        else:
            gamma = 1.3
        imgGamma = adjust_gamma(imgOriginal, gamma)
        imgHSV = cv2.cvtColor(imgGamma, cv2.COLOR_BGR2HSV)
        # Two HSV bands OR-ed together (low hues 0-20 and 20-40).
        imgThreshLow = cv2.inRange(imgHSV, np.array([0, 170, 120]), np.array([20, 240, 255]))
        imgThreshHigh = cv2.inRange(imgHSV, np.array([20, 70, 170]), np.array([40, 170, 255]))
        imgThresh = cv2.add(imgThreshLow, imgThreshHigh)
        # Close small holes, then open away speckle noise.
        st1 = cv2.getStructuringElement(cv2.MORPH_RECT, (15, 15), (7, 7))
        st2 = cv2.getStructuringElement(cv2.MORPH_RECT, (11, 11), (5, 5))
        imgThresh = cv2.morphologyEx(imgThresh, cv2.MORPH_CLOSE, st1)
        imgThresh = cv2.morphologyEx(imgThresh, cv2.MORPH_OPEN, st2)
        # Canny thresholds derived from the median (classic auto-Canny).
        sigma = 33
        low = int(max(0, (1.0 - sigma/100) * med))
        up = int(min(255, (1.0 + sigma/100) * med))
        edged = cv2.Canny(imgThresh, low, up)
        # NOTE(review): 3-value unpack matches the OpenCV 3.x API; OpenCV 4.x
        # returns only (contours, hierarchy) -- confirm the installed version.
        img, contours,hierarchy = cv2.findContours(edged.copy(),cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE )
        try:
            cnt = contours[0]
###################################################################################################
        except Exception:
            # No contour found: stop both axes.  The Koeff flips below are
            # dead code while Position_x/y stay 0 (see note above).
            Turn_x_l = int(0)
            Turn_x_r = int(0)
            if Angle_x == 1:
                if Position_x >= 140:
                    Koeff_x = -1
            else:
                if Position_x <= -140:
                    Koeff_x = -1
            Turn_y_up = int(0)
            Turn_y_d = int(0)
            if Angle_y == 1:
                if Position_y >= 140:
                    Koeff_y = -1
            else:
                if Position_y <= -140:
                    Koeff_y = -1
###################################################################################################
        else:
            # NOTE(review): this assigns a fresh name "Koeff" that is never
            # read; probably meant to reset Koeff_x / Koeff_y to 1.
            Koeff = 1
            a,b,w,h = cv2.boundingRect(cnt)
            cv2.rectangle(imgOriginal,(a,b),(a+w,b+h),(0,255,0),2)
            # Ball centre in pixels.
            x = a+w/2
            y = b+h/2
            print("ball position x = " + str(x) + ", y = " + str(y))
###################################################################################################
            # Horizontal steering: proportional gain 0.2 inside the
            # dead-band margin, 0.3 outside it.  160 is the frame centre x.
            if x > intXFrameCenter:
                Angle_x = 1
                Turn_x_r = int(0)
                if x < intXFrameCenter + diap_x:
                    Turn_x_l = int((x-160)*0.2)
                else:
                    Turn_x_l = int(0.3 * (x-160))
            else:
                Angle_x = -1
                Turn_x_l = int(0)
                if intXFrameCenter - diap_x < x:
                    Turn_x_r = int(-(x-160)*0.2)
                else:
                    Turn_x_r = int(0.3 * (-(x-160)))
###################################################################################################
            # Vertical steering: gains 0.25 / 0.35; 120 is the centre y.
            if y > intYFrameCenter:
                Angle_y = 1
                Turn_y_up = int(0)
                if y < intYFrameCenter + diap_y:
                    Turn_y_d = int((y-120)*0.25)
                else:
                    Turn_y_d = int((y-120)*0.35)
            else:
                Angle_y = -1
                Turn_y_d = int(0)
                if intYFrameCenter - diap_y < y:
                    Turn_y_up = int(-(y-120)*0.25)
                else:
                    Turn_y_up = int(0.35*(-(y-120)))
###################################################################################################
        # Direction * correction factor selects which motor of each pair runs.
        AK_x = Angle_x*Koeff_x
        AK_y = Angle_y*Koeff_y
        moved_x(AK_x,Turn_x_l,Turn_x_r)
        moved_y(AK_y,Turn_y_up,Turn_y_d)
###################################################################################################
        # Debug windows, disabled for headless operation:
        # cv2.namedWindow("imgOriginal", cv2.WINDOW_AUTOSIZE)
        # cv2.namedWindow("imgThresh", cv2.WINDOW_AUTOSIZE)
        # cv2.imshow("imgOriginal", imgOriginal)
        # cv2.imshow("imgThresh", imgThresh)
    # end while
    cv2.destroyAllWindows()
    return
###################################################################################################
# Run the tracking loop only when executed as a script.
if __name__ == "__main__":
    main()
0
4,36836088412015
8,39513069622820
11,8445744743349
14,6224014978397
16,7425610040527
18,2718445024260
19,2865770782421
19,8488249665799
25 0 0
24,3963823901878 0 4,36836088412015
22,6909184956658 0 8,39513069622820
20,1442476095044 0 11,8445744743349
17,0562202013287 0 14,6224014978397
13,6751743033666 0 16,7425610040527
10,1657896335550 0 18,2718445024260
6,61767810076558 0 19,2865770782421
3,06801733771060 0 19,8488249665799
% Reference trajectory waypoints, one [x y z] row per point (metres).
% The path descends from (0,0,20) to (25,0,0); the spacing appears to
% trace a quarter ellipse (a = 25, b = 20) -- confirm against the
% trajectory generator.
waypoints = [
0 0 20
3.06801733771060 0 19.8488249665799
6.61767810076558 0 19.2865770782421
10.1657896335550 0 18.2718445024260
13.6751743033666 0 16.7425610040527
17.0562202013287 0 14.6224014978397
20.1442476095044 0 11.8445744743349
22.6909184956658 0 8.39513069622820
24.3963823901878 0 4.3683608841201
25.0001231321231 0 0];
% NOTE(review): the next matrix has no assignment target -- its value
% lands in "ans".  It looks like a coarser, hand-tuned version of the
% same path, probably left over from experimentation.
[0 0 20
5 0 19.9
10 0 19.6
15 0 19
20 0 17
22.5 0 10
25 0 4
25 0 0];
% Duplicate of the first assignment; executing it simply re-assigns the
% identical waypoint list.
waypoints = [
0 0 20
3.06801733771060 0 19.8488249665799
6.61767810076558 0 19.2865770782421
10.1657896335550 0 18.2718445024260
13.6751743033666 0 16.7425610040527
17.0562202013287 0 14.6224014978397
20.1442476095044 0 11.8445744743349
22.6909184956658 0 8.39513069622820
24.3963823901878 0 4.3683608841201
25.0001231321231 0 0];
% init_plot -- create the 3-D visualization window for the quadrotor.
% Opens a white full-ish figure with an equal-axis 3-D view (az=30,
% el=30), perspective camera projection, a headlight and Gouraud
% lighting, plus reference lines along X (default colour) and Y (red).
% The large commented-out uicontrol blocks are a disabled control panel
% (setpoint entry fields, motor-speed readouts, disturbance buttons,
% view selector); kept for reference.
function init_plot
figure('units','normalized','position',[.1 .1 .8 .8],'name','Quadrotor AUS','numbertitle','off','color','w');
axes('units','normalized','position',[.2 .1 .6 .8]);
%axis auto
axis equal
% E1 = uicontrol('units','normalized','position',[.11 .85 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% % E2 = uicontrol('units','normalized','position',[.11 .75 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% % E3 = uicontrol('units','normalized','position',[.11 .65 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% E4 = uicontrol('units','normalized','position',[.11 .55 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% E5 = uicontrol('units','normalized','position',[.11 .45 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% E6 = uicontrol('units','normalized','position',[.11 .35 .1 .07],'style','edit','fontsize',13,'string',0,'backgroundcolor','w');
% uicontrol('units','normalized','position',[.02 .83 .05 .07],'style','text','fontsize',13,'string','Altitude','backgroundcolor','w');
% % uicontrol('units','normalized','position',[.02 .73 .05 .07],'style','text','fontsize',13,'string','Roll','backgroundcolor','w');
% % uicontrol('units','normalized','position',[.02 .63 .05 .07],'style','text','fontsize',13,'string','Pitch','backgroundcolor','w');
% uicontrol('units','normalized','position',[.02 .53 .05 .07],'style','text','fontsize',13,'string','Yaw','backgroundcolor','w');
% uicontrol('units','normalized','position',[.02 .43 .05 .07],'style','text','fontsize',13,'string','X','backgroundcolor','w');
% uicontrol('units','normalized','position',[.02 .33 .05 .07],'style','text','fontsize',13,'string','Y','backgroundcolor','w');
% uicontrol('units','normalized','position',[.11 .25 .1 .07],'style','pushbutton','fontsize',13,'string','Go','callback',@Go1);
% Motors speed readouts (disabled):
% uicontrol('units','normalized','position',[.85 .83 .05 .07],'style','text','fontsize',13,'string','Front M','backgroundcolor',[.5 .7 1]);
% uicontrol('units','normalized','position',[.85 .73 .05 .07],'style','text','fontsize',13,'string','Right M','backgroundcolor',[.5 .7 1]);
% uicontrol('units','normalized','position',[.85 .63 .05 .07],'style','text','fontsize',13,'string','Rear M','backgroundcolor',[.5 .7 1]);
% uicontrol('units','normalized','position',[.85 .53 .05 .07],'style','text','fontsize',13,'string','Left M','backgroundcolor',[.5 .7 1]);
% O1 = uicontrol('units','normalized','position',[.91 .86 .08 .05],'style','text','fontsize',13,'string','0','backgroundcolor','w');
% O2 = uicontrol('units','normalized','position',[.91 .76 .08 .05],'style','text','fontsize',13,'string','0','backgroundcolor','w');
% O3 = uicontrol('units','normalized','position',[.91 .66 .08 .05],'style','text','fontsize',13,'string','0','backgroundcolor','w');
% O4 = uicontrol('units','normalized','position',[.91 .56 .08 .05],'style','text','fontsize',13,'string','0','backgroundcolor','w');
% Disturbance buttons (disabled):
% uicontrol('units','normalized','position',[.13+.77 .35 .08 .07],'style','pushbutton','fontsize',13,'string','Z','callback',@d1);
% uicontrol('units','normalized','position',[.02+.77 .35 .08 .07],'style','pushbutton','fontsize',13,'string','Yaw','callback',@d2);
% uicontrol('units','normalized','position',[.13+.77 .25 .08 .07],'style','pushbutton','fontsize',13,'string','Pitch','callback',@d3);
% uicontrol('units','normalized','position',[.02+.77 .25 .08 .07],'style','pushbutton','fontsize',13,'string','Roll','callback',@d4);
% pop1 = uicontrol('units','normalized','position',[.02 .15 .19 .07],'style','popupmenu','fontsize',13,'string',{'3D view';'Camera view'},'callback',@view1,'value',1);
% axis([-5 5 -5 5 0 5])
%axis([-5 5 -5 5 0 10]);
axis auto
view(30,30)
grid on
hold on
xlabel('x')
%---------- Camera --------------------%
camproj perspective %creating 3D view
camva('manual') %view of camera
hlight = camlight('headlight'); %light
lighting gouraud
set(gcf,'Renderer','OpenGL')
line([-1 1],[0 0],[0 0])
line([0 0],[-1 1],[0 0],'color','r')
hold on
%img = imread('Untitled.png');
%imshow(img)
end
% Orphaned altitude-PID fragment.  NOTE(review): the full parameter
% scripts below re-assign all four of these gains, so these values are
% overridden if this file is executed top-to-bottom -- confirm which set
% the simulator actually uses.
Quad.Z_KP = 10/1.7; % KP value in altitude control
Quad.Z_KI = 0.3; % KI value in altitude control
Quad.Z_KD = -10/1.980; % KD value in altitude control
Quad.Z_KI_lim = .25; % error magnitude below which the integral accumulates
% Wil Selby
% Washington, DC
% May 30, 2015
% This script defines and initializes the variables for the quadrotor
% simulator: simulation timing, physical constants, sensor models, motor
% limits, state vectors, setpoints, disturbances, control limits and PID
% gains, all stored in the global struct "Quad".
% NOTE(review): a second, near-identical copy of this script with
% different sensor errors / gains / initial conditions appears further
% down this file; its assignments override these if both are run.
global Quad;
%% Initialize Variables
% Simulation Parameters
Quad.init = 0; % used in initialization
Quad.Ts = .01; % Sampling time (100 Hz)
Quad.sim_time = 10; % Simulation time (seconds)
Quad.counter = 1; % the counter that holds the time value
% Plotting Variables
Quad.t_plot = [0:Quad.Ts:Quad.sim_time-Quad.Ts]; % the time values
Quad.Xtemp = 0; % Temp variables used rotating and plotting quadrotor
Quad.Ytemp = 0; % Temp variables used rotating and plotting quadrotor
Quad.Ztemp = 0; % Temp variables used rotating and plotting quadrotor
% Environmental Parameters
Quad.g = 9.81; % Gravity (m/s^2)
% Quadrotor Physical Parameters
Quad.m = 1.4; % Quadrotor mass (kg)
Quad.l = .56; % Distance from the center of mass to each motor (m)
Quad.t = .02; % Thickness of the quadrotor's arms for drawing purposes (m)
Quad.rot_rad = .1; % Radius of the propeller (m)
Quad.Kd = 1.3858e-6; % Drag torque coefficient (kg-m^2)
Quad.Kdx = 0.16481; % Translational drag force coefficient (kg/s)
Quad.Kdy = 0.31892; % Translational drag force coefficient (kg/s)
Quad.Kdz = 1.1E-6; % Translational drag force coefficient (kg/s)
Quad.Jx = .05; % Moment of inertia about X axis (kg-m^2)
Quad.Jy = .05; % Moment of inertia about Y axis (kg-m^2)
Quad.Jz = .24; % Moment of inertia about Z axis (kg-m^2)
% Quadrotor Sensor Parameters
Quad.GPS_freq = (1/Quad.Ts)/1; % GPS update rate: 100 Hz with Ts = .01
Quad.X_error = 0; %+/- m (perfect GPS in this variant)
Quad.Y_error = 0; %+/- m
Quad.Z_error = 0; %+/- m
Quad.x_acc_bias = 0.16594; % m/s^2
Quad.x_acc_sd = 0.0093907; % accelerometer standard deviation
Quad.y_acc_bias = 0.31691; % m/s^2
Quad.y_acc_sd = 0.011045;
Quad.z_acc_bias = -8.6759; % m/s^2
Quad.z_acc_sd = 0.016189;
Quad.x_gyro_bias = 0.00053417; % rad/s
Quad.x_gyro_sd = 0.00066675; % gyro standard deviation
Quad.y_gyro_bias = -0.0011035; % rad/s
Quad.y_gyro_sd = 0.00053642;
Quad.z_gyro_bias = 0.00020838; % rad/s
Quad.z_gyro_sd = 0.0004403;
% Mutually exclusive sensor-model switches:
Quad.ground_truth = 1; % Use perfect sensor measurements
Quad.sensor_unfiltered = 0; % Use sensor errors, no filter
Quad.sensor_kf = 0; % Use sensor error, Kalman Filter
% Motor Parameters
Quad.KT = 1.3328e-5; % Thrust force coefficient (kg-m)
Quad.Jp = 0.044; % Moment of Inertia of the rotor (kg-m^2)
Quad.max_motor_speed = 925; % motors upper limit (rad/s)
Quad.min_motor_speed = 0; %-1*((400)^2); % motors lower limit (can't spin in reverse)
Quad.Obar = 0; % sum of motor speeds (O1-O2+O3-O4, N-m)
Quad.O1 = 0; % Front motor speed (radians/s)
Quad.O2 = 0; % Right motor speed (radians/s)
Quad.O3 = 0; % Rear motor speed (radians/s)
Quad.O4 = 0; % Left motor speed (radians/s)
% Translational Positions
Quad.X = 0; % Initial position in X direction GF (m)
Quad.Y = 0; % Initial position in Y direction GF (m)
Quad.Z = 10; % Initial position in Z direction GF (m)
Quad.X_BF = 0; % Initial position in X direction BF (m)
Quad.Y_BF = 0; % Initial position in Y direction BF (m)
Quad.Z_BF = 0; % Initial position in the Z direction BF (m)
% Translational Velocities
Quad.X_dot = 0; % Initial velocity in X direction GF (m/s)
Quad.Y_dot = 0; % Initial velocity in Y direction GF (m/s)
Quad.Z_dot = 0; % Initial velocity in Z direction GF (m/s)
Quad.X_dot_BF = 0; % Initial velocity in X direction BF (m/s)
Quad.Y_dot_BF = 0; % Initial velocity in Y direction BF (m/s)
Quad.Z_dot_BF = 0; % Initial velocity in Z direction BF (m/s) (original comment said Y)
% Angular Positions
Quad.phi = 0; % Initial phi value (rotation about X GF, roll, radians)
Quad.theta = 0; % Initial theta value (rotation about Y GF, pitch, radians)
Quad.psi = 0; % Initial psi value (rotation about Z GF, yaw, radians)
% Angular Velocities
Quad.p = 0; % Initial p value (angular rate rotation about X BF, radians/s)
Quad.q = 0; % Initial q value (angular rate rotation about Y BF, radians/s)
Quad.r = 0; % Initial r value (angular rate rotation about Z BF, radians/s)
% Desired variables
Quad.X_des_GF = 10; % desired value of X in Global frame
Quad.Y_des_GF = 10; % desired value of Y in Global frame
Quad.Z_des_GF = 10; % desired value of Z in Global frame
Quad.X_des = 0; % desired value of X in Body frame
Quad.Y_des = 0; % desired value of Y in Body frame
Quad.Z_des = 0; % desired value of Z in Body frame
Quad.phi_des = 0; % desired value of phi (radians)
Quad.theta_des = 0; % desired value of theta (radians)
Quad.psi_des = 0; % desired value of psi (radians)
% Measured variables (filled in by the sensor model at run time)
Quad.X_meas = 0;
Quad.Y_meas = 0;
Quad.Z_meas = 0;
Quad.phi_meas = 0;
Quad.theta_meas = 0;
Quad.psi_meas = 0;
% Disturbance Variables
Quad.Z_dis = 0; % Disturbance in Z direction
Quad.X_dis = 0; % Disturbance in X direction
Quad.Y_dis = 0; % Disturbance in Y direction
Quad.phi_dis = 0; % Disturbance about roll (phi) (original comment said "Yaw")
Quad.theta_dis = 0; % Disturbance about pitch (theta)
Quad.psi_dis = 0; % Disturbance about yaw (psi) (original comment said "Roll")
% Control Inputs
Quad.U1 = 0; % Total thrust (N)
Quad.U2 = 0; % Torque about X axis BF (N-m)
Quad.U3 = 0; % Torque about Y axis BF (N-m)
Quad.U4 = 0; % Torque about Z axis BF (N-m)
% Control Limits (update values)
Quad.U1_max = 43.5; % Quad.KT*4*Quad.max_motor_speed^2
Quad.U1_min = 0; %
Quad.U2_max = 6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U2_min = -6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U3_max = 6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U3_min = -6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U4_max = 2.25; % Quad.Kd*2*Quad.max_motor_speed^2
Quad.U4_min = -2.25;% Quad.Kd*2*Quad.max_motor_speed^2
% PID parameters (negative KD values suggest derivative-on-measurement;
% confirm against the controller implementation)
Quad.X_KP = .5; % KP value in X position control
Quad.X_KI = .25; % KI value in X position control
Quad.X_KD = -.7; % KD value in X position control
Quad.X_KI_lim = .25; % Error to start calculating integral term
Quad.Y_KP = .5; % KP value in Y position control
Quad.Y_KI = .25; % KI value in Y position control
Quad.Y_KD = -.7; % KD value in Y position control
Quad.Y_KI_lim = .25; % Error to start calculating integral term
Quad.Z_KP = 5; % KP value in altitude control
Quad.Z_KI = 1; % KI value in altitude control
Quad.Z_KD = -5; % KD value in altitude control
Quad.Z_KI_lim = .25; % Error to start calculating integral term
Quad.phi_KP = 2; % KP value in roll control
Quad.phi_KI = 1; % KI value in roll control
Quad.phi_KD = -.0; % KD value in roll control (previously -.5)
Quad.phi_max = pi/4; % Maximum roll angle commanded
Quad.phi_KI_lim = 2*(2*pi/360); % Error to start calculating integral
Quad.theta_KP = 2; % KP value in pitch control
Quad.theta_KI = 1; % KI value in pitch control
Quad.theta_KD = -.0; % KD value in pitch control (previously -.5)
Quad.theta_max = pi/4; % Maximum pitch angle commanded
Quad.theta_KI_lim = 2*(2*pi/360); % Error to start calculating integral
Quad.psi_KP = 2; % KP value in yaw control
Quad.psi_KI = 0.75; % KI value in yaw control
Quad.psi_KD = -0.0; % KD value in yaw control (previously -.5)
Quad.psi_KI_lim = 8*(2*pi/360); % Error to start calculating integral
Quad.p_KP = 3; % KP value in roll-rate (p) control
Quad.p_KI = 0.7; % KI value in roll-rate (p) control
Quad.p_KD = -.01; % KD value in roll-rate (p) control
Quad.p_max = 50*(2*pi/360); % Maximum roll rate commanded
Quad.p_KI_lim = 10*(2*pi/360); % Error to start calculating integral
Quad.q_KP = 3; % KP value in pitch-rate (q) control
Quad.q_KI = 0.7; % KI value in pitch-rate (q) control
Quad.q_KD = -.01; % KD value in pitch-rate (q) control
Quad.q_max = 50*(2*pi/360); % Maximum pitch rate commanded
Quad.q_KI_lim = 10*(2*pi/360); % Error to start calculating integral
Quad.r_KP = 3; % KP value in yaw-rate (r) control
Quad.r_KI = 0.7; % KI value in yaw-rate (r) control
Quad.r_KD = -.01; % KD value in yaw-rate (r) control
Quad.r_max = 50*(2*pi/360); % Maximum yaw rate commanded
Quad.r_KI_lim = 10*(2*pi/360); % Error to start calculating integral
% Wil Selby
% Washington, DC
% May 30, 2015
% Second variant of the quadrotor parameter script.  Differences from the
% first copy above: non-zero GPS position errors, initial/desired
% altitude and position of 1 m (instead of 10 m), and retuned PID gains
% (X/Y, phi/theta/psi and the p/q/r rate loops).  If this file is
% executed top-to-bottom, these assignments override the earlier copy.
global Quad;
%% Initialize Variables
% Simulation Parameters
Quad.init = 0; % used in initialization
Quad.Ts = .01; % Sampling time (100 Hz)
Quad.sim_time = 10; % Simulation time (seconds)
Quad.counter = 1; % the counter that holds the time value
% Plotting Variables
Quad.t_plot = [0:Quad.Ts:Quad.sim_time-Quad.Ts]; % the time values
Quad.Xtemp = 0; % Temp variables used rotating and plotting quadrotor
Quad.Ytemp = 0; % Temp variables used rotating and plotting quadrotor
Quad.Ztemp = 0; % Temp variables used rotating and plotting quadrotor
% Environmental Parameters
Quad.g = 9.81; % Gravity (m/s^2)
% Quadrotor Physical Parameters
Quad.m = 1.4; % Quadrotor mass (kg)
Quad.l = .56; % Distance from the center of mass to each motor (m)
Quad.t = .02; % Thickness of the quadrotor's arms for drawing purposes (m)
Quad.rot_rad = .1; % Radius of the propeller (m)
Quad.Kd = 1.3858e-6; % Drag torque coefficient (kg-m^2)
Quad.Kdx = 0.16481; % Translational drag force coefficient (kg/s)
Quad.Kdy = 0.31892; % Translational drag force coefficient (kg/s)
Quad.Kdz = 1.1E-6; % Translational drag force coefficient (kg/s)
Quad.Jx = .05; % Moment of inertia about X axis (kg-m^2)
Quad.Jy = .05; % Moment of inertia about Y axis (kg-m^2)
Quad.Jz = .24; % Moment of inertia about Z axis (kg-m^2)
% Quadrotor Sensor Parameters
Quad.GPS_freq = (1/Quad.Ts)/1; % GPS update rate: 100 Hz with Ts = .01
Quad.X_error = .01; %+/- m
Quad.Y_error = .01; %+/- m
Quad.Z_error = .02; %+/- m
Quad.x_acc_bias = 0.16594; % m/s^2
Quad.x_acc_sd = 0.0093907; % accelerometer standard deviation
Quad.y_acc_bias = 0.31691; % m/s^2
Quad.y_acc_sd = 0.011045;
Quad.z_acc_bias = -8.6759; % m/s^2
Quad.z_acc_sd = 0.016189;
Quad.x_gyro_bias = 0.00053417; % rad/s
Quad.x_gyro_sd = 0.00066675; % gyro standard deviation
Quad.y_gyro_bias = -0.0011035; % rad/s
Quad.y_gyro_sd = 0.00053642;
Quad.z_gyro_bias = 0.00020838; % rad/s
Quad.z_gyro_sd = 0.0004403;
% Mutually exclusive sensor-model switches:
Quad.ground_truth = 1; % Use perfect sensor measurements
Quad.sensor_unfiltered = 0; % Use sensor errors, no filter
Quad.sensor_kf = 0; % Use sensor error, Kalman Filter
% Motor Parameters
Quad.KT = 1.3328e-5; % Thrust force coefficient (kg-m)
Quad.Jp = 0.044; % Moment of Inertia of the rotor (kg-m^2)
Quad.max_motor_speed = 925; % motors upper limit (rad/s)
Quad.min_motor_speed = 0; %-1*((400)^2); % motors lower limit (can't spin in reverse)
Quad.Obar = 0; % sum of motor speeds (O1-O2+O3-O4, N-m)
Quad.O1 = 0; % Front motor speed (radians/s)
Quad.O2 = 0; % Right motor speed (radians/s)
Quad.O3 = 0; % Rear motor speed (radians/s)
Quad.O4 = 0; % Left motor speed (radians/s)
% Translational Positions
Quad.X = 0; % Initial position in X direction GF (m)
Quad.Y = 0; % Initial position in Y direction GF (m)
Quad.Z = 1; % Initial position in Z direction GF (m)
Quad.X_BF = 0; % Initial position in X direction BF (m)
Quad.Y_BF = 0; % Initial position in Y direction BF (m)
Quad.Z_BF = 0; % Initial position in the Z direction BF (m)
% Translational Velocities
Quad.X_dot = 0; % Initial velocity in X direction GF (m/s)
Quad.Y_dot = 0; % Initial velocity in Y direction GF (m/s)
Quad.Z_dot = 0; % Initial velocity in Z direction GF (m/s)
Quad.X_dot_BF = 0; % Initial velocity in X direction BF (m/s)
Quad.Y_dot_BF = 0; % Initial velocity in Y direction BF (m/s)
Quad.Z_dot_BF = 0; % Initial velocity in Z direction BF (m/s) (original comment said Y)
% Angular Positions
Quad.phi = 0; % Initial phi value (rotation about X GF, roll, radians)
Quad.theta = 0; % Initial theta value (rotation about Y GF, pitch, radians)
Quad.psi = 0; % Initial psi value (rotation about Z GF, yaw, radians)
% Angular Velocities
Quad.p = 0; % Initial p value (angular rate rotation about X BF, radians/s)
Quad.q = 0; % Initial q value (angular rate rotation about Y BF, radians/s)
Quad.r = 0; % Initial r value (angular rate rotation about Z BF, radians/s)
% Desired variables
Quad.X_des_GF = 1; % desired value of X in Global frame
Quad.Y_des_GF = 1; % desired value of Y in Global frame
Quad.Z_des_GF = 1; % desired value of Z in Global frame
Quad.X_des = 0; % desired value of X in Body frame
Quad.Y_des = 0; % desired value of Y in Body frame
Quad.Z_des = 0; % desired value of Z in Body frame
Quad.phi_des = 0; % desired value of phi (radians)
Quad.theta_des = 0; % desired value of theta (radians)
Quad.psi_des = 0; % desired value of psi (radians)
% Measured variables (filled in by the sensor model at run time)
Quad.X_meas = 0;
Quad.Y_meas = 0;
Quad.Z_meas = 0;
Quad.phi_meas = 0;
Quad.theta_meas = 0;
Quad.psi_meas = 0;
% Disturbance Variables
Quad.Z_dis = 0; % Disturbance in Z direction
Quad.X_dis = 0; % Disturbance in X direction
Quad.Y_dis = 0; % Disturbance in Y direction
Quad.phi_dis = 0; % Disturbance about roll (phi) (original comment said "Yaw")
Quad.theta_dis = 0; % Disturbance about pitch (theta)
Quad.psi_dis = 0; % Disturbance about yaw (psi) (original comment said "Roll")
% Control Inputs
Quad.U1 = 0; % Total thrust (N)
Quad.U2 = 0; % Torque about X axis BF (N-m)
Quad.U3 = 0; % Torque about Y axis BF (N-m)
Quad.U4 = 0; % Torque about Z axis BF (N-m)
% Control Limits (update values)
Quad.U1_max = 43.5; % Quad.KT*4*Quad.max_motor_speed^2
Quad.U1_min = 0; %
Quad.U2_max = 6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U2_min = -6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U3_max = 6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U3_min = -6.25; % Quad.KT*Quad.l*Quad.max_motor_speed^2
Quad.U4_max = 2.25; % Quad.Kd*2*Quad.max_motor_speed^2
Quad.U4_min = -2.25;% Quad.Kd*2*Quad.max_motor_speed^2
% PID parameters (negative KD values suggest derivative-on-measurement;
% confirm against the controller implementation)
Quad.X_KP = .95; % KP value in X position control
Quad.X_KI = .25; % KI value in X position control
Quad.X_KD = -.7; % KD value in X position control
Quad.X_KI_lim = .25; % Error to start calculating integral term
Quad.Y_KP = 0.95; % KP value in Y position control
Quad.Y_KI = .3; % KI value in Y position control
Quad.Y_KD = -.7; % KD value in Y position control
Quad.Y_KI_lim = .25; % Error to start calculating integral term
Quad.Z_KP = 5; % KP value in altitude control
Quad.Z_KI = 1; % KI value in altitude control
Quad.Z_KD = -5; % KD value in altitude control
Quad.Z_KI_lim = .25; % Error to start calculating integral term
Quad.phi_KP = 8; % KP value in roll control (previously 2)
Quad.phi_KI = 1; % KI value in roll control
Quad.phi_KD = -0.5; % KD value in roll control
Quad.phi_max = pi/4; % Maximum roll angle commanded
Quad.phi_KI_lim = 2*(2*pi/360); % Error to start calculating integral
Quad.theta_KP = 8; % KP value in pitch control (previously 2)
Quad.theta_KI = 1; % KI value in pitch control
Quad.theta_KD = -0.5; % KD value in pitch control
Quad.theta_max = pi/4; % Maximum pitch angle commanded
Quad.theta_KI_lim = 2*(2*pi/360); % Error to start calculating integral
Quad.psi_KP = 6; % KP value in yaw control
Quad.psi_KI = .75; % KI value in yaw control
Quad.psi_KD = -.5; % KD value in yaw control
Quad.psi_KI_lim = 8*(2*pi/360); % Error to start calculating integral
Quad.p_KP = 3; % KP value in roll-rate (p) control
Quad.p_KI = 0.2; % KI value in roll-rate (p) control
Quad.p_KD = -0.05; % KD value in roll-rate (p) control
Quad.p_max = 50*(2*pi/360); % Maximum roll rate commanded
Quad.p_KI_lim = 10*(2*pi/360); % Error to start calculating integral
Quad.q_KP = 3; % KP value in pitch-rate (q) control
Quad.q_KI = 0.2; % KI value in pitch-rate (q) control
Quad.q_KD = -0.05; % KD value in pitch-rate (q) control
Quad.q_max = 50*(2*pi/360); % Maximum pitch rate commanded
Quad.q_KI_lim = 10*(2*pi/360); % Error to start calculating integral
Quad.r_KP = 3; % KP value in yaw-rate (r) control
Quad.r_KI = 0.2; % KI value in yaw-rate (r) control
Quad.r_KD = -0.05; % KD value in yaw-rate (r) control
Quad.r_max = 50*(2*pi/360); % Maximum yaw rate commanded
Quad.r_KI_lim = 10*(2*pi/360); % Error to start calculating integral
Host ID: DISK_SERIAL_NUM=be57ad4a Release: R2017b Windows User Name: kotvytsk | true |
2ce7b3124d179288ad40e5868c682f820861fcd3 | Python | EDGsheryl/CET-score-spider | /excel.py | UTF-8 | 299 | 2.65625 | 3 | [] | no_license | #coding=gbk
import xlwt

# Convert the space-separated dump in out.txt into an Excel workbook.
# Layout is preserved from the original script: data starts at row 2,
# column 1 (rows 0-1 and column 0 are left empty).
wbk = xlwt.Workbook()
sheet = wbk.add_sheet('sheet 1')

# `with` guarantees the input file is closed (the original leaked the
# handle), and iterating the file object streams lines instead of
# loading the whole file via readlines().
with open('out.txt', 'r') as md:
    for row, line in enumerate(md, start=2):
        # Each space-separated field goes into its own cell.  Note: like
        # the original, the trailing newline stays attached to the last
        # field of each line.
        for col, field in enumerate(line.split(' '), start=1):
            sheet.write(row, col, field)

wbk.save('chhulinew.xls')
| true |
228c78c18bab603dfc10a7a4bc524db3c336979c | Python | kimollG/tp_project | /backend/auth.py | UTF-8 | 2,746 | 2.59375 | 3 | [] | no_license | from datetime import datetime
from enum import Enum
from fastapi import Depends, HTTPException, status
from jose import jwt, JWTError
from models import TokenEntry
from passlib.context import CryptContext
class AuthBackend:
    """Async facade over a passlib password context.

    Only hashing and verification are exposed; the underlying context is
    kept private.
    """

    def __init__(self, password_context):
        self._password_context: CryptContext = password_context

    async def get_hash_for_password(self, password):
        """Return the salted hash of *password* using the configured scheme."""
        hashed = self._password_context.hash(password)
        return hashed

    async def verify_password(self, password, hashed_password):
        """Return True when *password* matches *hashed_password*."""
        matches = self._password_context.verify(password, hashed_password)
        return matches
def create_login_manager(oauth_scheme):
    """Build and return a ``LoginManager`` class bound to *oauth_scheme*.

    The scheme must be captured at class-creation time because
    ``get_current_user`` declares it as a FastAPI dependency default
    (``token=Depends(oauth_scheme)``), which is evaluated when the class
    body executes.
    """

    class LoginManager:
        """Issues and validates JWT access tokens."""

        def __init__(
                self,
                secret_key,
                algorithm,
                default_expiration_interval=None):
            # Key and algorithm are used for both signing and verification.
            self._secret_key = secret_key
            self._algorithm = algorithm
            # Fallback token lifetime used when create_access_token() is
            # called without an explicit interval.
            self._default_expiration_interval = default_expiration_interval

        async def create_access_token(self, data, expiration_interval=None):
            """Return a signed JWT carrying *data* plus an ``exp`` claim.

            Raises ValueError when no expiration interval is available,
            neither as an argument nor as the instance default.
            """
            expiration_interval = expiration_interval \
                or self._default_expiration_interval
            if not expiration_interval:
                raise ValueError(
                    "Neither default nor special expiration date provided"
                )
            to_encode = data.copy()  # never mutate the caller's dict
            to_encode["exp"] = expiration_interval + datetime.utcnow()
            return jwt.encode(
                to_encode,
                key=self._secret_key,
                algorithm=self._algorithm,
            )

        def get_current_user(self, token=Depends(oauth_scheme)):
            """FastAPI dependency: decode *token* into a ``TokenEntry``.

            Raises HTTP 401 when the token is malformed or expired
            (JWTError) or when its ``valid`` claim is falsy.
            (A leftover debug ``print(payload)`` was removed here.)
            """
            authentication_error = HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Incorrect password or username",
                headers={"WWW-Authenticate": "Bearer"},
            )
            try:
                payload = jwt.decode(
                    token,
                    self._secret_key,
                    algorithms=[self._algorithm]
                )
                if not payload["valid"]:
                    raise authentication_error
                return TokenEntry.parse_obj(payload)
            except JWTError:
                raise authentication_error

        def authenticate(self, user):
            """Accept every user.

            NOTE(review): real credential checking is not implemented.
            The original body contained an unreachable
            ``raise HTTPException(401)`` after ``return True``; that dead
            code has been removed.
            """
            return True

    return LoginManager
| true |
386c29f7815e930aca5214f0b59a63c19ff7d02f | Python | 170030889/Python | /sum.py | UTF-8 | 48 | 3.046875 | 3 | [] | no_license | a = 3
# Tiny demo script: add two constants and print the result.
a = 3
b = 4
total = a + b  # renamed from ``sum`` so the built-in sum() is not shadowed
print("sum:", total)
5b7b8490ad45fc31651fd4ca0f6c09836abde615 | Python | chenjm1/wargame | /wargame/knight.py | UTF-8 | 1,400 | 3.140625 | 3 | [] | no_license | from gameunit import GameUnit
from comfunc import print_bold
class Knight(GameUnit):
def __init__(self,name='foo'):
super().__init__(name=name)
self.max_health=50
self.health_meter=self.max_health
self.unit_type='friend'
def info(self):
print('I am Knight')
def acquire_hut(self,hut):
print_bold("Entering hut %d..." % hut.number, end=' ')
if (isinstance(hut.occupant,GameUnit) and
hut.occupant.unit_type=='enemy'):
play_option=True
self.show_health(end=' ')
hut.occupant.show_health(end=' ')
while play_option:
play_option = input('attack?YES(y)/NO(n):')
if play_option == 'n':
self.run_away()
break
self.attack(hut.occupant)
if hut.occupant.health_meter<=0:
print('kill Orc')
hut.acquire(self)
break
if self.health_meter<=0:
print('you dead')
break
else:
if hut.get_occupant_type=='empty':
print_bold('Hut is empty')
else:
print_bold('Hut is Signed')
hut.acquire(self)
self.heal()
def run_away(self):
print('RUNAWAY')
self.enemy=None | true |
d723196b40195ae90745570f35f85e4d8bb5880f | Python | Easimer/advent-of-code-2018 | /day4/day4.py | UTF-8 | 3,747 | 3.234375 | 3 | [] | no_license | import sys
EVENT_ERROR = 0
EVENT_SHIFT = 1
EVENT_SLEEP = 2
EVENT_AWAKE = 3
def read_input():
events = []
for line in sys.stdin:
tokens = line.split()
date = [int(x) for x in tokens[0][1:].split('-')]
time = [int(x) for x in tokens[1][0:-1].split(':')]
event = ' '.join(tokens[2:])
event_type = EVENT_ERROR
guard = -1
if tokens[2] == "Guard":
event_type = EVENT_SHIFT
guard = int(tokens[3][1:])
elif tokens[2] == "falls":
event_type = EVENT_SLEEP
elif tokens[2] == "wakes":
event_type = EVENT_AWAKE
entry = (tuple(date[0:3]), tuple(time[0:2]), event_type, guard)
events.append(entry)
return events
def render(timeline, interval):
    """Mark the minutes of *interval* as asleep (1) in *timeline*.

    *interval* is a pair of (hour, minute) tuples; only the minute
    components are used (sleep always happens within hour 00).
    Mutates *timeline* in place and returns it.
    """
    minute = interval[0][1]
    while minute < interval[1][1]:
        timeline[minute] = 1
        minute += 1
    return timeline
def simulate(events):
    """Fold the sorted event stream into a per-shift timetable.

    Returns a list of mutable entries ``[(month, day), guard_id, row]``
    where ``row`` is a 60-element minute strip (1 = asleep).

    Assumes *events* is chronologically sorted and that every SLEEP
    record is immediately followed by its AWAKE record (pairs are
    consumed two at a time).
    NOTE(review): an event at position i that is neither SLEEP nor SHIFT
    (e.g. an unpaired AWAKE) would loop forever, since i is only advanced
    inside those two branches.
    """
    timetable = []
    current_guard = -1  # NOTE(review): assigned but never used
    # ((month, day), guard, row)
    entry = [(0, 0), -1, [0] * 60]
    # until not empty:
    #   fetch SHIFT entry
    #   fetch SLEEP-AWAKE entry pairs until it's not a SLEEP entry
    i = 0
    while i < len(events):
        event1 = events[i]
        if event1[2] == EVENT_SLEEP:
            event2 = events[i + 1]
            if event2[2] != EVENT_AWAKE:
                # Debug guard: the input violated the SLEEP/AWAKE pairing.
                print("FUCK")
            interval = (event1[1], event2[1])
            entry[2] = render(entry[2], interval)
            i += 2  # consumed the SLEEP/AWAKE pair
            print(f"Guard {entry[1]} sleeping {interval}")
        elif event1[2] == EVENT_SHIFT:
            # Flush the previous shift (if any) before starting a new one.
            if entry[1] != -1:
                timetable.append(entry)
                entry = [(0, 0), -1, [0] * 60]
            entry[1] = event1[3]  # guard id from the SHIFT record
            entry[0] = event1[0][1:3]  # keep only (month, day)
            i += 1
            continue
    # Flush the final shift.
    if entry[1] != -1:
        timetable.append(entry)
    return timetable
def visualize_timetable(timetable):
    """Print one line per shift: the date tuple, the guard id, a tab, and
    a 60-character minute strip ('.' = awake, '#' = asleep)."""
    for date, guard, minutes in timetable:
        strip = ''.join('.' if m == 0 else '#' for m in minutes)
        print(f"{date}{guard}\t{strip}")
def find_sleepiest_guard(timetable):
    """Return the id of the guard with the largest total minutes asleep.

    Ties resolve to the guard that appears first in *timetable*, matching
    dict insertion order.
    """
    totals = {}
    for _, guard, minutes in timetable:
        totals[guard] = totals.get(guard, 0) + sum(minutes)
    return max(totals, key=totals.get)
def guard_sleepiest_minute(timetable, guard):
    """Return the minute (0-59) at which *guard* is most often asleep.

    Sums the guard's minute strips across all of their shifts; ties
    resolve to the earliest minute.
    """
    counts = {}
    for _, who, minutes in timetable:
        if who != guard:
            continue
        for minute in range(60):
            counts[minute] = counts.get(minute, 0) + minutes[minute]
    return max(counts, key=counts.get)
def part2(timetable):
    """Part 2: find the (guard, minute) pair that is asleep most often.

    Also prints every pair that ties for the maximum count (diagnostic
    output kept from the original).  Returns the first pair reaching the
    maximum, or None for an empty timetable.
    """
    counts = {}
    for _, guard, minutes in timetable:
        for minute in range(60):
            if minutes[minute]:
                pair = (guard, minute)
                counts[pair] = counts.get(pair, 0) + 1
    best_pair = None
    best_count = 0
    for pair, count in counts.items():
        if count > best_count:
            best_count = count
            best_pair = pair
    for pair, count in counts.items():
        if count == best_count:
            print(pair)
    return best_pair
def main():
    """Read the guard log from stdin and print both puzzle answers."""
    events = read_input()
    # Records arrive unordered; tuple comparison sorts them
    # chronologically because (date, time) lead each entry.
    events.sort()
    timetable = simulate(events)
    visualize_timetable(timetable)
    # Part 1: sleepiest guard overall times his sleepiest minute.
    guard = find_sleepiest_guard(timetable)
    minute = guard_sleepiest_minute(timetable, guard)
    print(f"Part 1: {guard * minute}")
    # Part 2: the (guard, minute) pair most frequently asleep.
    mkey = part2(timetable)
    print(f"Part 2: {mkey[0]}x{mkey[1]} = {mkey[0] * (mkey[1])}")
# Runs unconditionally on import/execution (no __main__ guard in the original).
main()
| true |
b43de5ebbe96878004ad3721f53d2bd969f0a9e3 | Python | bakadave/KMLcreator | /helperFunctions.py | UTF-8 | 5,151 | 3.203125 | 3 | [] | no_license | from math import sin, cos, sqrt, atan2, radians
from pnt2line import pnt2line
import sys
# Path to the national-border polyline file, one "lon lat" pair per line.
borderPath = "maps/HUNborder.txt"
# Lazily-populated cache of (lon, lat) border points; filled by importBorder().
border = []
def ft2m(feet):
    """Convert a length in feet to metres (1 ft = 0.3048 m)."""
    METRES_PER_FOOT = 0.3048
    return feet * METRES_PER_FOOT
#creates a list of tuples from the coordinates
def splitCoordinates(string):
    """Parse an AIP boundary description into a list of (lon, lat) tuples.

    Segments are separated by ' - '.  When a segment mentions "border", the
    national border polyline is spliced in between that point and the next
    one via handleBorder()."""
    points = []
    pending_border = False
    for segment in string.split(' - '):
        # latitude is the text before the last space, longitude the text
        # after the first space (trailing words like "along border ..." are
        # tolerated by coordStr2dms, which only reads the leading digits)
        lat = dms2dd(coordStr2dms(segment.rpartition(' ')[0]))
        lon = dms2dd(coordStr2dms(segment.partition(' ')[2]))
        point = (lon, lat)
        if pending_border:
            handleBorder(points[-1], point, points)
            pending_border = False
        if "border" in segment:
            pending_border = True
        points.append(point)
    return points
def handleBorder(firstP, lastP, lst):
    """Append the stretch of the national border between firstP and lastP.

    firstP is the point already in lst that lies on the border; lastP is the
    next parsed point.  The border vertices between their nearest border
    vertices are appended to lst (lastP itself is appended by the caller).
    """
    if not border:
        importBorder()
    # find the closest border vertex to firstP (anchor for the insertion point)
    minDist = sys.maxsize
    idx1 = 0
    for idx, coord in enumerate(border):
        dst = calculateDistance(firstP, coord)
        if dst < minDist:
            minDist = dst
            idx1 = idx
    # If the border segment *after* the nearest vertex is closer to firstP
    # than the segment before it, the stretch starts at the following vertex.
    # NOTE(review): border[idx1 + 1] can raise IndexError when the nearest
    # vertex is the final one -- confirm the border file closes the loop.
    if pnt2line(firstP, border[idx1 - 1], border[idx1]) > pnt2line(firstP, border[idx1], border[idx1 + 1]):
        idx1 += 1
    # find the closest border vertex to lastP
    minDist = sys.maxsize
    idx2 = 0
    for idx, coord in enumerate(border):
        dst = calculateDistance(lastP, coord)
        if dst < minDist:
            minDist = dst
            idx2 = idx
    # BUG FIX: this test previously measured firstP against lastP's
    # neighbouring segments, so lastP's insertion point could be off by one
    # vertex; it must use lastP on both sides of the comparison.
    if pnt2line(lastP, border[idx2 - 1], border[idx2]) > pnt2line(lastP, border[idx2], border[idx2 + 1]):
        idx2 += 1
    # nothing to insert when both points map to the same border vertex
    if (idx1 == idx2):
        return
    idx = min(idx1, idx2)
    while idx != (max(idx1, idx2) + 1):
        lst.append(border[idx])
        idx += 1
    return
# calculates the distance between two points on Earth surface
def calculateDistance(pt1, pt2):
    """Great-circle distance in km between two (lon, lat) points (haversine).

    Uses a spherical Earth with radius 6373 km."""
    EARTH_RADIUS_KM = 6373.0
    lon1, lat1 = radians(pt1[0]), radians(pt1[1])
    lon2, lat2 = radians(pt2[0]), radians(pt2[1])
    half_dlat = (lat2 - lat1) / 2
    half_dlon = (lon2 - lon1) / 2
    a = sin(half_dlat)**2 + cos(lat1) * cos(lat2) * sin(half_dlon)**2
    c = 2 * atan2(sqrt(a), sqrt(1 - a))
    return EARTH_RADIUS_KM * c
# import national border polygon from file specified in "borderPath"
def importBorder():
    """Load the border polyline file into the module-level `border` list.

    Each line of the file is "lon lat"; points are appended as (lon, lat)
    tuples.  The trailing character (the newline) is sliced off each line,
    matching the original behaviour."""
    with open(borderPath, 'r') as infile:
        for raw in infile:
            raw = raw[:-1]
            lon = float(raw.partition(' ')[0])
            lat = float(raw.partition(' ')[2])
            border.append((lon, lat))
# parses coordinate string from AIP to degree decimal format
def coordStr2dd(val):
    """Parse an AIP coordinate string such as '474541N' or '0183928E'.

    Longitudes start with '0' and carry three degree digits, latitudes two.
    Returns integer degrees plus the remaining digits divided by 10000
    (a pseudo-decimal, not true decimal degrees)."""
    deg_len = 3 if val[0] == "0" else 2
    whole = int(val[:deg_len])
    frac = int(val[deg_len:-1]) / 10000
    return whole + frac
# parses coordinate string from AIP to degrees, minutes, seconds format
def coordStr2dms(val):
    """Split an AIP coordinate string into [degrees, minutes, seconds] ints.

    Longitudes start with '0' and carry three degree digits, latitudes two;
    any trailing text after the six/seven digits is ignored."""
    n = 3 if val[0] == "0" else 2
    return [int(val[:n]), int(val[n:n + 2]), int(val[n + 2:n + 4])]
# converts altitude in string to feet
def altStr2Num(val):
    """Convert an AIP altitude string to feet.

    'FL 95' -> 9500 (flight level x100), '4500 FT' -> 4500, 'GND' -> 0.
    Unrecognized strings fall through and yield None."""
    if "FL" in val:
        flight_level = int(val.partition(' ')[2])
        return flight_level * 100
    if "FT" in val:
        return int(val.partition(' ')[0])
    if "GND" in val:
        return 0
# degree decimal to degrees, minutes, seconds (DMS) converter
def dd2dms(deg):
    """Convert decimal degrees to [degrees, minutes, seconds-as-float]."""
    whole = int(deg)
    total_minutes = abs(deg - whole) * 60
    minutes = int(total_minutes)
    seconds = (total_minutes - minutes) * 60
    return [whole, minutes, seconds]
# prints DMS in a pretty format
def printDMS(deg):
    """Pretty-print a [d, m, s] triple as d°m's" (seconds rounded to 4 dp)."""
    d, m, s = deg[0], deg[1], deg[2]
    print(str(d) + "°" + str(m) + "'" + str(round(s, 4)) + "\"")
# degrees, minutes, seconds (DMS) to degree decimal converter
def dms2dd(deg):
    """Convert [degrees, minutes, seconds] to decimal degrees, rounded to 5 dp."""
    decimal = float(deg[0]) + float(deg[1]) / 60 + float(deg[2]) / (60 * 60)
    return round(decimal, 5)
# returns the cross product of 2D vectors
def crossProduct(a, b):
    """2D cross product a x b = a.x*b.y - b.x*a.y."""
    ax, ay = a[0], a[1]
    bx, by = b[0], b[1]
    return ax * by - bx * ay
# determines polygon rotation from the sign of the shoelace (cross product) sum
# NOTE: with crossProduct(a, b) = a.x*b.y - b.x*a.y a positive sum is
# counter-clockwise in standard math axes (True==CW only holds for screen
# coordinates with the y axis pointing down) -- confirm the intended convention
def isClockWise(lst):
    """Classify polygon winding from the shoelace sum of consecutive points.

    Returns True when the summed cross products are positive, False otherwise
    (including the degenerate zero-area case).  The point list is expected to
    repeat the first point at the end to close the polygon."""
    total = 0
    for first, second in zip(lst, lst[1:]):
        total += crossProduct(first, second)
    return total > 0
if __name__ == "__main__":
    # Smoke test: parse a boundary that splices in the national border
    # (requires maps/HUNborder.txt to be present next to the script).
    splitCoordinates("474541N 0183928E along border HUNGARY_SLOVAKREPUBLIC - 474548N 0182806E - 472827N 0182806E - 472956N 0183216E - 473231N 0183928E - 474541N 0183928E")
c6f6e933c46a2cd9cc34083ae34a2c1e6d2f7116 | Python | Alireza-Gerami/UBPythonWorkshop | /Session-3/meet-code/1.py | UTF-8 | 178 | 4.15625 | 4 | [] | no_license | for i in range(1, 11):
    # Inner loop: print row i of the 10x10 multiplication table.
    for j in range(1, 11):
        if i * j >= 10:
            print(i * j, end = ' ')
        else:
            # NOTE(review): this branch looks identical to the one above in
            # this copy; presumably single-digit products get an extra pad
            # space for column alignment -- confirm against the original file.
            print(i * j, end = ' ')
    print()
5173c1545e2a5c383ce059a24d938c6c677f567f | Python | JasonArce/UMN | /Projects/Artificial Intelligence (Python + Snake)/CodeAnaysis4/code/problems/kenken.py | UTF-8 | 8,349 | 3.5 | 4 | [] | no_license | import functools
from functools import reduce
from csp_problem import ConstraintProblem
# The primary problem set-up consists of "variables" and "constraints":
# "variables" are a dictionary of constraint variables (of type ConstraintVar), example variables['A1']
# For KenKen, each row is labeled with a letter. "A" is the top row.
# And each column is a number. "1" is the leftmost column.
#
# "constraints" are the unary and binary constraints that must be satisfied.
# For KenKen, constraints will include uniqueness for rows and columns (generic for all kenken's)
# And for the specifics of the puzzle, which are provided in a "user-friendly" format,
# then converted to a usable representation.
#
class KenKen(ConstraintProblem):
    """CSP model of a KenKen puzzle.

    Cells are constraint variables named row letter + column number ("A1" is
    the top-left cell).  Constraints are the generic row/column all-different
    rules plus the puzzle's cage constraints, which arrive in a user-friendly
    triplet format and are converted by construct_user_constraints().
    """
    # Input is the size of the puzzle and user-friendly constraints
    def __init__(self, size, constraints):
        ConstraintProblem.__init__(self)
        self.size = size
        self.row_labels = []
        self.column_labels = []
        # Create the "A1", "A2", ... variables
        # Each variable has a name and domain. It's stored in a dictionary indexed by the name (e.g. "A1")
        self.construct_variables()
        # Construct the constraints to ensure unique column and row values
        self.construct_all_unique()
        # Convert the user-friendly constraints to formally defined ones.
        self.construct_user_constraints(constraints)
        # This is not really used, but might come in handy later -- make separate groups for unary and binary.
        self.sort_constraints()
    def construct_variables(self):
        """Create one variable per cell, each with domain 1..size."""
        # creating labels 'A1' 'A2' ... 'B1' 'B2' ...
        A_ascii = ord('A')
        self.row_labels = [chr(i+A_ascii) for i in range(0, self.size)]
        self.column_labels = [str(i) for i in range(1, self.size+1)]
        for row in self.row_labels:
            for col in self.column_labels:
                # Make it and store in the dictionary of variables.
                self.variables[row+col] = self.Variable(row+col, [d for d in range(1, self.size+1)])
    def construct_rows_unique_constraints(self):
        """Add pairwise "not equal" constraints between all cells of each row."""
        if self.row_labels == [] or self.column_labels == []:
            # This is not a graceful exit.
            # NOTE(review): this guard calls exit(), while the column variant
            # below merely returns -- presumably both should behave the same.
            print('You must first call construct_variables() before constructing constraints')
            exit()
        for row in self.row_labels:
            labels = []
            for col in self.column_labels:
                labels.append( row+col )
            # Creates binary "not ==" constraints for all pair combinations in the list
            self.create_all_unique_constraints(labels)
    def construct_columns_unique_constraints(self):
        """Add pairwise "not equal" constraints between all cells of each column."""
        if self.row_labels == [] or self.column_labels == []:
            print('You must first call construct_variables() before constructing constraints')
            return
        for col in self.column_labels:
            labels = []
            for row in self.row_labels:
                labels.append( row+col )
            # Creates binary "not ==" constraints for all pair combinations in the list
            self.create_all_unique_constraints(labels)
    def construct_all_unique(self):
        """Add the generic KenKen row and column all-different constraints."""
        self.construct_columns_unique_constraints()
        self.construct_rows_unique_constraints()
    def construct_user_constraints(self, user_friendly_constraints):
        """Convert user-friendly cage triplets into Constraint objects.

        Each triplet is [operator, target value, variable list]; variable
        lists of length 1, 2 and 3 are handled (unary, binary, ternary).
        Binary/ternary cages get one Constraint per variable ordering that
        needs revising, since Revise() only edits the first listed variable.
        """
        # constraints are provided in a more user-friendly format
        # EXAMPLES: ['-', 2, ['A1','B1']], ['==', 1, ['B2']]
        # This will convert that to a format required by the csp algorithm
        # ufc is a triplet:
        # - index 0 is the operator,
        # - index 1 is the result value
        # - index 2 is a variable list (required to have 1 or 2 elements only)
        def make_lambda(opchar, value, unary=False, tri=False):
            # Factory is required for *early binding*: capturing opchar/value
            # as function parameters freezes them per constraint, whereas a
            # lambda referencing the loop variable would see its final value.
            if unary:
                return (lambda x: ConstraintProblem.operators[opchar](x, value))
            elif tri:
                # Ternary cages: the operator must hit the target for *some*
                # ordering of the three values, hence all six permutations.
                return (lambda x,y,z:
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](x,y) ,z)) or
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](x,z) ,y)) or
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](y,x) ,z)) or
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](y,z) ,x)) or
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](z,x) ,y)) or
                        (value == ConstraintProblem.operators[opchar](ConstraintProblem.operators[opchar](z,y) ,x))
                        )
            else:
                return (lambda x,y:
                        (value == ConstraintProblem.operators[opchar](x,y)) or
                        (value == ConstraintProblem.operators[opchar](y,x)))
        for ufc in user_friendly_constraints:
            if len(ufc[2]) == 1: # unary constraint
                # EXAMPLE: constraint is ['==',2,['A3']]
                # This creates Constraint(['A3'], lambda x: x == 2)
                # Note that the lambda example is infix, however
                # we are using operators[..]( , ) -- a prefix notation (but same result)
                # The call to make_lambda is used because of "lazy evaluation"
                # of the lambda fn -- meaning it won't dereference until it evaluates the lambda.
                # At that point, ufc is equal to the last constraint evaluated.
                # Do not do lambda x: operators[ufc[0]](x,ufc[2]) !!!! (It took me a bit to debug that)
                self.all_constraints.append(self.Constraint(ufc[2], ufc[0],
                                            make_lambda(ufc[0], ufc[1], unary=True)))
            elif len(ufc[2]) == 2: # binary constraint
                # EXAMPLE: constraint is ['-',2,['A2','B2']]
                # This requires 2 constraints because Revise() only edits the
                # first variable listed.
                # So this creates one for ['A2', 'B2'] and one for ['B2', 'A2']
                # Call to make_lambda used because of lazy evaluation (see note in above if-statement
                vars1 = ufc[2]
                vars2 = [ufc[2][1], ufc[2][0]]
                #print('Vars1:' + ''.join(vars1) + ' Vars2:' + ''.join(vars2))
                c = self.Constraint(vars1, ufc[0], make_lambda(ufc[0], ufc[1]))
                self.all_constraints.append(c)
                self.add_neighbor(vars1[1], c)
                c = self.Constraint(vars2, ufc[0], make_lambda(ufc[0], ufc[1]))
                self.all_constraints.append(c)
                self.add_neighbor(vars1[0], c)
            elif len(ufc[2]) == 3:
                # Ternary cage: only the three rotations vars1/vars3/vars5 are
                # registered (each variable leads exactly once).
                # NOTE(review): vars2/vars4/vars6 are built but never used,
                # and the print below looks like leftover debug output.
                vars1 = ufc[2]
                vars2 = [ufc[2][0], ufc[2][2], ufc[2][1]]
                vars3 = [ufc[2][1], ufc[2][0], ufc[2][2]]
                vars4 = [ufc[2][1], ufc[2][2], ufc[2][0]]
                vars5 = [ufc[2][2], ufc[2][0], ufc[2][1]]
                vars6 = [ufc[2][2], ufc[2][1], ufc[2][0]]
                #Vars1:F1F2F3 Vars2:F1F3F2 Vars3:F2F1F3 Vars4:F2F3F1 Vars5:F3F1F2 Vars6:F3F2F1
                print('Vars1:' + ''.join(vars1) + ' Vars2:' + ''.join(vars2) + ' Vars3:' + ''.join(vars3) + ' Vars4:' + ''.join(vars4) + ' Vars5:' + ''.join(vars5) + ' Vars6:' + ''.join(vars6))
                c = self.Constraint(vars1, ufc[0], make_lambda(ufc[0], ufc[1], tri=True))
                self.all_constraints.append(c)
                self.add_neighbor(vars1[1], c)
                self.add_neighbor(vars1[2], c)
                c = self.Constraint(vars3, ufc[0], make_lambda(ufc[0], ufc[1], tri=True))
                self.all_constraints.append(c)
                self.add_neighbor(vars3[1], c)
                self.add_neighbor(vars3[2], c)
                c = self.Constraint(vars5, ufc[0], make_lambda(ufc[0], ufc[1], tri=True))
                self.all_constraints.append(c)
                self.add_neighbor(vars5[1], c)
                self.add_neighbor(vars5[2], c)
            else:
                print("Not yet equipped to manage more than arc consistency")
                return
    def pretty_print(self, variables):
        """Print each variable's name and its remaining domain."""
        for k, v in variables.items():
            print(v.name,' ', v.domain)
| true |
04143cd539305a2bc4fc7b3c27ec87ea26560ec1 | Python | mlnotes/hfml | /optimize/opt.py | UTF-8 | 5,349 | 2.9375 | 3 | [] | no_license | # -*- coding: UTF-8 -*-
import time
import sys
import random
import math
def getminutes(t):
    """Convert an 'HH:MM' clock string into minutes past midnight."""
    parsed = time.strptime(t, '%H:%M')
    return parsed.tm_hour * 60 + parsed.tm_min
def m2t(minutes):
    """Render minutes past midnight as a zero-padded 'HH:MM' string."""
    hours, mins = divmod(minutes, 60)
    return '%02d:%02d' % (hours, mins)
# (name, home-airport) pairs for the six travellers.
people = [('Seymour', 'BOS'),
          ('Franny', 'DAL'),
          ('Zooey', 'CAK'),
          ('Walt', 'MIA'),
          ('Buddy', 'ORD'),
          ('Les', 'OMA')]
# Everybody flies in and out of LaGuardia.
destination = 'LGA'
# (origin, dest) -> list of (depart-minutes, arrive-minutes, price) tuples,
# kept in file order so schedule indices are stable.
flights = {}
# read flights from file
# NOTE(review): file() is Python 2 only -- this module targets Python 2
# (see sys.maxint further down); use open() when porting to Python 3.
for line in file('schedule.txt'):
    origin, dest, depart, arrive, price = line.strip().split(',')
    flights.setdefault((origin, dest),[])
    flights[(origin, dest)].append(
        (getminutes(depart),
         getminutes(arrive),
         int(price)))
def output(r):
    """Print the itinerary encoded by solution vector r.

    r[2*d] / r[2*d+1] index person d's outbound / return flight in the
    schedule lists; one formatted line per traveller with times and prices.
    NOTE: len(r)/2 relies on Python 2 integer division.
    """
    for d in range(len(r)/2):
        name = people[d][0]
        origin = people[d][1]
        # outbound leg is origin -> destination, return leg the reverse
        out = flights[(origin, destination)][r[2*d]]
        ret = flights[(destination, origin)][r[2*d+1]]
        print ('%10s%10s %5s-%5s $%3s %5s-%5s $%3s' %
               (name, origin,
                m2t(out[0]), m2t(out[1]), out[2],
                m2t(ret[0]), m2t(ret[1]), ret[2]))
def schedulecost(sol):
    """Total cost of a candidate schedule.

    Cost = ticket prices + total waiting minutes at the airport, plus a $50
    penalty (extra car-rental day) when the group's earliest departure is
    before its latest arrival.

    sol[2*d] / sol[2*d+1] index person d's outbound / return flight.
    `//` keeps the integer division explicit and works under both Python 2
    (which this module otherwise targets) and Python 3.
    """
    totalprice = 0
    latestarrival = 0
    earliestdep = 24*60
    totalwait = 0
    for d in range(len(sol)//2):
        origin = people[d][1]
        out = flights[(origin, destination)][sol[2*d]]
        # BUG FIX: the return leg flies destination -> origin; the original
        # indexed flights[(origin, destination)] here, pricing and timing the
        # wrong flight list (compare output(), which uses the reverse key).
        ret = flights[(destination, origin)][sol[2*d+1]]
        totalprice += out[2] + ret[2]
        if latestarrival < out[1]: latestarrival = out[1]
        if earliestdep > ret[0]: earliestdep = ret[0]
    # Second pass: waiting time depends on the group-wide extremes found above.
    for d in range(len(sol)//2):
        origin = people[d][1]
        out = flights[(origin, destination)][sol[2*d]]
        ret = flights[(destination, origin)][sol[2*d+1]]  # BUG FIX: see above
        totalwait += (latestarrival-out[1]) + (ret[0]-earliestdep)
    if latestarrival > earliestdep: totalprice += 50
    return totalprice + totalwait
def randomopt(domain, costf):
    """Pure random search: sample 1000 random solutions from the integer box
    `domain` and return the cheapest one found.

    BUG FIX: the original tracked the best solution in `bestr` but then
    returned `r`, the *last* random sample drawn, discarding the search
    result entirely.  float('inf') also replaces the Python-2-only
    sys.maxint as the initial best cost (works on both versions).
    """
    best_cost = float('inf')
    best_sol = None
    for _ in range(1000):
        sol = [random.randint(lo, hi) for lo, hi in domain]
        cost = costf(sol)
        if cost < best_cost:
            best_cost = cost
            best_sol = sol
    return best_sol
def hillclimb(domain, costf):
    """Steepest-descent hill climbing from a random starting point.

    Each round evaluates every +/-1 neighbour of the current solution and
    moves to the best strictly-improving one; stops at a local minimum."""
    solution = [random.randint(lo, hi) for lo, hi in domain]
    while True:
        # enumerate every in-domain neighbour differing by one in one slot
        neighbours = []
        for pos, (lo, hi) in enumerate(domain):
            if solution[pos] > lo:
                neighbours.append(solution[:pos] + [solution[pos] - 1] + solution[pos + 1:])
            if solution[pos] < hi:
                neighbours.append(solution[:pos] + [solution[pos] + 1] + solution[pos + 1:])
        starting_cost = costf(solution)
        best_cost = starting_cost
        for candidate in neighbours:
            candidate_cost = costf(candidate)
            if candidate_cost < best_cost:
                best_cost = candidate_cost
                solution = candidate
        # no neighbour improved: local minimum reached
        if best_cost == starting_cost:
            break
    return solution
def annealing(domain, costf, T=10000.0, cool=0.95, step=1):
    """Simulated annealing over an integer box domain.

    Starts from a random solution; each step perturbs one randomly chosen
    dimension by +/-step (clamped to the domain) and accepts worse solutions
    with probability e**(-(cost-best)/T).  T is cooled geometrically by
    `cool` until it drops below 0.1.

    NOTE(review): `best` tracks the cost of the *current* solution rather
    than the best seen overall, so the returned solution can be worse than
    an intermediate one -- standard for plain annealing, but confirm intent.
    """
    sol = [random.randint(domain[i][0], domain[i][1])
        for i in range(len(domain))]
    best = costf(sol)
    while T>0.1:
        # select a random index to perturb
        i = random.randint(0, len(domain)-1)
        # select a direction (+step or -step with equal probability)
        direction = 0
        if random.random() < 0.5: direction = -step
        else: direction = step
        # create a new candidate, clamped to the domain bounds
        newsol = sol[:]
        newsol[i] += direction
        if newsol[i] < domain[i][0]: newsol[i] = domain[i][0]
        if newsol[i] > domain[i][1]: newsol[i] = domain[i][1]
        cost = costf(newsol)
        # accept improvements always; accept worse moves with Boltzmann probability
        if (cost < best or random.random() < pow(math.e, -(cost-best)/T)):
            best = cost
            sol = newsol
        T *= cool
    return sol
def genetic(domain, costf, popsize=50, step=1, mutprob=0.2, elite=0.2, maxiter=100):
    """Genetic-algorithm minimizer over an integer box domain.

    Keeps the top `elite` fraction each generation and refills the
    population with mutations (probability `mutprob`) and single-point
    crossovers of elite members.  Stops early once the champion is
    unchanged after at least 10 generations.

    Returns the lowest-cost solution in the final population.
    """
    def mutate(sol):
        # Nudge one randomly chosen gene by +/-step, staying inside the domain.
        i = random.randint(0, len(domain)-1)
        # BUG FIX: was random.rand(), which does not exist in the random module.
        if random.random() < 0.5 and sol[i]-step >= domain[i][0]:
            return sol[0:i] + [sol[i]-step] + sol[i+1:]
        elif sol[i]+step <= domain[i][1]:
            return sol[0:i] + [sol[i]+step] + sol[i+1:]
        # BUG FIX: previously fell through and returned None, which crashed
        # costf on the next generation; return the solution unchanged instead.
        return sol[:]
    def crossover(sol1, sol2):
        # Single-point crossover at a random gene index.
        i = random.randint(0, len(domain)-1)
        return sol1[0:i] + sol2[i:]
    # generate the initial random population
    pop = []
    for _ in range(popsize):
        sol = [random.randint(domain[j][0], domain[j][1])
               for j in range(len(domain))]
        pop.append(sol)
    topelite = int(elite * popsize)
    best = pop[0]
    for gen in range(maxiter):
        scores = [(costf(v), v) for v in pop]
        scores.sort()
        # stop once the champion is stable (after at least 10 generations)
        if gen > 10 and best == scores[0][1]:
            break
        best = scores[0][1]
        ranked = [v for (s, v) in scores]
        # elitism: carry the best solutions over unchanged
        pop = ranked[0:topelite]
        # refill the population with mutations and crossovers of elite members
        while len(pop) < popsize:
            # BUG FIX: was `random.random < mutprob` -- the missing call
            # compared the function object itself, so the branch choice was
            # broken (a TypeError on Python 3).
            if random.random() < mutprob:
                c = random.randint(0, topelite-1)
                pop.append(mutate(ranked[c]))
            else:
                c1 = random.randint(0, topelite-1)
                c2 = random.randint(0, topelite-1)
                pop.append(crossover(ranked[c1], ranked[c2]))
        scores = [(costf(v), v) for v in pop]
        scores.sort()
    return scores[0][1]
| true |
a09c00b1e1af6d19b4b460e5784c4ee361adcc1f | Python | cometchuck/spnati | /opponents/monika/helper-scripts/csv2xml/stage.py | UTF-8 | 3,776 | 2.96875 | 3 | [
"MIT"
] | permissive | import re
from .utils import format_interval
from .case import Case
from .ordered_xml import OrderedXMLElement
def parse_stage_name(name, opponent):
    """Resolve one stage token from a stage selector to a stage number.

    Recognizes the 'full'/'naked'/'mast'/'finish' aliases (and their numeric
    shorthands -3/-2/-1, resolved through `opponent`), 'lost-<clothing>'
    selectors, and plain integers.  Anything unrecognized is returned
    verbatim (lower-cased) as a named stage."""
    token = name.strip().lower()
    if token in ('full', 'fully_clothed', 'fully clothed'):
        return 0
    if token in ('naked', 'nude', '-3'):
        return opponent.naked_stage()
    if token in ('mast', 'masturbate', 'masturbating', '-2'):
        return opponent.masturbate_stage()
    if token in ('finish', 'finished', '-1'):
        return opponent.finished_stage()
    # lost-clothing selector, e.g. "lost-bra" / "lost_bra" / "lost bra"
    lost_match = re.match(r'lost(?:\-|\_|\s+)(.+)', token, re.IGNORECASE)
    if lost_match is not None:
        clothing_stage = opponent.lost_clothing_stage(lost_match.group(1))
        if clothing_stage is not None:
            return clothing_stage
    try:
        # plain numeric stage
        return int(token)
    except ValueError:
        # fall back to treating the token itself as a named stage
        return token
def parse_stage_selector(selector, opponent):
    """Expand a stage selector into a frozenset of stages.

    String selectors are comma-separated tokens: 'all' (every stage of the
    opponent), 'lo-hi' intervals, or single stage names; non-string
    selectors are converted to a frozenset as-is."""
    if not isinstance(selector, str):
        return frozenset(selector)
    stages = []
    for token in selector.split(','):
        token = token.strip().lower()
        if token == 'all':
            stages = range(opponent.len_stages())
            break
        bounds = re.match(r'(.+?)\s*\-\s*(.+)', token, re.IGNORECASE)
        if bounds is None:
            stages.append(parse_stage_name(token, opponent))
            continue
        low = parse_stage_name(bounds.group(1), opponent)
        high = parse_stage_name(bounds.group(2), opponent)
        # intervals only make sense between concrete numeric stages
        if not isinstance(low, int):
            raise ValueError("Cannot use special stage in an interval: {}".format(low))
        if not isinstance(high, int):
            raise ValueError("Cannot use special stage in an interval: {}".format(high))
        stages.extend(range(low, high + 1))
    return frozenset(stages)
def format_stage_set(stage_set):
    """Render a set of stages compactly.

    Named (non-numeric) stages are kept verbatim; numeric stages are sorted
    and collapsed into maximal consecutive runs, each rendered through
    format_interval().  Fragments are comma-joined."""
    fragments = []
    numeric = []
    for stage in stage_set:
        try:
            numeric.append(int(stage))
        except ValueError:
            fragments.append(stage)
    numeric.sort()
    while numeric:
        # find the longest consecutive run starting at numeric[0]
        run_end = 0
        for offset in range(1, len(numeric)):
            if numeric[offset] != numeric[0] + offset:
                break
            run_end = offset
        fragments.append(format_interval((numeric[0], numeric[0] + run_end)))
        del numeric[0:run_end + 1]
    return ','.join(fragments)
class Stage(object):
    """One stage of an opponent, holding the dialogue cases active at it."""
    def __init__(self, stage_num):
        self.stage_num = int(stage_num)  # numeric stage index
        self.cases = []                  # cases that can fire at this stage
    @classmethod
    def from_line_set(cls, lineset, stage_num):
        """Build a Stage from a dict keyed by stage sets, keeping every case
        whose stage set contains `stage_num` (in dict order)."""
        stage = cls(stage_num)
        for stage_set, cases in lineset.items():
            if stage_num in stage_set:
                stage.cases.extend(cases)
        return stage
    @classmethod
    def from_xml(cls, elem):
        """Build a Stage from a <stage> XML element and its <case> children."""
        stage = cls(elem.get('id'))
        stage.cases.extend(Case.from_xml(case) for case in elem.iter('case'))
        return stage
    def to_xml(self):
        """Serialize this stage (and its cases) back to a <stage> element."""
        elem = OrderedXMLElement('stage')
        elem.attributes['id'] = str(self.stage_num)
        elem.children.extend(case.to_xml(self.stage_num) for case in self.cases)
        return elem
| true |
127e4ceacb60a59596b42be674e537d905fe7705 | Python | Ngrbac/appli | /openweather/views.py | UTF-8 | 8,618 | 2.75 | 3 | [] | no_license | import requests
import pytemperature as pyt
from django.shortcuts import HttpResponseRedirect
from django.urls import reverse
from django.contrib import messages
from rest_framework import generics, permissions, status
from rest_framework.views import APIView
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.response import Response
from users import views as user_views
from .models import WeatherCity
from . import serializers, forms
# ger_weather: funkcija koja radi backend zahtjev prema OpenWeatherMap API endpointu.
# Prima string parametar te ga spušta na lowercase i uklanja razmake za smanjenje mogućnosti greške.
# APIkey za stranicu je ovdje u plain textu radi demonstracije, inače bi bio u env. varijablama.
def get_weather(city):
    """Fetch the current weather for `city` from the OpenWeatherMap API and
    return it serialized through WeatherSerializer.

    NOTE(review): the comment above this function claims the name is also
    lower-cased, but only surrounding whitespace is trimmed here -- confirm
    which behaviour is intended.  The API key is hard-coded deliberately for
    demonstration; it belongs in an environment variable otherwise.
    """
    apikey = '1eaaf038019381454dc2264f1dddf5b3'
    city = city.strip(' ')  # trims leading/trailing spaces only
    params = {'q':city, 'appid':apikey}
    url = f'https://api.openweathermap.org/data/2.5/weather/'
    weather_req = requests.get(url, params=params)
    weather_req = weather_req.json()
    return serializers.WeatherSerializer(weather_req).data
# Funkcija koja dohvaća gradove na listi s lijeve strane kako je zadano u opisu zadatka.
# Odabrani gradovi nisu hardcoded u kodu, već su ručno dodani i označeni.
# Označeni su u bazi s featured (bool) atributom koji admini mogu mijenjati,
# te tako jednostavno određivati gradove koji će biti na listi.
def sidelist(request):
    """Serialized list of cities flagged `featured=True` for the sidebar.

    Admins control the sidebar by toggling the `featured` flag in the DB."""
    featured = WeatherCity.objects.filter(featured=True)
    return serializers.CityListSerializer(featured, many=True).data
'''
Temeljni (home) pogled, kojeg ostali extendaju.
GET/POST requestovima razrađuje funkcionalnosti: POST šalje na detaljan prikaz odabranog grada.
Na njemu je search form, za temeljnu funkciju pretraživanja.
Na svoj template proslijeđuje podatke:
- o gradovima koji su označeni da budu stalno na listi sa strane
- o gradovima koji su u favoritima
- o formi, tj samu formu.
- ako se radi o POST metodi, proslijeđuje keyword argument na drugi prikaz.
'''
class BaseView(APIView):
    """Home view: renders the search form plus the featured and favourite
    city lists; a POSTed search redirects to the per-city detail view."""
    permission_classes = [permissions.AllowAny,]
    renderer_classes = [TemplateHTMLRenderer]
    def post(self, request, *args, **kwargs):
        """Redirect the submitted search term to the city-weather view.

        NOTE(review): returns None for non-POST methods, but DRF only routes
        POST requests here, so the guard is effectively redundant."""
        if request.method == 'POST':
            name = request.POST.get('name')
            return HttpResponseRedirect(reverse('city-weather', kwargs={'name':name}))
    def get(self, request, *args, **kwargs):
        """Render the home template with sidebar data and the search form."""
        favourite_cities = user_views.favourites(request)
        forma = forms.SearchForm()
        feat_cities = sidelist(request)
        return Response({'feat_cities': feat_cities, 'form': forma, 'favourite_cities': favourite_cities}, template_name="base.html")
### Ovaj pogled daje podatke o vremenu u pojedinom gradu. Koristi funkciju get_weather.
### Jedan od glavnih pogleda.
### Na njemu se nalazi opcija za označavanje favorita ili uklanjanje istih.
class CityWeatherView(APIView):
    """Detail view: current weather for one city, looked up by name.

    Renders an error flag when the upstream API call fails, converts the
    temperatures from Kelvin to Fahrenheit, and marks whether the city is
    one of the user's favourites."""
    renderer_classes = [TemplateHTMLRenderer]
    permission_classes = [permissions.IsAuthenticatedOrReadOnly,]
    def get(self, request, *args, **kwargs):
        # Featured cities and the user's favourites for the sidebar.
        feat_cities = sidelist(request)
        favourite_cities = user_views.favourites(request)
        fav = False
        search_word = self.kwargs['name']
        try:
            data = get_weather(self.kwargs['name'])
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only genuine runtime failures should render the
        # error page.
        except Exception:
            error = True
            return Response({'error':error, 'city':search_word, 'favourite_cities':favourite_cities, 'feat_cities':feat_cities}, template_name='wetcity.html')
        # Temperature conversion (Kelvin -> Fahrenheit) via pytemperature.
        data['main']['temp'] = pyt.k2f(data['main']['temp'])
        data['main']['feels_like'] = pyt.k2f(data['main']['feels_like'])
        # Determine whether this city is one of the user's favourites.
        if favourite_cities:
            for favor in favourite_cities:
                if data['id'] == favor.api_id:
                    fav = True
        return Response({'data': data, 'city':search_word, 'favourite_cities':favourite_cities, 'fav':fav, 'feat_cities':feat_cities}, template_name='wetcity.html')
# Dodavanje grada u favorite. Ujedno i način dodavanja novih gradova u bazu, po potrebi tek ako netko doda u favorite.
# Prvo radi provjeru postoji li taj grad u bazi, ako ne dodaje ga.
# Nakon što je grad dodan, dodaje se u favorite.
# Za favorite se koristi ManyToManyField na strani grada.
# To je više na više relacija gdje Django sam stvara srednju tablicu.
class FavouriteCity(APIView):
    """Add a city to the logged-in user's favourites.

    Cities enter the database through this view: if no WeatherCity row with
    the given api_id exists yet, one is created first.  Favourites use the
    ManyToMany `favourite` field on WeatherCity."""
    def get(self, request, *args, **kwargs):
        if not request.user.is_authenticated:
            messages.warning(request, f'Niste prijavljeni. Prijavite se kako biste mogli dodati favorite.')
            return HttpResponseRedirect(reverse('city-weather', kwargs={'name':self.kwargs['name']}))
        if self.kwargs['name'] and self.kwargs['api_id']:
            if not WeatherCity.objects.filter(api_id=self.kwargs['api_id']):
                city = WeatherCity(name=self.kwargs['name'], api_id=self.kwargs['api_id'])
                # NOTE(review): city was just constructed with this exact
                # name, so the first branch always fires and the two below
                # are effectively dead -- kept for parity with the original.
                if city.name == self.kwargs['name']:
                    city.save()
                # BUG FIX: `.lower` was compared without being called, so two
                # bound-method objects were compared (always unequal) and the
                # case-insensitive match could never trigger.
                elif city.name.lower() == self.kwargs['name'].lower():
                    city.save()
                else:
                    city.name = self.kwargs['name']
                    city.save()
            else:
                city = WeatherCity.objects.get(api_id=self.kwargs['api_id'])
        # NOTE(review): `city` is unbound when name/api_id are falsy -- the
        # URL route presumably guarantees both; confirm before relying on it.
        user = request.user
        city.favourite.add(user)
        city.save()
        return HttpResponseRedirect(reverse('city-weather', kwargs={'name': city.name}))
# Uklanjanje grada iz favorita.
class UnfavouriteCity(APIView):
    """Remove a city from the logged-in user's favourites, then redirect
    back to that city's weather page."""
    permission_classes = [permissions.IsAuthenticated,]
    def get(self, request, *args, **kwargs):
        if 'api_id' in self.kwargs:
            city = WeatherCity.objects.filter(api_id=self.kwargs['api_id']).get()
            city.favourite.remove(request.user)
            city.save()
            return HttpResponseRedirect(reverse('city-weather', kwargs={'name': city.name}))
### API sekcija: ne koristi se u templateima, no može se koristiti za CRUD funkcije.
### Dozvole su određene za svaku zasebno, osvisno o potencijalnoj opasnosti.
class CityListView(generics.ListAPIView):
    """API: list every WeatherCity (public, read-only)."""
    permission_classes = [permissions.AllowAny,]
    queryset = WeatherCity.objects.all()
    serializer_class = serializers.CityListSerializer
class CreateCityView(generics.CreateAPIView):
    """API: create a WeatherCity row (authenticated users only)."""
    permission_classes = [permissions.IsAuthenticated,]
    queryset = WeatherCity.objects.all()
    serializer_class = serializers.CityListSerializer
class UpdateCityView(generics.UpdateAPIView):
    """API: update an existing WeatherCity (authenticated users only)."""
    permission_classes = [permissions.IsAuthenticated,]
    queryset = WeatherCity.objects.all()
    serializer_class = serializers.UpdateCitySerializer
class WeatherView(APIView):
    """API: live current weather for the city with primary key `pk`,
    fetched from OpenWeatherMap via get_weather()."""
    permission_classes = [permissions.AllowAny,]
    def get(self, request, *args, **kwargs):
        # NOTE(review): returns None (-> server error) when `pk` is falsy;
        # the URL route presumably always supplies it.
        if self.kwargs['pk']:
            city = WeatherCity.objects.get(pk=self.kwargs['pk'])
            try:
                data = get_weather(city.name)
                return Response({'data': data})
            # BUG FIX: was a bare `except:`, which also caught SystemExit and
            # KeyboardInterrupt; only genuine failures should map to a 400.
            except Exception:
                return Response(status=status.HTTP_400_BAD_REQUEST)
class DeleteCityView(generics.DestroyAPIView):
    """API: delete a WeatherCity (admins only, given the destructive effect)."""
    permission_classes = [permissions.IsAdminUser,]
    queryset = WeatherCity.objects.all()
    serializer_class = serializers.DeleteCitySerializer
class FavUnfavCity(APIView):
    """API counterpart of FavouriteCity/UnfavouriteCity: GET favourites a
    city (creating its row on first use), POST removes it from favourites.

    NOTE(review): unlike FavouriteCity, newly created names are
    .capitalize()-d here -- confirm the inconsistency is intended.  Using
    POST for removal (and replying 410 GONE) is also unconventional REST."""
    serializer_class = serializers.UpdateCitySerializer
    permission_classes = [permissions.IsAuthenticated,]
    def get(self, request, *args, **kwargs):
        """Add the city to the user's favourites; 201 on success.

        NOTE(review): returns None (-> server error) when name/api_id are
        falsy; the URL route presumably guarantees both."""
        if self.kwargs['name'] and self.kwargs['api_id']:
            if not WeatherCity.objects.filter(api_id=self.kwargs['api_id']):
                city = WeatherCity(name=self.kwargs['name'].capitalize(), api_id=self.kwargs['api_id'])
                city.save()
            else:
                city = WeatherCity.objects.get(api_id=self.kwargs['api_id'])
            user = request.user
            city.favourite.add(user)
            city.save()
            return Response(status=status.HTTP_201_CREATED)
    def post(self, request, *args, **kwargs):
        """Remove the city from the user's favourites; replies 410 GONE."""
        if self.kwargs['api_id']:
            user = request.user
            city = WeatherCity.objects.filter(api_id=self.kwargs['api_id']).get()
            city.favourite.remove(user)
            city.save()
            return Response(status=status.HTTP_410_GONE)
49e69fb07a9fd0914d9afeadff295c7e0e11685e | Python | milamao/coding-exercise | /leetcode_325.py | UTF-8 | 1,162 | 3.4375 | 3 | [] | no_license | class Solution(object):
# Solution 2: dict. O(n)
def maxSubArrayLen(self, nums, k):
max_len = 0
sums = {0: -1} # sum => index
rolling_sum = 0
for i, num in enumerate(nums):
rolling_sum += num
if rolling_sum - k in sums:
max_len = max(max_len, i - sums[rolling_sum - k])
if rolling_sum not in sums:
sums[rolling_sum] = i
return max_len
# Solution 1: prefix sum. time limit exceeded. O(n^2)
'''
def maxSubArrayLen(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
if not nums:
return 0
n = len(nums)
sums = [] # prefix sum
sums.append(nums[0])
for i in range(1, n):
sums.append(sums[-1] + nums[i])
for length in range(n, -1, -1):
for i in range(0, n - length + 1):
if i == 0 and sums[i + length - 1] == k:
return length
if i > 0 and sums[i + length - 1] - sums[i - 1] == k:
return length
return 0
''' | true |
3e03a0c38aa42bd65f8c28a11e0f9a65f941f8e5 | Python | ipeterov/random-stuff | /биржевая программа/Подбор параметров/plot.py | UTF-8 | 341 | 3.125 | 3 | [] | no_license | import matplotlib.pyplot as plt
import numpy
from math import *
def example(x):
    """Damped oscillation: sin(x) scaled by 1 / x**1.001.

    NOTE(review): diverges at x == 0 and yields complex results for negative
    x (float power of a negative base) -- confirm callers only pass x > 0."""
    damping = x ** 1.001
    return sin(x) / damping
def timerow(function, range1, step):
    """Sample `function` over [range1[0], range1[1]) at `step` intervals.

    Returns a list of [x, f(x)] pairs, one per sample point."""
    return [[x, function(x)] for x in numpy.arange(range1[0], range1[1], step)]
# Plot the sampled curve: unzip the [x, y] pairs into parallel x/y sequences.
# NOTE(review): sampling -100..100 includes negative x and values near 0,
# where example() returns complex numbers / blows up -- confirm the range.
plt.plot(*list(zip(*timerow(example, [-100,100], 0.01))))
8949297cbe31b5fc7aa8060af6398890079f80e9 | Python | manasabalaji221/Logistic-Regression-Using-Batch-Training- | /logistic_reg.py | UTF-8 | 8,757 | 3.03125 | 3 | [] | no_license |
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    """Elementwise logistic function 1 / (1 + e^-x)."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)
def predict(X, weights, bias):
    """Hard 0/1 class labels for each row of X under the given model.

    Applies the logistic activation to X.weights + bias and thresholds the
    resulting probabilities at 0.5, returning a plain Python list."""
    probabilities = sigmoid(np.dot(X, weights) + bias)
    return [1 if p > 0.5 else 0 for p in probabilities]
def logistic_regression(features, y, n_iters, lr):
    """Train a logistic-regression classifier by batch gradient descent.

    Parameters
    ----------
    features : (n_samples, n_features) array
    y        : (n_samples, 1) array of 0/1 labels
    n_iters  : maximum number of gradient steps
    lr       : learning rate

    Returns (weights, bias, grads, e_list, costs, iters), matching the
    original interface: the history lists record the first component of the
    per-iteration gradient terms.
    """
    n_samples, n_features = features.shape
    weights = np.zeros((n_features, 1))
    bias = 0
    grads = []
    costs = []
    e_list = []
    iters = 0
    e = 1000
    for _ in range(n_iters):
        iters += 1
        # Forward pass: logistic activation (sigmoid) of the linear model.
        linear_model = np.dot(features, weights) + bias
        y_predicted = 1 / (1 + np.exp(-linear_model))
        # NOTE: despite the original naming, this is the (unaveraged)
        # gradient of the log-loss w.r.t. the weights, not a cross-entropy.
        cross_entropy = np.dot(features.T, (y_predicted - y))
        costs.append(cross_entropy[0])
        # Reuse the matmul above instead of recomputing it (original did the
        # identical np.dot twice per iteration).
        gradient_dw = (1 / n_samples) * cross_entropy
        grads.append(gradient_dw[0])
        db = (1 / n_samples) * np.sum(y_predicted - y)
        bias -= lr * db
        weights -= lr * gradient_dw
        # Stopping statistic.  BUG FIX: the original hard-coded
        # weights[0] + weights[1], which only works for exactly two features;
        # np.sum(weights) is identical in that case and generalizes.
        e_l1 = cross_entropy[0] + lr * np.sum(weights)
        e_list.append(e_l1)
        # stop on a small statistic or when it stops changing
        if e_l1 < 0.001 or e - e_l1 == 0:
            break
        e = e_l1
    return weights, bias, grads, e_list, costs, iters
def accuracy(test, pred):
    """Fraction of predictions in ``pred`` that match the labels in ``test``."""
    predicted = list(pred)
    matches = sum(1 for i, got in enumerate(predicted) if got == test[i])
    return matches / len(predicted)
if __name__ == '__main__':
    # -------------------------------------------------------------------
    # Data generation: two correlated 2-D Gaussian clusters with identical
    # covariance.  Class 0 is centred at (1, 0), class 1 at (0, 1.5).
    # Training uses 500 points per class, testing 250 per class.
    # (The unused S1/S2 label matrices from the original were removed.)
    # -------------------------------------------------------------------
    mean_class0 = np.array([1, 0])
    mean_class1 = np.array([0, 1.5])
    cov = np.array([[1, 0.75], [0.75, 1]])

    def make_dataset(n_per_class):
        """Sample n_per_class points per Gaussian; return (features, labels)."""
        pts0 = np.random.multivariate_normal(mean_class0, cov, n_per_class)
        pts1 = np.random.multivariate_normal(mean_class1, cov, n_per_class)
        features = np.concatenate([pts0, pts1], axis=0)
        labels = np.zeros(shape=(2 * n_per_class, 1))
        labels[n_per_class:] = 1  # second half of the rows is class 1
        return features, labels

    xy_train, y_train = make_dataset(500)
    plt.hist(xy_train, 1000, density=True)
    plt.title('Multivariate Normal random data values\nTraining data:')
    plt.show()

    xy_test, y_test = make_dataset(250)
    plt.hist(xy_test, 500, density=True)
    plt.title('Multivariate Normal random data values\nTesting data:')
    plt.show()

    def run_experiment(lr):
        """Train with one learning rate; print accuracy and show the plots."""
        label = 'Batch Training using lr = {}'.format(lr)
        print(label)
        weights, bias, grads, grad_norms, costs, iters = \
            logistic_regression(xy_train, y_train, 100000, lr=lr)
        predicted = predict(xy_test, weights, bias)
        acc = 100 * accuracy(y_test, predicted)
        # (a) testing data overlaid with the predicted labels
        plt.plot(xy_test)
        plt.plot(predicted)
        plt.title(label + '\n(a)Testing data and the Trained Decision Boundary:')
        plt.show()
        # (b) training-loss trace
        plt.plot(grads)
        plt.title(label + '\n(b)Changes of training loss (cross entropy) w.r.t. iteration:')
        plt.show()
        # (c) gradient-norm trace.  BUGFIX: the original copy-pasted sections
        # titled this panel "lr = 1" for every learning rate.
        plt.plot(grad_norms, color='red')
        plt.title(label + '\n(c)Changes of norm of gradient w.r.t. iteration:')
        plt.show()
        print('Accuracy is:', acc, '%')
        print('No of iterations:', iters)

    # One experiment per learning rate — previously four near-identical
    # copy-pasted ~25-line sections.
    for learning_rate in (1, 0.1, 0.01, 0.001):
        run_experiment(learning_rate)
| true |
071391bac600e57b7fc5f3536287bd0a0eba8d0b | Python | aravindas4/python-projects | /ch2-thu12/uppercase_strip.py | UTF-8 | 116 | 3.109375 | 3 | [] | no_license | def uppercase(string):
    # Return an upper-cased copy of the input string.
    return string.upper()
def strip_symbols(string):
    """Strip every leading/trailing occurrence of the string's first character."""
    marker = string[0]
    return string.strip(marker)
| true |
462493e496720d12964397ce6fc560fbd2abc756 | Python | GaiAshk/AI-Berkeleys-pacman-game | /project1/search.py | UTF-8 | 10,593 | 3.75 | 4 | [] | no_license | # search.py
# ---------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
In search.py, you will implement generic search algorithms which are called
by Pacman agents (in searchAgents.py).
"""
import util
class SearchProblem:
    """
    This class outlines the structure of a search problem, but doesn't implement
    any of the methods (in object-oriented terminology: an abstract class).
    You do not need to change anything in this class, ever.
    """
    def getStartState(self):
        """
        Returns the start state for the search problem
        """
        # Abstract: util.raiseNotDefined() aborts with a "not defined" error.
        util.raiseNotDefined()
    def isGoalState(self, state):
        """
        state: Search state
        Returns True if and only if the state is a valid goal state
        """
        util.raiseNotDefined()
    def getSuccessors(self, state):
        """
        state: Search state
        For a given state, this should return a list of triples,
        (successor, action, stepCost), where 'successor' is a
        successor to the current state, 'action' is the action
        required to get there, and 'stepCost' is the incremental
        cost of expanding to that successor
        """
        util.raiseNotDefined()
    def getCostOfActions(self, actions):
        """
        actions: A list of actions to take
        This method returns the total cost of a particular sequence of actions. The sequence must
        be composed of legal moves
        """
        util.raiseNotDefined()
def tinyMazeSearch(problem):
    """
    Returns a sequence of moves that solves tinyMaze.  For any other maze the
    sequence will be incorrect, so only use this for tinyMaze.
    """
    from game import Directions
    south = Directions.SOUTH
    west = Directions.WEST
    return [south, south, west, south, west, west, south, west]
def depthFirstSearch(problem):
    """
    Search the deepest nodes in the search tree first
    [2nd Edition: p 75, 3rd Edition: p 87]
    Your search algorithm needs to return a list of actions that reaches
    the goal. Make sure to implement a graph search algorithm
    [2nd Edition: Fig. 3.18, 3rd Edition: Fig 3.7].
    To get started, you might want to try some of these simple commands to
    understand the search problem that is being passed in:
    print "Start:", problem.getStartState()
    print "Is the start a goal?", problem.isGoalState(problem.getStartState())
    print "Start's successors:", problem.getSuccessors(problem.getStartState())
    """
    startState = problem.getStartState()
    # list of all the state on the grid pacman visited
    visitedPositions = [startState]
    # Recursive graph-DFS; the Stack accumulates the actions along the path.
    path = _dfsHelper(problem, startState, util.Stack(), visitedPositions)
    # NOTE(review): reaches into util.Stack's internal `.list`; also raises
    # AttributeError when no path exists (_dfsHelper returns None).
    return path.list
def _dfsHelper(problem, nodeToFringe, currentPath, visitedPositions):
    """Recursive DFS step: extend ``currentPath`` from ``nodeToFringe``.

    Returns the action stack when a goal is reached, otherwise None after
    backtracking (popping the last action pushed for this branch).
    """
    successors = problem.getSuccessors(nodeToFringe)
    if successors is not None:
        successors = successors[::-1] # reversing the successors input for requested output
        for neighbor in successors:
            # if one of the successors is a goal, return a path to it (without fringing it)
            if (problem.isGoalState(neighbor[0])) is True:
                currentPath.push(neighbor[1])
                return currentPath
        for neighbor in successors:
            # if a path is valid, return it; else return None
            if neighbor[0] not in visitedPositions:
                visitedPositions.append(neighbor[0])
                currentPath.push(neighbor[1])
                res = _dfsHelper(problem, neighbor[0], currentPath, visitedPositions)
                if res is not None:
                    return res
    # this is not a path to the goal; backtrack
    currentPath.pop()
    return None
def breadthFirstSearch(problem):
    """
    Search the shallowest nodes in the search tree first.
    [2nd Edition: p 73, 3rd Edition: p 82]

    Returns the list of actions of a shortest (fewest-actions) path, or []
    when the start is already a goal.
    """
    startState = problem.getStartState()
    # list of all the state on the grid pacman visited
    visitedPositions = [startState]
    # starting at goal point
    if problem.isGoalState(startState):
        return []
    # a list of lists holding the positions and directions of every path the
    # BFS explores; each path entry is a (state, action, stepCost) triple and
    # the first entry is a dummy for the start state
    queue = [ [(startState, 'dummy', 0)] ] # a queue of paths (partial/complete paths)
    # final optimal path to the destination
    goalPath = []
    # a flag to enable breaking out of the loop when the goal is reached
    flag = False
    while queue:
        # pop the first item from the queue (FIFO -> breadth-first order)
        nodeToFringe = queue.pop(0)
        # update visitedPositions from the nodeToFringe (it is the last element in the list
        # and first element in the tuple)
        visitedPositions.append(nodeToFringe[-1][0])
        # iterate on all the possible successor nodes
        successors = problem.getSuccessors(nodeToFringe[-1][0])
        for child in successors:
            childPosition = child[0]
            # skip successors already visited or already queued by another path
            if childPosition not in visitedPositions:
                # copy newPath so that its address in memory is different then nodeToFringe's address
                newPath = nodeToFringe[:]
                newPath.append(child)
                # if goal path is reached break
                if problem.isGoalState(child[0]):
                    goalPath = newPath
                    flag = True
                    break
                # else add this newPath as the last element in the list
                queue.append(newPath)
                visitedPositions.append(childPosition)
        if flag:
            break
    # return a list of all the directions (dropping the dummy start entry)
    result = [direction[1] for direction in goalPath][1:]
    return result
def uniformCostSearch(problem):
    "Search the node of least total cost first."
    startState = problem.getStartState()
    # list of all the state on the grid pacman visited
    visitedPositions = [startState]
    # check if we start at goal state
    if (problem.isGoalState(startState)) is True:
        return []
    # priority function: total step-cost of the actions along the path
    # (skipping the dummy start entry).
    # NOTE(review): `lambda (a, b, c): b` is Python-2-only tuple-unpacking
    # syntax — this file will not run under Python 3 as written.
    newCostFunction = lambda path: problem.getCostOfActions(map(lambda (a, b, c): b, path[1:]))
    # init a priority queue with our new cost function
    queue = util.PriorityQueueWithFunction(newCostFunction)
    queue.push([(startState, 'dummy', 0)])
    # final optimal path to the destination
    goalPath = []
    while not queue.isEmpty():
        # pop the cheapest path from the queue
        nodeToFringe = queue.pop()
        # goal test happens on expansion, which is what makes UCS optimal
        if problem.isGoalState(nodeToFringe[-1][0]):
            goalPath = nodeToFringe
            break
        # update visitedPositions from the nodeToFringe (it is the last element in the list
        # and first element in the tuple)
        visitedPositions.append(nodeToFringe[-1][0])
        # iterate on all the possible successor nodes
        successors = problem.getSuccessors(nodeToFringe[-1][0])
        for child in successors:
            # skip successors already expanded along a cheaper path
            if child[0] not in visitedPositions:
                # copy newPath so that its address in memory is different then nodeToFringe's address
                newPath = nodeToFringe[:]
                # add child as a node that has not been expanded, only generated
                newPath.append(child)
                queue.push(newPath)
    # return an array of all the directions (dropping the dummy start entry)
    return [direction[1] for direction in goalPath][1:]
def nullHeuristic(state, problem=None):
    """
    A heuristic function estimates the cost from the current state to the
    nearest goal in the provided SearchProblem.  This one is trivial: it
    always answers zero, which makes A* degenerate to uniform-cost search.
    """
    return 0
def aStarSearch(problem, heuristic=nullHeuristic):
    "Search the node that has the lowest combined cost and heuristic first."
    # heuristic signature: heuristic(state, problem)
    startState = problem.getStartState()
    # list of all the state on the grid pacman visited
    visitedPositions = [startState]
    # check if we start at goal state
    if (problem.isGoalState(startState)) is True:
        return []
    # priority function f(n) = g(n) + h(n): path cost so far plus the
    # heuristic estimate from the path's last state.
    # NOTE(review): `lambda (a, b, c): b` is Python-2-only syntax.
    newCostFunction = lambda path: problem.getCostOfActions(map(lambda (a, b, c): b, path[1:])) +\
                                   heuristic(path[-1][0], problem)
    # init a priority queue with our new cost function
    queue = util.PriorityQueueWithFunction(newCostFunction)
    queue.push([(startState, 'dummy', 0)])
    # final optimal path to the destination
    goalPath = []
    while not queue.isEmpty():
        # pop the path with the lowest f-value from the queue
        nodeToFringe = queue.pop()
        # goal test happens on expansion
        if problem.isGoalState(nodeToFringe[-1][0]):
            goalPath = nodeToFringe
            break
        # update visitedPositions from the nodeToFringe (it is the last element in the list
        # and first element in the tuple)
        visitedPositions.append(nodeToFringe[-1][0])
        # iterate on all the possible successor nodes
        successors = problem.getSuccessors(nodeToFringe[-1][0])
        for child in successors:
            # skip successors already expanded along a cheaper path
            if child[0] not in visitedPositions:
                # copy newPath so that its address in memory is different then nodeToFringe's address
                newPath = nodeToFringe[:]
                # add child as a node that has not been expanded, only generated
                newPath.append(child)
                queue.push(newPath)
    # return an array of all the directions (dropping the dummy start entry)
    return [direction[1] for direction in goalPath][1:]
# Abbreviations — the autograder invokes the searches by these short names.
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch
| true |
8285c89a80b6c0f6ee5fc90654cbbe55a9ecde31 | Python | john-veillette/mne-ari | /mne_ari/ari/_permutation.py | UTF-8 | 1,918 | 3.015625 | 3 | [
"BSD-3-Clause"
] | permissive | from scipy.stats import ttest_1samp, ttest_ind
from mne.utils import check_random_state
import numpy as np
def _permutation_1samp(X, n_permutations = 10000, alternative = 'two-sided',
        seed = None, statfun = None):
    '''
    computes the permutation distribution of p-values from ttest_1samp

    Randomly sign-flips the observations (rows of ``X``) ``n_permutations``
    times and recomputes the one-sample p-values each time — with ``statfun``
    when given, otherwise with ``scipy.stats.ttest_1samp``.  Returns an array
    of shape (n_tests, 1 + n_permutations) whose first column holds the
    observed (unpermuted) p-values.
    '''
    _, p_obs = ttest_1samp(X, 0, axis = 0, alternative = alternative)
    p_dist = [p_obs]
    rng = check_random_state(seed)
    for i in range(n_permutations):
        # randomly flip sign of observations
        flips = rng.choice([-1, 1], size = X.shape[0])
        X = X * flips[:, np.newaxis]
        # and recompute the p-values for the permuted sample
        if statfun is None:
            _, p = ttest_1samp(X, 0, axis = 0, alternative = alternative)
        else:
            p = statfun(X)
        # BUGFIX: the statfun result used to be discarded — the stale p_obs
        # was appended instead.  Append the recomputed values (this mirrors
        # _permutation_ind below).
        p_dist.append(p)
    return np.stack(p_dist, axis = 1) # n_tests x (1 + n_perm)
def _permutation_ind(X, n_permutations = 10000, alternative = 'two-sided',
        seed = None, statfun = None):
    '''
    permutation distribution of parametric p-values from ttest_ind

    Shuffles the pooled observations between the two samples and recomputes
    the two-sample p-values for each permutation.  Returns an array of shape
    (n_tests, 1 + n_permutations); the first column holds the observed
    (unpermuted) p-values.
    '''
    if len(X) != 2 and statfun is None:
        raise ValueError("You're trying do do a two-sample test " +
                         "with a number of samples that isn't two! If X is list/tuple, " +
                         "it must be of length 2.")
    # Observed (unpermuted) p-values go first.
    _, observed = ttest_ind(X[0], X[1], axis = 0, alternative = alternative)
    p_dist = [observed]
    n_first = X[0].shape[0]  # number of observations in the first sample
    pooled = np.concatenate(X, axis = 0)
    order = np.arange(pooled.shape[0])
    rng = check_random_state(seed)
    for _ in range(n_permutations):
        rng.shuffle(order)  # in-place reshuffle of the pooled index order
        shuffled = pooled[order]
        group_a, group_b = shuffled[:n_first], shuffled[n_first:]
        if statfun is None:
            _, p = ttest_ind(group_a, group_b, axis = 0, alternative = alternative)
        else:
            p = statfun([group_a, group_b])
        p_dist.append(p)
    return np.stack(p_dist, axis = 1) # n_tests x (1 + n_perm)
f6cbc4f8fa6a36a969111bb4ce108ad3865d8aaa | Python | rafaelperazzo/programacao-web | /moodledata/vpl_data/476/usersdata/316/111018/submittedfiles/Av2_Parte3.py | UTF-8 | 314 | 3.53125 | 4 | [] | no_license | # -*- coding: utf-8 -*-
# Read the lengths of the two lists from stdin (prompts are in Portuguese:
# "enter the number of elements of list a/b").
na=int(input('digite o numero de elementos da lista a: '))
nb=int(input('digite o numero de elementos da lista b: '))
a=[]
b=[]
# Read `na` floats into list a ("enter an element of a").
for i in range(0,na,1):
    a.append(float(input('digite um elemento de a: ')))
# Read `nb` floats into list b ("enter an element of b").
for i in range(0,nb,1):
    b.append(float(input('digite um elemento de b: ')))
| true |
0aca9b0fdb8757c8cf767a17d22d4861d62041a9 | Python | depixusgenome/trackanalysis | /src/scripting/confusion.py | UTF-8 | 12,125 | 2.734375 | 3 | [] | no_license | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"Compute confusion matrix using the experiments and the real sequence"
from inspect import getmembers
from typing import (Union, List, Sequence, Iterator, Tuple, Any,
Iterable, NamedTuple, Optional)
import numpy as np
import pandas as pd
from scipy.stats import percentileofscore
from sequences import Strand, peaks as _peaks, Translator
from sequences.io import LNAHairpin
from utils import initdefaults
def trackoligo(name:str, oligos: Sequence[str], reference = 'OR3') -> str:
    """
    Returns the oligo associated with a track.

    Matches, case-insensitively, every oligo whose sequence appears in the
    track name.  The reference oligo is returned only when it is the sole
    match; otherwise it is discarded and the first matching entry of
    ``oligos`` (in input order) is returned.

    Raises KeyError when no oligo matches the track name.
    """
    found = {oli for oli in oligos if oli.upper() in name.upper()}
    if not found:
        # BUGFIX: the second literal was missing its f-prefix, so "{oligos}"
        # appeared verbatim in the error message.
        raise KeyError(f"Can not find the oligo corresponding to track {name}"
                       f" in the set of oligos {oligos}")
    if len(found) == 1 and next(iter(found)) == reference:
        return reference
    found.discard(reference)
    return next(i for i in oligos if i in found)
def oligopeaks(oligo: Union[str, Iterable[str]], seq: LNAHairpin,
               withref = True,
               hpname: str = None,
               delta = (30, 41)) -> Tuple[np.ndarray, np.ndarray]:
    """
    compute the list of theoretical peaks for oligo
    input:  oligo is a string (or an iterable of strings)
            seq is a theoretical object
                'full': string with the full sequence
                'target': string with the target sequence
                'oligo': string with the reference oligo
            withref is True if we output the peaks of the reference oligo
                       False if not
            hpname: when truthy, restrict peaks to a window around the target
                    (treated as a flag here — TODO confirm intended use)
            delta: (upstream, downstream) margins in bases for that window

    Returns a pair of position arrays: (positive-orientation, negative-orientation).
    """
    #reverse complement of oligo
    #find the positions and orientations of oli in the full sequence
    if isinstance(oligo, str):
        oli = _peaks(seq.full, Translator.reversecomplement(oligo))
    else:
        oli = _peaks(seq.full, [Translator.reversecomplement(i) for i in oligo])
    #keep the positions in target: window is [target_end - len(target) - delta[0],
    #target_end + delta[1]]
    if hpname and seq.target != seq.full:
        fulls = _peaks(seq.full, seq.target)['position'][0]
        rng = fulls-len(seq.target)-delta[0], fulls + delta[1]
        oli = oli[(oli['position'] >= rng[0]) & (oli['position'] <= rng[1])]
    #if withref then merge in the positively-oriented reference peaks, sorted
    if withref:
        ref = _peaks(seq.full, seq.references)
        oli = np.sort(np.append(oli, ref[ref['orientation']]))
    return (oli['position'][oli['orientation']], oli['position'][~oli['orientation']])
class ConfusionMatrix:
    """
    Arguments required to create a confusion matrix.

    Attributes are class-level defaults that @initdefaults turns into
    per-instance keyword arguments of __init__.
    """
    # NOTE(review): mutable class-level default — shared across instances
    # unless overridden; confirm @initdefaults copies it.
    oligos: List[str] = []
    seq = LNAHairpin()
    ioc = 1
    tolerance = 0.01, 0.01
    rule = 'theoretical_interval'
    brother = 3
    @initdefaults(frozenset(locals()))
    def __init__(self, **_):
        pass
    def detection(self, data:pd.DataFrame, **kwa) -> pd.DataFrame:
        "returns the detection dataframe"
        return DetectionFrameCreator.dataframe(self, data, **kwa)
    def confusion(self, data:pd.DataFrame, **kwa) -> 'LNAHairpinDataFrameResults':
        "returns the confusion results (dataframe, crosstab and TP/FP/TN/FN counts)"
        return LNAHairpinDataFrameCreator.results(self, data, **kwa)
class DataFrameCreator(ConfusionMatrix):
    """Creates dataframes.

    Subclasses define one ``*column`` method per output column; the column
    name is the method name with the 'column' suffix stripped.
    """
    theo: np.ndarray
    def __init__(self, config):
        # Copy every attribute of the provided config onto this instance.
        super().__init__(**config.__dict__)
    @classmethod
    def dataframe(cls, config: ConfusionMatrix, data: pd.DataFrame, **kwa) -> pd.DataFrame:
        "returns a dataframe for all tracks"
        size = -len('column')
        # Materialise every generated line; each line yields one value per
        # *column method, in _columns order.
        out = tuple(tuple(i) for i in cls.iterate(config, data, **kwa))
        data = {j[:size]: [k[i] for k in out]
                for i, (j,_) in enumerate(cls._columns(cls))}
        return pd.DataFrame(data)
    @staticmethod
    def _columns(itm):
        # Every callable member whose name ends in 'column', in getmembers
        # (alphabetical) order.
        return (i for i in getmembers(itm) if i[0].endswith('column') and callable(i[1]))
    def groupbyiterate(self, data: pd.core.groupby.DataFrameGroupBy
                      ) -> Iterator[Iterator]:
        "iterates over the provided groupby"
        cols = [i for _, i in self._columns(self)]
        return ((fcn(*self.lineargs(info)) for fcn in cols) for info in data)
    @classmethod
    def iterate(cls, config: ConfusionMatrix, data: pd.DataFrame, **_):
        """
        iterates over all lines (abstract)
        """
        raise NotImplementedError()
    def lineargs(self, info:Tuple[Any, pd.DataFrame]):
        """
        returns args needed by column methods (abstract)
        """
        raise NotImplementedError()
class DetectionFrameCreator(DataFrameCreator):
    """
    Creates the detection dataframe: one line per (track, strand,
    experimental peak position).
    """
    def __init__(self, config, trackname:str, strand: Strand, hptarget: str) -> None:
        super().__init__(config)
        self.trackname = trackname
        # First configured oligo whose sequence appears in the track name.
        self.oligoname = next(i for i in self.oligos if i.upper() in trackname.upper())
        self.strand = Strand(strand)
        theo = oligopeaks(trackoligo(trackname, self.oligos),
                          self.seq, hptarget)
        # Select (current strand, other strand) ordering from the pair
        # returned by oligopeaks.
        strands = np.arange(2)[::(1 if strand.value else -1)]
        # A theoretical peak "double-binds" when a peak on the opposite
        # strand lies within `brother` bases of it.
        self.doublebind = np.min(np.abs(theo[strands[0]].T - theo[strands[1]][:, None]),
                                 axis = 0) < self.brother
        self.theo = theo[strands[0]]
    # pylint: disable=arguments-differ
    @classmethod
    def iterate(cls, # type: ignore
                config: ConfusionMatrix, data: pd.DataFrame,
                hptarget = 'target') -> Iterator[Iterator]:
        """
        iterates over all lines: for each track, one pass per strand over
        the peaks grouped by experimental peak position
        """
        tracks = data.reset_index().track.unique()
        data = data.reset_index().set_index('track')
        for trk in tracks:
            grp = data.loc[trk].groupby('peakposition')
            yield from cls(config, trk, Strand.positive, hptarget).groupbyiterate(grp)
            yield from cls(config, trk, Strand.negative, hptarget).groupbyiterate(grp)
    def lineargs(self, info:Tuple[float, pd.DataFrame]) -> Tuple[int, float, pd.DataFrame]:
        """
        returns args needed by column methods:
        (index of nearest theoretical peak, experimental position, group)
        """
        idtheo = np.searchsorted(self.theo, info[0])
        return (idtheo + np.argmin(np.abs(self.theo[idtheo:idtheo+1]-info[0])),
                info[0], info[1])
    def rulecolumn(self, *_) -> str:
        "the rule name"
        return self.rule
    def trackcolumn(self, *_) -> str:
        "the track name"
        return self.trackname
    def oligocolumn(self, *_) -> str:
        "the oligo name"
        return self.oligoname
    def theoposcolumn(self, idtheo: int, *_) -> int:
        "the theoretical positions in bases"
        return self.theo[idtheo]
    def strandcolumn(self, *_) -> str:
        "the strand name"
        return self.strand.name
    def totalpeakscolumn(self, *_) -> int:
        "the number of theoretical peaks on the current strand"
        return len(self.theo)
    @staticmethod
    def expposcolumn(_, exppos: float, *__) -> float:
        "the experimental positions in µm"
        return exppos
    @staticmethod
    def peaknbcolumn(idtheo: int, *_) -> int:
        "the index of the theoretical peak on the current strand"
        return idtheo
    def distcolumn(self, idtheo:int, exppos:float, _) -> float:
        "the experimental distance from the theory"
        return exppos-self.theo[idtheo]
    def doublebindingcolumn(self, idtheo: int, *_) -> bool:
        "whether there could be a binding on both strands at the same time"
        return self.doublebind[idtheo]
    def detectioncolumn(self, idtheo: int, _, group:pd.DataFrame) -> bool:
        "whether enough of the group's events fall within ±ioc of the theoretical peak"
        avg = group.avg.values
        theo = self.theo[idtheo]
        # Fraction of `avg` inside [theo - ioc, theo + ioc], compared against
        # the per-strand tolerance.
        return ((percentileofscore(avg, theo - self.ioc)
                 -percentileofscore(avg, theo + self.ioc))*1e-2
                >= self.tolerance[int(self.strand.value)])
    @staticmethod
    def hybratecolumn(_, __, group:pd.DataFrame) -> float:
        "the hybridisation rate of the peak group"
        return group.hybridisationrate.values[0]
    @staticmethod
    def hybtimecolumn(_, __, group:pd.DataFrame) -> float:
        "the average hybridisation duration of the peak group"
        return group.averageduration.values[0]
class LNAHairpinDataFrameResults(NamedTuple):
    "Confusion results: detailed dataframe, crosstab and TP/FP/TN/FN counts."
    data : pd.DataFrame
    confusion : pd.DataFrame
    truepos : int
    falsepos : int
    trueneg : int
    falseneg : int
class LNAHairpinDataFrameCreator(DataFrameCreator):
    """creates the detailed (per theoretical peak, per track) dataframe
    and the confusion-matrix summary"""
    def __init__(self, config,
                 references: Union[str, List[str]] = None,
                 strand = Strand.positive) -> None:
        super().__init__(config)
        if references is None:
            references = self.seq.references
        # Keep only the reference peaks on the requested strand.
        self.references = (oligopeaks(references, self.seq) # type: ignore
                           [0 if strand.value else 1])
    @classmethod
    def iterate(cls, # type: ignore # pylint: disable=arguments-differ
                config: ConfusionMatrix, data: pd.DataFrame,
                **kwa) -> Iterator[Iterator]:
        """
        iterates over all lines, grouped by (theoretical position, track)
        """
        yield from cls(config, **kwa).groupbyiterate(data.groupby(['theopos', 'track']))
    @classmethod
    def results(cls, config: ConfusionMatrix, data: pd.DataFrame,
                confusionindex = 'confusionstate',
                confusioncolumns = ('count',),
                **kwa) -> LNAHairpinDataFrameResults:
        """
        creates and returns all results: the detailed dataframe, a crosstab
        of confusion states and the four TP/FP/TN/FN counts
        """
        data = cls.dataframe(config, data, **kwa)
        confusion = pd.crosstab(index = data[confusionindex],
                                columns = list(confusioncolumns))
        def _count(name):
            # Missing states simply did not occur: count them as zero.
            try:
                return confusion.loc[name]['count']
            except KeyError:
                return 0
        counts = (_count(i) for i in ('FN', 'FP', 'TN', 'TP'))
        return LNAHairpinDataFrameResults(data, confusion, *counts)
    @staticmethod
    def lineargs(info:Tuple[Tuple[float, str], pd.DataFrame]) -> Tuple[float, str, pd.DataFrame]:
        """
        returns args needed by column methods:
        (theoretical position, track name, group)
        """
        return info[0][0], info[0][1], info[1]
    @staticmethod
    def trackcolumn(_, track:str, *__) -> str:
        "the track name"
        return track
    @staticmethod
    def theoposcolumn(theopos: int, *_) -> int:
        "the theoretical positions in bases"
        return theopos
    @staticmethod
    def expposcolumn(_, __, grp: pd.DataFrame) -> Optional[float]:
        "the experimental positions in µm"
        # NOTE(review): `.first` is a method object, not the first value —
        # this looks like it should be `.iloc[0]`; confirm intent.
        try:
            return grp.exppos[grp.detection].first
        except IndexError:
            return None
    @staticmethod
    def oligocolumn(_, __, grp: pd.DataFrame) -> str:
        "the oligo name"
        return grp.oligo.values[0]
    @staticmethod
    def strandcolumn(_, __, grp: pd.DataFrame) -> str:
        "the strand name"
        return grp.strand.values[0]
    @staticmethod
    def confusionstatecolumn(_, __, grp: pd.DataFrame) -> str:
        "returns the state of the peak : false/true positive/negative"
        state = np.any(grp.detection)
        if Strand(grp.strand.values[0]).value:
            # positive strand: a detection is a true positive
            return 'TP' if state else 'FN'
        if state:
            # negative strand: a detection is excused when it coincides with
            # a double-binding position
            hasdouble = np.any(grp[['detection', 'doublebinding']].sum(axis=1).isin([2]))
            return 'TN' if hasdouble else 'FP'
        return 'TN'
    @staticmethod
    def goodestimatorscolumn(_, __, grp: pd.DataFrame) -> int:
        "returns the number of good estimators"
        cnt = grp.detection.sum()
        return cnt if Strand(grp.strand.values[0]).value else len(grp) - cnt
    @staticmethod
    def estimatorscolumn(_, __, grp: pd.DataFrame) -> int:
        "returns the total number of estimators in the group"
        return len(grp)
    def isrefcolumn(self, pos: int, *_) -> bool:
        "whether the theoretical position is a reference-oligo peak"
        return pos in self.references
| true |
7b793120977e867c8bc63f7a6f08c11e25a48cc4 | Python | bopopescu/python-cgi-monitor | /tuesday-service-server-backup/main/service-return.py | UTF-8 | 1,860 | 3.125 | 3 | [] | no_license |
class Candy:
def __init__(self, type, fname, lname):
self.type = type
self.fname =fname
self.lname = lname
self.operation()
def operation(self):
option = {
'candy': self.hello_candy,
'sweet': self.hello_sweet,
'chocolate': self.hello_chocolate,
}
result, text = option[self.type](self.fname, self.lname)
print '--------- RESULT -------'
print result
print '--------- text -------'
print text
def hello_candy(self, fname, lname):
return fname+lname, "Hello candy {} {}".format(fname, lname)
def hello_sweet(self, fname, lname):
return lname + fname, "Hello sweet {} {}".format(lname, fname)
def hello_chocolate(self, fname, lname):
return lname + fname, None
# helen = Candy('chocolate', 'pacharapol', 'deesawat')
import socket
import time
def tcp(destination, destination_port):
stdout = "status={status_final}"
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error as err:
print "socket creation failed with error %s" % (err)
status_final = 3
regex = {'http://': '', 'https://': ''}
destination = reduce(lambda a, kv: a.replace(*kv), regex.iteritems(), destination)
try:
host_ip = socket.gethostbyname(destination)
except socket.gaierror:
status_final = 3
s.settimeout(1)
try:
start_time = time.time()
s.connect((host_ip, destination_port))
end_time = time.time()
status_final = 1
stdout += ", rtt={rtt}".format(rtt=(end_time - start_time) * 1000)
except socket.timeout:
status_final = 2
finally:
s.close()
return stdout.format(status_final=status_final)
hello = tcp('172.217.160.36', 443)
print hello | true |
1d05285fee48d29ea5f70b8385c18b379bc25966 | Python | Aasthaengg/IBMdataset | /Python_codes/p03363/s598493115.py | UTF-8 | 511 | 3.03125 | 3 | [] | no_license | import collections
from operator import mul
from functools import reduce
def cmb(n, r):
    """Binomial coefficient C(n, r), computed from falling-factorial products."""
    r = min(n - r, r)
    if r == 0:
        return 1
    numerator = reduce(mul, range(n, n - r, -1))
    denominator = reduce(mul, range(1, r + 1))
    return numerator // denominator
# Read n and the n integers, then turn `li` into prefix sums in place:
# li[i] becomes the sum of the first i+1 original values.
n = int(input())
li = list(map(int,input().split()))
for i in range(n-1):
    li[i+1] += li[i]
ans = li.count(0) # a zero prefix sum is itself one zero-sum interval
# Any pair of equal prefix sums encloses a zero-sum interval, so add
# C(k, 2) for each prefix value that occurs k times.
c = list(collections.Counter(li).values())
for i in c:
    if i > 1:
        ans += cmb(i,2)
print(ans)
| true |
ea32bbb53c202fd8ad103570d35b324db1c4c49e | Python | LouisSR/Raspberry | /Python/ImageAquisition.py | UTF-8 | 3,864 | 2.703125 | 3 | [] | no_license | #! /usr/bin/python
import io
import time
from threading import Thread, Lock ,Event
import picamera
import sys
import cv2
import numpy as np
from blobDetection import BlobDetection
#Initialization: module-level state shared between the capture thread and
#the ImageProcessor worker threads.
iteration = 0   # number of frames processed so far
lock = Lock()   # guards access to `pool`
pool = []       # idle ImageProcessor threads waiting for a frame
BestBlob = []   # last blob [x, y] reported by a processor ([] when none detected)
class ImageProcessor(Thread):
    # Worker thread (Python 2): waits for a JPEG frame in `self.stream`,
    # runs blob detection on it and publishes the result via the module
    # globals `BestBlob` / `iteration`.
    def __init__(self, color, debug=False):
        super(ImageProcessor, self).__init__()
        self.stream = io.BytesIO()   # the capture thread writes frames here
        self.event = Event()         # set by the capture thread when a frame is ready
        self.terminated = False
        self.process_time = 0
        self.color = color
        self.debug = debug
        self.blob = BlobDetection(debug=False, path="./images/test/")
        # Start processing immediately on construction.
        self.start()
    def run(self):
        # This method runs in a separate thread
        global iteration, lock, pool, BestBlob
        while not self.terminated:
            # Wait for an image to be written to the stream
            if self.event.wait(1):
                try:
                    self.stream.seek(0)
                    # Read the image and do some processing on it
                    # Construct a numpy array from the stream
                    data = np.fromstring(self.stream.getvalue(), dtype=np.uint8)
                    image = cv2.imdecode(data, 1)
                    if self.debug:
                        print '\nIteration: ', iteration,
                    if iteration%10: #Save every 10th image
                        blob_detected = self.blob.detect(image, self.color, save=False)
                    else:
                        blob_detected = self.blob.detect(image, self.color, save=True)
                    if blob_detected:
                        BestBlob = [self.blob.best_keypoint_x, self.blob.best_keypoint_y]
                        # NOTE(review): the line above is immediately
                        # overwritten — the y-coordinate is pinned to 150;
                        # confirm this is intentional.
                        BestBlob = [self.blob.best_keypoint_x, 150]
                        if self.debug:
                            print ' Blob: ', BestBlob
                    else:
                        BestBlob = []
                        print " No blob"
                    # NOTE(review): incremented without holding `lock`.
                    iteration += 1
                finally:
                    # Reset the stream and event
                    self.stream.seek(0)
                    self.stream.truncate()
                    self.event.clear()
                    # Return ourselves to the pool
                    with lock:
                        pool.append(self)
class ImageAquisition(Thread):
    # Capture thread (Python 2): drives the PiCamera, handing each captured
    # frame's stream to an idle ImageProcessor from the shared `pool`.
    def __init__(self, resolution, framerate, color, resize_factor=1, debug=False):
        super(ImageAquisition, self).__init__()
        print "Camera Init"
        global pool
        self.nb_cores = 3
        # Spawn the worker threads; they start themselves on construction.
        pool = [ImageProcessor(color, debug=debug) for i in range(self.nb_cores)]
        self.debug = debug
        self.camera = picamera.PiCamera()
        self.camera.framerate = framerate
        self.camera.resolution = resolution
        # Frames are downscaled by resize_factor before processing.
        self.resized = (resolution[0]/resize_factor, resolution[1]/resize_factor)
        #camera.start_preview()
        # Give the camera time to warm up.
        time.sleep(2)
        self.aquire = True
    def getBlob(self):
        #This function returns blob coordinates as integers scaled to the
        #processed-frame size (percent of width/height), or [] when no blob.
        global BestBlob
        if BestBlob:
            blob = [ 100 * BestBlob[0] / self.resized[0], 100 * BestBlob[1] / self.resized[1] ]
        else:
            blob = BestBlob
        return blob
    def run(self):
        try:
            # Blocks, pulling one stream per frame from self.streams().
            self.camera.capture_sequence(self.streams(), use_video_port=True, resize=self.resized)
        finally:
            if self.debug:
                print "Cam close"
            self.camera.close()
    def stop(self):
        global lock, pool, iteration
        self.aquire = False
        time.sleep(0.5)
        if self.debug:
            print"Thread Cam exiting..."
        # Wait until every thread is in the pool
        while len(pool) != self.nb_cores:
            time.sleep(0.1)
        # Shut down the processors in an orderly fashion
        while pool:
            with lock:
                processor = pool.pop()
            processor.terminated = True
            processor.join()
        if self.debug:
            print"Done"
        return iteration
    def streams(self):
        # Generator consumed by capture_sequence: yields the stream of an
        # idle processor, then signals it to start processing the frame.
        while self.aquire:
            with lock:
                if pool:
                    processor = pool.pop()
                else:
                    processor = None
            if processor:
                yield processor.stream
                processor.event.set()
            else:
                # When the pool is starved, wait a while for it to refill
                time.sleep(0.05)
if __name__ == '__main__':
    # Smoke test: capture for five seconds with the given colour id, then
    # shut the whole pipeline down cleanly.
    #Constants
    framerate = 2
    capture_resolution = (800,600)
    resize_factor = 2
    color = int(sys.argv[1])   # target colour id, first CLI argument
    aquisition = ImageAquisition(capture_resolution,framerate, color, resize_factor=resize_factor,debug=True)
    print 'Started Aquisition'
    aquisition.start()
    time.sleep(5)
    print 'Stopping Aquisition'
    aquisition.stop()
    print 'End of Processing'
| true |
207c9db14ff0407f74c8f78e200d47aa51b7c03c | Python | Kissarr/Python | /Trash/M7.py | UTF-8 | 203 | 3.625 | 4 | [] | no_license | def is_prime(a):
    # Naive primality test: scan every candidate divisor in [2, a).
    i = 2
    while i < a:
        if a % i == 0:
            print("False")
            return i          # composite: print "False", return smallest divisor
        else:
            i += 1
    print("True")
    # NOTE(review): implicitly returns None for primes (and for a < 3);
    # the caller below relies only on the printed output.
a = int(input("4islo "))
is_prime(a) | true |
dc5cecc6182f9af5a8a0e347db7d88ced3b41fe8 | Python | masteroppgave/topic_model | /models/topic_model.py | UTF-8 | 1,723 | 2.953125 | 3 | [] | no_license | import logging
import gensim
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
def generate_lda_topics(num_topics, corpus=None, dictionary=None, passes=1):
    """Train an LDA model and return {topic_index: topic_string}.

    Falls back to the pre-built 29jan_tweets dictionary/corpus stored in
    /tmp when none are supplied.
    """
    print("=== GENERATING LDA TOPICS ===")
    print("=============================")
    print("Number of topics: " + str(num_topics))
    print("Number of passes: " + str(passes))
    if not dictionary:
        print("USING DEFAULT 29jan_tweets DICTIONARY")
        dictionary = gensim.corpora.Dictionary.load("/tmp/29jan_tweets.dict")
    if not corpus:
        print("USING DEFAULT 29jan_tweets CORPUS")
        corpus = gensim.corpora.MmCorpus("/tmp/29jan_tweets.mm")
    lda = gensim.models.LdaModel(corpus=corpus, id2word=dictionary, num_topics=num_topics, passes=passes)
    # To get topic mixture for document:
    #topic_mixture = lda[dictionary.doc2bow(["love", "write", "inspir", "due", "professor", "date", "essay"])]
    #print("===== TOPIC MIXTURE=====")
    #print(topic_mixture)
    # Update model: lda.update(other_corpus)
    # show_topics yields (index, string) pairs, which dict() maps directly.
    topic_list = lda.show_topics(num_topics)
    return (dict(topic_list))
def generate_hdp_topics(num_topics, corpus=None, dictionary=None):
    """Train an HDP model and return {topic_index: topic_string}.

    Mirrors generate_lda_topics: falls back to the pre-built 29jan_tweets
    dictionary/corpus in /tmp when none are supplied.

    Fixes over the original parsing of show_topics() output:
    - ``int(index[-1])`` kept only the last digit of the topic label, so
      topics with index >= 10 collided and overwrote each other in the
      result dict; the full number is now parsed.
    - ``split(":")`` raised ValueError if a topic string contained another
      colon; a single split (maxsplit=1) is used instead.
    """
    if not dictionary:
        print("USING DEFAULT 29jan_tweets DICTIONARY")
        dictionary = gensim.corpora.Dictionary.load("/tmp/29jan_tweets.dict")
    if not corpus:
        print("USING DEFAULT 29jan_tweets CORPUS")
        corpus = gensim.corpora.MmCorpus("/tmp/29jan_tweets.mm")
    hdp = gensim.models.HdpModel(corpus, dictionary)
    topic_list = hdp.show_topics(topics=num_topics)
    _dict = {}
    for element in topic_list:
        # Each element looks like "topic <k>: <word mixture>".
        index, element = element.split(":", 1)
        index = int(index.split()[-1])   # take the whole trailing number, not one digit
        element = element.strip()
        _dict[index] = element
    return _dict
if __name__ == "__main__":
    # Python 2 entry point: train a 10-topic LDA on the default corpus.
    print generate_lda_topics(10)
| true |
ca499d1dd802dd7847a07d172148fac3c20c7e75 | Python | do-park/baekjoon | /4386_prim.py | UTF-8 | 721 | 3.0625 | 3 | [] | no_license | # BOJ 4386 별자리 만들기_prim
from heapq import heappush, heappop
INF = 1e9
def prim():
    """Prim's MST over the global complete graph `adj`; returns the total weight.

    dist[v] ends up holding the weight of the edge that attached v to the
    tree (0 for the start vertex), so sum(dist) is the MST cost.
    """
    dist = [INF] * N
    q = []
    heappush(q, [0, 0])          # (edge cost, vertex), start from vertex 0
    while q:
        cost, pos = heappop(q)
        if dist[pos] == INF:     # first pop of a vertex = its cheapest attaching edge
            dist[pos] = cost
            for i in range(N):
                if dist[i] == INF:
                    heappush(q, [adj[pos][i], i])
    return sum(dist)
# Read N star coordinates, then build a complete graph whose edge weight is
# the Euclidean distance rounded to 2 decimals (as the problem requires).
N = int(input())
stars = []
for n in range(N):
    x, y = map(float, input().split())
    stars.append((x, y))
adj = [[0] * N for _ in range(N)]
for i in range(N):
    for j in range(i + 1, N):
        adj[i][j] = adj[j][i] = round(((stars[i][0] - stars[j][0]) ** 2 + (stars[i][1] - stars[j][1]) ** 2) ** (1/2), 2)
print(prim()) | true |
7977ef31da6e35bd8d312d5c6dbccba48a5f3467 | Python | karthikeyan-17/Simple-Python-program | /Reverse the number.py | UTF-8 | 180 | 3.953125 | 4 | [] | no_license | ''' Find reverse of given number '''
rev = 0
n=int(input("Enter the reverse number:"))
# Peel decimal digits off n (least significant first) and push them onto rev.
while (n > 0):
    dig= n % 10
    rev = rev * 10 + dig
    n = n // 10
print(rev) | true |
346bd1690336d1907c7f378758bf950139db1d56 | Python | zhouxiumin/projecteuler | /python/euler23_new.py | UTF-8 | 531 | 2.90625 | 3 | [
"Apache-2.0"
] | permissive | #coding=utf-8
limit=28123     # every integer above 28123 is a sum of two abundant numbers
#limit=1000
AbundantList=[]
NonAbundantList=[]      # numbers NOT expressible as a sum of two abundant numbers
test=[0]*(limit+1)      # test[k] == 1 once k is marked as a sum of two abundants
def isAbundantNumber(n):
    """Return True if n is abundant (sum of proper divisors exceeds n).

    Fix: the original used ``n/2`` in range(); under Python 3 that is a
    float and range() raises TypeError. Floor division (``//``) behaves
    identically under Python 2 and keeps the script portable.
    """
    tsum = 0
    for i in range(1, n // 2 + 1):   # a proper divisor is at most n/2
        if n % i == 0:
            tsum += i
    return tsum > n
# Collect all abundant numbers up to the limit.
for i in range(1,limit+1):
    if isAbundantNumber(i):
        AbundantList.append(i)
# Mark every pairwise sum; the inner break relies on AbundantList being in
# increasing order (sums only grow once i+j exceeds the limit).
for i in AbundantList:
    for j in AbundantList:
        if i+j<=limit:
            test[i+j]=1
        else:
            break
# Whatever stayed unmarked cannot be written as a sum of two abundant numbers.
for i in range(1,limit+1):
    if test[i]==0:
        NonAbundantList.append(i)
# Python 2 print statements (final answer is the last line).
print AbundantList
print NonAbundantList
print sum(NonAbundantList)
| true |
f753b06dab9de0306e61f62ce50500983efc2f71 | Python | ChristianDosSantos/PDI_Estiramiento_y_ecualizaci-n_de_histogramas | /Proceso1.py | UTF-8 | 8,052 | 2.890625 | 3 | [
"MIT"
] | permissive | import cv2
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.image as mpimg
# Constants section
bi = 255            # upper bound of the stretched intensity range
ai = 0              # lower bound of the stretched intensity range
colors = ('b' , 'g' ,'r')
pixPercent = 0.005  # fraction of pixels clipped at each histogram tail
# Program start: load the image and plot its per-channel BGR histograms.
src = cv2.imread('images/imagenpdi1.jpg')
hist1 = cv2.calcHist([src],[0],None,[256],[0,256])
hist2 = cv2.calcHist([src],[1],None,[256],[0,256])
hist3 = cv2.calcHist([src],[2],None,[256],[0,256])
fig_size = plt.rcParams["figure.figsize"]
fig_size[0] = 12
fig_size[1] = 9
srcRgb = cv2.cvtColor(src, cv2.COLOR_BGR2RGB)
plt.subplot(221)
plt.imshow(srcRgb)
plt.title('Imagen Original')
plt.ylabel('Vertical pixels')
plt.xlabel('Horizontal pixels')
plt.subplot(222)
plt.plot(hist1, color = colors[0])
plt.title('Histograma Azul')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(223)
plt.plot(hist2, color = colors[1])
plt.title('Histograma Verde')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(224)
plt.plot(hist3, color = colors[2])
plt.title('Histograma Rojo')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.suptitle('Imagen inicial y sus histogramas', fontsize=16)
# Find the clipping bounds for each BGR channel: c* is the intensity below
# which pixPercent of the pixels lie, d* the intensity above which they lie.
b, g, r = cv2.split(src)
b = np.array(b,dtype = int)
g = np.array(g,dtype = int)
r = np.array(r,dtype = int)
# NOTE(review): src.shape is (rows, cols, channels), so "width" actually
# holds the row count here; the loops below index consistently with that.
width , height, depth = src.shape
pixTotalStr = int(width*height*pixPercent)
print(pixTotalStr)
pix_accb = 0
pix_accg = 0
pix_accr = 0
# Walk the histograms upward until pixTotalStr pixels are accumulated.
for i in range(0,256,1):
    if (pix_accb < pixTotalStr):
        pix_accb += int(hist1[i])
        if pix_accb >= pixTotalStr:
            cb = i
    if (pix_accg < pixTotalStr):
        pix_accg += int(hist2[i])
        if pix_accg >= pixTotalStr:
            cg = i
    if (pix_accr < pixTotalStr):
        pix_accr += int(hist3[i])
        if pix_accr >= pixTotalStr:
            cr = i
print(cb)
print(cg)
print(cr)
pix_accb = 0
pix_accg = 0
pix_accr = 0
# Same walk from the top end for the upper bounds.
for i in range(255,-1,-1):
    if (pix_accb < pixTotalStr):
        pix_accb += int(hist1[i])
        if pix_accb >= pixTotalStr:
            db = i
    if (pix_accg < pixTotalStr):
        pix_accg += int(hist2[i])
        if pix_accg >= pixTotalStr:
            dg = i
    if (pix_accr < pixTotalStr):
        pix_accr += int(hist3[i])
        if pix_accr >= pixTotalStr:
            dr = i
print(db)
print(dg)
print(dr)
# Linearly stretch each channel from [c*, d*] to [ai, bi], clamping to [0, 255].
for i in range (0, width, 1):
    for j in range (0, height, 1):
        b[i,j] = ((b[i,j] - cb)*(bi-ai)//(db-cb)) + ai
        if b[i,j] > 255:
            b[i,j] = 255
        elif b[i,j] < 0:
            b[i,j] = 0
        g[i,j] = ((g[i,j] - cg)*(bi-ai)//(dg-cg)) + ai
        if g[i,j] > 255:
            g[i,j] = 255
        elif g[i,j] < 0:
            g[i,j] = 0
        r[i,j] = ((r[i,j] - cr)*(bi-ai)//(dr-cr)) + ai
        if r[i,j] > 255:
            r[i,j] = 255
        elif r[i,j] < 0:
            r[i,j] = 0
b = np.array(b,dtype = np.uint8)
g = np.array(g,dtype = np.uint8)
r = np.array(r,dtype = np.uint8)
histb = cv2.calcHist([b], [0], None, [256], [0, 256])
histg = cv2.calcHist([g], [0], None, [256], [0, 256])
histr = cv2.calcHist([r], [0], None, [256], [0, 256])
image_proc = cv2.merge((b,g,r))
image_procRgb = cv2.cvtColor(image_proc, cv2.COLOR_BGR2RGB)
plt.figure()
plt.subplot(221)
plt.imshow(image_procRgb)
plt.title('Imagen Procesada')
plt.ylabel('Vertical pixels')
plt.xlabel('Horizontal pixels')
plt.subplot(222)
plt.plot(histb, color = colors[0])
plt.title('Histograma Azul')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(223)
plt.plot(histg, color = colors[1])
plt.title('Histograma Verde')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(224)
plt.plot(histr, color = colors[2])
plt.title('Histograma Rojo')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.suptitle('Estiramiento de histogramas en el espacio RGB', fontsize=16)
# Convert the RGB-stretched image to HSV and plot the H/S/V histograms.
image_hsv = cv2.cvtColor(image_proc, cv2.COLOR_BGR2HSV)
hc, sc, vc = cv2.split(image_hsv)
hist1 = cv2.calcHist([image_hsv],[0],None,[256],[0,256])
hist2 = cv2.calcHist([image_hsv],[1],None,[256],[0,256])
hist3 = cv2.calcHist([image_hsv],[2],None,[256],[0,256])
image_hsv_RGB = cv2.cvtColor(image_hsv, cv2.COLOR_BGR2RGB)
plt.figure()
plt.subplot(221)
plt.imshow(image_hsv_RGB)
plt.title('Imagen Inicial en HSV')
plt.ylabel('Vertical pixels')
plt.xlabel('Horizontal pixels')
plt.subplot(222)
plt.plot(hist1, color = colors[0])
plt.title('Histograma Hue')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(223)
plt.plot(hist2, color = colors[1])
plt.title('Histograma Saturation')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(224)
plt.plot(hist3, color = colors[2])
plt.title('Histograma Value')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.suptitle('Imagen en el espacio HSV', fontsize=16)
# Find the clipping bounds for the H/S/V channels, same percentile walk as
# in the RGB section (c* = lower bound, d* = upper bound per channel).
pix_acch = 0
pix_accs = 0
pix_accv = 0
for i in range(0,256,1):
    if (pix_acch < pixTotalStr):
        pix_acch += int(hist1[i])
        if pix_acch >= pixTotalStr:
            ch = i
    if (pix_accs < pixTotalStr):
        pix_accs += int(hist2[i])
        if pix_accs >= pixTotalStr:
            cs = i
    if (pix_accv < pixTotalStr):
        pix_accv += int(hist3[i])
        if pix_accv >= pixTotalStr:
            cv = i
print(ch)
print(cs)
print(cv)
pix_acch = 0
pix_accs = 0
pix_accv = 0
for i in range(255,-1,-1):
    if (pix_acch < pixTotalStr):
        pix_acch += int(hist1[i])
        if pix_acch >= pixTotalStr:
            dh = i
    if (pix_accs < pixTotalStr):
        pix_accs += int(hist2[i])
        if pix_accs >= pixTotalStr:
            ds = i
    if (pix_accv < pixTotalStr):
        pix_accv += int(hist3[i])
        if pix_accv >= pixTotalStr:
            dv = i
# Fix: the original printed the RGB bounds (db, dg, dr) again here, which
# was a copy-paste bug -- the HSV upper bounds just computed are dh, ds, dv.
print(dh)
print(ds)
print(dv)
# Stretch only the Saturation and Value channels (Hue is left untouched so
# the colours themselves do not shift), clamping to [0, 255].
sc = np.array(sc,dtype = int)
vc = np.array(vc,dtype = int)
for i in range (0, width, 1):
    for j in range (0, height, 1):
        sc[i,j] = ((sc[i,j] - cs)*(bi-ai)/(ds-cs)) + ai
        if sc[i,j] > 255:
            sc[i,j] = 255
        elif sc[i,j] < 0:
            sc[i,j] = 0
        vc[i,j] = ((vc[i,j] - cv)*(bi-ai)/(dv-cv)) + ai
        if vc[i,j] > 255:
            vc[i,j] = 255
        elif vc[i,j] < 0:
            vc[i,j] = 0
sc = np.array(sc,dtype = np.uint8)
vc = np.array(vc,dtype = np.uint8)
image_proc2 = cv2.merge((hc, sc, vc))
hist1 = cv2.calcHist([image_proc2],[0],None,[256],[0,256])
hist2 = cv2.calcHist([image_proc2],[1],None,[256],[0,256])
hist3 = cv2.calcHist([image_proc2],[2],None,[256],[0,256])
image_proc2Rgb = cv2.cvtColor(image_proc2, cv2.COLOR_BGR2RGB)
plt.figure()
plt.subplot(221)
plt.imshow(image_proc2Rgb)
plt.title('Imagen Procesada en HSV')
plt.ylabel('Vertical pixels')
plt.xlabel('Horizontal pixels')
plt.subplot(222)
plt.plot(hist1, color = colors[0])
plt.title('Histograma Hue')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(223)
plt.plot(hist2, color = colors[1])
plt.title('Histograma Saturation')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(224)
plt.plot(hist3, color = colors[2])
plt.title('Histograma Value')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.suptitle('Imagen procesada en el espacio HSV', fontsize=16)
# Convert back to BGR, save the processed image, and show the final figure.
image_final = cv2.cvtColor(image_proc2, cv2.COLOR_HSV2BGR)
cv2.imwrite('images/proceso1.jpg', image_final)
hist1 = cv2.calcHist([image_final],[0],None,[256],[0,256])
hist2 = cv2.calcHist([image_final],[1],None,[256],[0,256])
hist3 = cv2.calcHist([image_final],[2],None,[256],[0,256])
image_finalRgb = cv2.cvtColor(image_final, cv2.COLOR_BGR2RGB)
plt.figure()
plt.subplot(221)
plt.imshow(image_finalRgb)
plt.title('Imagen Final en RGB')
plt.ylabel('Vertical pixels')
plt.xlabel('Horizontal pixels')
plt.subplot(222)
plt.plot(hist1, color = colors[0])
plt.title('Histograma Azul')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(223)
plt.plot(hist2, color = colors[1])
plt.title('Histograma Verde')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.subplot(224)
plt.plot(hist3, color = colors[2])
plt.title('Histograma Rojo')
plt.ylabel('Number of pixels')
plt.xlabel('Intensity')
plt.suptitle('Imagen Final del Proceso 1', fontsize=16)
plt.show()
| true |
d002857705d224ddefd31188bea906d5680b6afd | Python | comnetstud/ARIES | /src/main/python/aries/simulation/solver/linear_solver.py | UTF-8 | 6,770 | 2.546875 | 3 | [
"MIT"
] | permissive | import numpy as np
from aries.core.constants import PCC_VOLTAGE, LINEAR_SOLVER
from aries.simulation import simulation_utils
from aries.simulation.solver.solver import Solver
class LinearSolver(Solver):
    """Solver to determine the electrical state of the grid.

    Linearized power flow: agent powers are first turned into branch
    currents at the nominal PCC voltage, the resulting line drops give a
    per-agent voltage estimate, and the branch currents are then recomputed
    at those estimated voltages.

    Fix: the deprecated ``np.complex`` alias (removed in NumPy 1.24) has
    been replaced with the builtin ``complex`` throughout; behaviour is
    unchanged because ``np.complex`` was an alias of ``complex``.
    """
    # The constructor just takes the topology and the lines specs as parameters
    # That's because to build the network matrix, we need to know the actual state of the loads
    # and this (possibly) changes at every time step.
    def __init__(self, paths, nodes, lines):
        """Initialize the grid configuration"""
        super().__init__(paths=paths, nodes=nodes, lines=lines)
        self.type = LINEAR_SOLVER

    @staticmethod
    def _net_power(state):
        """Net complex power drawn by one agent: demand minus injection."""
        inject = complex(state['inject_power']['active_power'],
                         state['inject_power']['reactive_power'])
        demand = complex(state['demand_power']['active_power'],
                         state['demand_power']['reactive_power'])
        return demand - inject

    def build(self, agents_state):
        """Build the KCL-KVL system; return (solution_vector, name->index map)."""
        lines_to_idx = {j: i for i, j in enumerate(self.lines.keys())}
        agents_to_idx = {j: i for i, j in enumerate(agents_state.keys())}
        # First pass: branch currents assuming the nominal PCC voltage everywhere.
        b = np.zeros(len(self.lines), dtype=complex)
        for agent in agents_state.keys():
            net_power = self._net_power(agents_state[agent])  # invariant per agent, hoisted
            for path in self.paths[agent].active_paths():
                if path['active'] == 1:
                    for line_name in path['path']:
                        b[lines_to_idx[line_name]] += simulation_utils.current_from_power(
                            voltage_rating=PCC_VOLTAGE,
                            power=net_power)
        # Voltage drop across each line for those currents.
        drops = np.zeros(len(b), dtype=complex)
        for line in self.lines.keys():
            line_idx = lines_to_idx[line]
            line_imp = complex(self.lines[line].resistance, self.lines[line].reactance)
            drops[line_idx] = line_imp * b[line_idx]
        # Per-agent voltage estimate: nominal for net producers, otherwise the
        # nominal voltage minus the real part of the cumulative drop along the
        # agent's active path.
        # NOTE(review): the literal 230 is presumably PCC_VOLTAGE -- confirm
        # before replacing the magic number with the constant.
        voltages = np.zeros(len(agents_state))
        for agent in agents_state.keys():
            net_power = self._net_power(agents_state[agent])
            if np.real(net_power) <= 0:
                voltages[agents_to_idx[agent]] = 230
            else:
                for path in self.paths[agent].active_paths():
                    if path['active'] == 1:
                        voltage = 0
                        for branch in path['path']:
                            branch_idx = lines_to_idx[branch]
                            voltage += drops[branch_idx]
                            if branch == path['path'][-1]:
                                voltages[agents_to_idx[agent]] = 230 - np.real(voltage)
        # Second pass: recompute branch currents at the estimated voltages.
        b = np.zeros(len(self.lines), dtype=complex)
        for agent in agents_state.keys():
            net_power = self._net_power(agents_state[agent])
            for path in self.paths[agent].active_paths():
                if path['active'] == 1:
                    for line_name in path['path']:
                        b[lines_to_idx[line_name]] += simulation_utils.current_from_power(
                            voltage_rating=voltages[agents_to_idx[agent]],
                            power=net_power)
        # Map the dictionary keys to the column indices
        solution = np.zeros(len(self.lines) + len(agents_state), dtype=complex)
        variables_to_idx = list(self.lines.keys()) + list(agents_state.keys())
        variables_to_idx = {variables_to_idx[i]: i for i in range(len(variables_to_idx))}
        for l in self.lines.keys():
            solution[variables_to_idx[l]] = b[lines_to_idx[l]]
        for a in agents_state.keys():
            solution[variables_to_idx[a]] = simulation_utils.current_from_power(
                voltage_rating=voltages[agents_to_idx[a]],
                power=self._net_power(agents_state[a]))
        return solution, variables_to_idx

    def power_from_main(self, grid_solution):
        """Complex power drawn through the main feeder (line 'B0') at PCC voltage."""
        complex_current = complex(grid_solution['B0']['real'], grid_solution['B0']['imag'])
        return PCC_VOLTAGE * np.conj(complex_current)

    def power_distribution_loss(self, grid_solution):
        """Total real power dissipated in the lines: sum of Re(Z * I * conj(I))."""
        power = 0
        for line_name in self.lines.keys():
            line_impedance = complex(self.lines[line_name].resistance, self.lines[line_name].reactance)
            line_current = complex(grid_solution[line_name]['real'], grid_solution[line_name]['imag'])
            power += np.real(line_impedance * line_current * np.conj(line_current))
        return power

    def solve(self, agents_state):
        """Solve the linearized approximation of the grid.

        Returns a dict mapping each line/agent name to its complex current
        ({'real': .., 'imag': ..}), plus 'power_from_main' and
        'distribution_loss' entries.
        """
        solution, variables_to_idx = self.build(agents_state)
        solution_dict = {
            key: {'real': np.real(solution[variables_to_idx[key]]), 'imag': np.imag(solution[variables_to_idx[key]])}
            for key in variables_to_idx.keys()}
        power_from_main = self.power_from_main(grid_solution=solution_dict)
        distribution_loss = self.power_distribution_loss(grid_solution=solution_dict)
        solution_dict['power_from_main'] = {'real': np.real(power_from_main), 'imag': np.imag(power_from_main)}
        solution_dict['distribution_loss'] = {'real': distribution_loss, 'imag': 0}
        # Returns the currents flowing through all the lines and agents
        return solution_dict
| true |
7ebb42afdf7d2f31247359992cdef77cbcb1a102 | Python | Prashitraj/CNN_NoisyClassifier | /code/dataset_split_noisy.py | UTF-8 | 523 | 2.71875 | 3 | [] | no_license | import os
from tqdm import tqdm
rdir = "../data/noisy1/simple"
list = os.listdir(rdir) # dir is your directory path
number_files = len(list)
print(number_files)
# Fixed 8000/1000/rest split.
# NOTE(review): `list` shadows the builtin, and the split silently assumes
# at least ~10000 files; os.listdir order is also not guaranteed sorted.
train = list[:8000]
val = list[8000:9000]
test = list[9000:]
# Move files into train/validation/test folders, prefixing each with "noisy_".
for file in tqdm(train):
    os.rename(rdir+"/"+file,"../data/simple/train/noisy"+"/noisy_"+file)
for file in tqdm(val):
    os.rename(rdir+"/"+file,"../data/simple/validation/noisy"+"/noisy_"+file)
for file in tqdm(test):
    os.rename(rdir+"/"+file,"../data/simple/test/noisy"+"/noisy_"+file)
| true |
d7f0bd41f56c84c0d34f010cd0573dc5742331c9 | Python | smildas/Uqibuiti-country-changer | /Threading.py | UTF-8 | 3,171 | 2.671875 | 3 | [] | no_license | import threading
import paramiko
from SshConnection import *
from IpAddressHandler import *
class Threader(threading.Thread):
    """Background worker that changes the country setting on a list of
    Ubiquiti devices over SSH, reporting progress into the GUI.

    All user-facing strings are Czech and are emitted verbatim through the
    GUI; only the source comments have been translated to English.
    """
    def __init__(self, gui, *args, **kwargs):
        threading.Thread.__init__(self, *args, **kwargs)
        self.daemon = True   # do not keep the app alive if the GUI exits
        self.gui = gui
    def run(self):
        # Disable the Start button and reset the log/progress widgets.
        self.gui.start_button['state'] = 'disabled'
        self.gui.del_info_text()
        self.gui.insert_text_to_info("Spouštím se")
        self.gui.progress_bar(0)
        error = False
        ip_handler = IpAddressHandler(self.gui)
        ips = None
        try:
            ips = ip_handler.procces_input()
        except ValueError as e:
            # Invalid IP input: log the error and stop before any SSH work.
            error = True
            self.gui.insert_text_to_info(str(e))
            self.gui.progress_bar(0)
        if not error and ips is not None:
            ip_to_change_country = len(ips)
            processed_ips = 0
            self.gui.insert_text_to_info("")
            self.gui.insert_text_to_info("")
            self.gui.insert_text_to_info("Začíná proces změn na aktivních zařízeních.")
            # If the IP list is not empty, walk through it.
            if ips is not None:
                for ip in ips:
                    self.gui.insert_text_to_info(str(ip) + ": Probíhá změna země")
                    ssh = SshConnection(ip, self.gui)
                    connected = False
                    # try/except handles the case where the antenna does not come
                    # back up within 180 s after the change; if it never does, the
                    # loop is aborted and the user is told what happened.
                    try:
                        connected = ssh.try_change_country()
                    # Device did not come back after the country/IP change.
                    except ConnectionError as e:
                        self.gui.insert_text_to_info(str(e))
                        self.gui.show_message_box_error("error", e)
                        error = True
                        break
                    # Attempted to connect to a device without UBNT credentials.
                    except paramiko.ssh_exception.SSHException as e:
                        self.gui.insert_text_to_info(e)
                        processed_ips += 1
                        self.update_progress_bar(ip_to_change_country, processed_ips)
                        continue
                    # Any other error that may occur.
                    except EOFError as e:
                        self.gui.insert_text_to_info(e)
                    if connected:
                        self.gui.insert_text_to_info(str(ip) + ": Změna proběhla v pořádku")
                    else:
                        self.gui.insert_text_to_info(str(ip) + ": Nejedná se o Ubiquiti nebo byly zadány nesprávné loginy")
                    processed_ips += 1
                    self.update_progress_bar(ip_to_change_country, processed_ips)
        if not error and ips is not None:
            self.gui.show_message_box_info("Úspěch", "Změna země na všech zařízeních proběhla v pořádku")
        # Re-enable the Start button and reset the progress bar.
        self.gui.start_button['state'] = 'normal'
        self.gui.progress_bar(0)
    def update_progress_bar(self, final_val, current_val):
        """Push current_val/final_val to the GUI progress bar as a percentage."""
        percentage = (current_val / final_val) * 100
        self.gui.progress_bar(percentage)
| true |
aa3e4cf9d318e3b4f404341bab0a9444bb54986c | Python | chipperrip/IN1900 | /veke 10/Planet.py | UTF-8 | 1,017 | 4.15625 | 4 | [] | no_license | """
Frå fysikkheftet:
Oppgave 7.1 - Planet-klasse
"""
import numpy as np
class Planet:
    """A planet described by its name, size, mass and population."""

    def __init__(self, name, radius, mass, population):
        """Store the planet's basic data (radius in m, mass in kg)."""
        self.name = name
        self.radius = radius          # metres
        self.mass = mass              # kilograms
        self.population = population  # number of people

    def density(self):
        """Return the mean density in kg/m^3 (mass over spherical volume)."""
        volume = (4 / 3) * np.pi * self.radius ** 3
        return self.mass / volume

    def print_info(self):
        """Pretty-print the planet's data to stdout."""
        info = (
            'Planet info:',
            'Name\t= ' + self.name,
            'Radius\t= %g m' % (self.radius),
            'Mass\t= %g kg' % (self.mass),
            'Density\t= %g kg/m^3' % (self.density()),
            'Population = %g people' % (self.population),
        )
        print()
        for line in info:
            print(line)
        print()
# Demo: build an Earth object and show its derived properties.
planet1 = Planet('Earth', 6371000, 5.97237e24, 7497486172)
planet1.print_info()
print (planet1.name, "has a population of", planet1.population)
"""
Terminal python Planet.py
Planet info:
Name = Earth
Radius = 6.371e+06 m
Mass = 5.97237e+24 kg
Density = 5513.6 kg/m^3
Population = 7.49749e+09 people
Earth has a population of 7497486172
""" | true |
9fec32e776452857c2ecb8eb8fdc51b9b9bd09dc | Python | Konrad-Ziarko/SudokuSolver | /100Taskschalleng/module2.py | UTF-8 | 470 | 3.734375 | 4 | [] | no_license | number = int(input('number for which factorial will be computed: '))
import time
start_time = time.time()
def factorialRecursion(base_Number):
    """Recursive factorial of base_Number.

    Side effect: when the recursion bottoms out it prints the time elapsed
    since the module-level `start_time`, timing the recursive variant.
    """
    if base_Number <= 1:
        print(time.time() - start_time)
        return 1
    else:
        return factorialRecursion(base_Number-1)*base_Number
print (factorialRecursion(number))
# Iterative version of the same factorial, timed separately for comparison.
start_time2 = time.time()
result = 1;
for iter in range (2, number+1):
    result *= iter
print(time.time() - start_time2)
print (result) | true |
f899b73dc0fe4590673dc621ceed8a33c3d0dd13 | Python | mooja/dailyprogrammer | /test_challenge50easy.py | UTF-8 | 590 | 3.015625 | 3 | [] | no_license | #!/usr/bin/env python
# encoding: utf-8
#######################################################################
# challenge 50 easy #
#######################################################################
import pytest
from challenge50easy import pick_two
# Each case: (target_sum, candidates) -> expected pair of 1-based? indices.
# NOTE(review): whether indices are 0- or 1-based is defined by pick_two in
# challenge50easy, not visible here.
@pytest.mark.parametrize('input,expected', [
    ((100, (5, 75, 25)), (2, 3)),
    ((200, (150,24,79,50,88,345,3)), (1, 4)),
    ((8, (2,1,9,4,4,56,90,3 )), (4, 5))
])
def test_pick_two(input, expected):
    assert pick_two(*input) == expected
| true |
ed90a05100b286693809a3b6988122c81f3d15e7 | Python | kennyi/Python_Project | /airportClass.py | UTF-8 | 996 | 3.265625 | 3 | [] | no_license | import csv
# make the airport class - give constructor the values below.
class Airport:
    """Simple record for one airport row read from airport.csv.

    NOTE(review): lat/long default to 0 but arrive as strings from the CSV
    reader in createAirport(); convert to float before numeric use. The
    Java-style getters are kept because external code may call them, though
    plain attribute access is the idiomatic Python alternative.
    """
    def __init__(self, airportname="", cityname="", countryname="", code3="",lat=0,long=0):
        self.airportname = airportname
        self.cityname = cityname
        self.countryname = countryname
        self.code3 = code3   # 3-letter airport code (lookup key in createAirport)
        self.lat = lat
        self.long = long
    # Trivial accessors -- one per attribute.
    def getairportname(self):
        return self.airportname
    def getcityname(self):
        return self.cityname
    def getcountryname(self):
        return self.countryname
    def getcode3(self):
        return self.code3
    def getlat(self):
        return self.lat
    def getlong(self):
        return self.long
def createAirport():
    """Read airport.csv and return {airport_code: Airport}, keyed on column 4.

    Column layout assumed: 1=name, 2=city, 3=country, 4=code, 6=lat, 7=long.
    """
    with open('airport.csv', 'r', encoding="UTF8") as f:
        reader = csv.reader(f)
        airportLookupDict = {}
        for row in reader:
            airportLookupDict[row[4]] = Airport(row[1], row[2],row[3], row[4],row[6],row[7])
        return airportLookupDict
| true |
e51aff4c1b76a3b6113d50dc1602d8e5fa7dc308 | Python | arimakaoru/CameraSystem | /source/detection_block/get_diff.py | UTF-8 | 4,176 | 2.703125 | 3 | [
"MIT"
] | permissive | # -*- coding: UTF-8 -*-
import cv2
import os
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append('../numberDetection')
from Camera import clipNumber, captureImage
data_directory = "data/original"
def show_match(file_a, file_b):
    """Show the block-wise template-match difference between two images.

    NOTE(review): near-duplicate of match() below (which additionally draws
    bounding boxes); the local `match` variable there even shadows this
    sibling function's name.
    """
    # Load the two images.
    imgA = cv2.imread(file_a)
    imgB = cv2.imread(file_b)
    # Convert to grayscale.
    grayA = cv2.cvtColor(imgA, cv2.COLOR_BGR2GRAY)
    grayB = cv2.cvtColor(imgB, cv2.COLOR_BGR2GRAY)
    # Get the image size.
    height, width = grayA.shape
    # Slide a 100x100 window (stride 50) over A, find its best match in B,
    # and store the absolute difference of the matched patches.
    result_window = np.zeros((height, width), dtype=imgA.dtype)
    for start_y in range(0, height - 100, 50):
        for start_x in range(0, width - 100, 50):
            window = grayA[start_y:start_y + 100, start_x:start_x + 100]
            match = cv2.matchTemplate(grayB, window, cv2.TM_CCOEFF_NORMED)
            _, _, _, max_loc = cv2.minMaxLoc(match)
            matched_window = grayB[max_loc[1]:max_loc[1] + 100, max_loc[0]:max_loc[0] + 100]
            result = cv2.absdiff(window, matched_window)
            result_window[start_y:start_y + 100, start_x:start_x + 100] = result
    plt.imshow(result_window)
def match(file_a, file_b):
    """Diff two images by block-wise template matching and display A, B and
    A annotated with boxes around the differing regions."""
    # Load the two images.
    imgA = cv2.imread(file_a)
    imgB = cv2.imread(file_b)
    # Convert to grayscale.
    grayA = cv2.cvtColor(imgA, cv2.COLOR_BGR2GRAY)
    grayB = cv2.cvtColor(imgB, cv2.COLOR_BGR2GRAY)
    # Get the image size.
    height, width = grayA.shape
    # Slide a 100x100 window (stride 50) over A, match it in B, and keep the
    # absolute difference of the matched patches.
    result_window = np.zeros((height, width), dtype=imgA.dtype)
    for start_y in range(0, height - 100, 50):
        for start_x in range(0, width - 100, 50):
            window = grayA[start_y:start_y + 100, start_x:start_x + 100]
            match = cv2.matchTemplate(grayB, window, cv2.TM_CCOEFF_NORMED)
            _, _, _, max_loc = cv2.minMaxLoc(match)
            matched_window = grayB[max_loc[1]:max_loc[1] + 100, max_loc[0]:max_loc[0] + 100]
            result = cv2.absdiff(window, matched_window)
            result_window[start_y:start_y + 100, start_x:start_x + 100] = result
    # Extract the contours of the binarized diff image and box each one.
    # NOTE(review): the two-value findContours return assumes OpenCV 4.x
    # (OpenCV 3.x returned three values) -- confirm the installed version.
    _, result_window_bin = cv2.threshold(result_window, 127, 255, cv2.THRESH_BINARY)
    contours, _ = cv2.findContours(result_window_bin, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    imgC = imgA.copy()
    for contour in contours:
        print(contour)
        min = np.nanmin(contour, 0)
        max = np.nanmax(contour, 0)
        loc1 = (min[0][0], min[0][1])
        loc2 = (max[0][0], max[0][1])
        cv2.rectangle(imgC, loc1, loc2, 255, 2)
    # Display the images.
    plt.subplot(1, 3, 1), plt.imshow(cv2.cvtColor(imgA, cv2.COLOR_BGR2RGB)), plt.title('A'), plt.xticks([]), plt.yticks(
        [])
    plt.subplot(1, 3, 2), plt.imshow(cv2.cvtColor(imgB, cv2.COLOR_BGR2RGB)), plt.title('B'), plt.xticks([]), plt.yticks(
        [])
    plt.subplot(1, 3, 3), plt.imshow(cv2.cvtColor(imgC, cv2.COLOR_BGR2RGB)), plt.title('Answer'), plt.xticks(
        []), plt.yticks([])
    plt.show()
def test(file_a, file_b):
    """Background-subtraction diff of two images; saves and shows the mask.

    Feeds both frames to a MOG background subtractor so the mask after the
    second apply() highlights what changed between them.
    """
    # Load the two images (1 = colour).
    img_src1 = cv2.imread(file_a, 1)
    img_src2 = cv2.imread(file_b, 1)
    fgbg = cv2.bgsegm.createBackgroundSubtractorMOG()
    fgmask = fgbg.apply(img_src1)
    fgmask = fgbg.apply(img_src2)
    # Show the mask.
    cv2.imshow('frame', fgmask)
    # Save the detection image.
    bg_diff_path = './diff.jpg'
    cv2.imwrite(bg_diff_path, fgmask)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
def cripping_field(file):
    """Perspective-clip the game field out of `file` into clip_field.png.

    The four corner coordinates are hard-coded for the fixed camera setup.
    NOTE(review): name is presumably a typo for "cropping_field"; kept
    because callers use this spelling.
    """
    clipNumber(file, "clip_field.png", output_size=[640, 640],
               l_top=[194, 279], l_btm=[1145, 625],
               r_top=[1278, 149], r_btm=[675, 65])
if __name__ == '__main__':
    # Capture a frame from the Pi camera stream, clip the field out of it,
    # then diff the reference "no block" image against the "has block" one.
    #data_directory = "./"
    src_name = captureImage(target_dir="./", url="http://raspberrypi.local/?action=stream")
    cripping_field(src_name)
    none_fig = os.path.join(data_directory, "clip_field_none_block.png")
    fix_fig = os.path.join(data_directory, "clip_field_has_block.png")
    test(none_fig, fix_fig)
#cripping_field(fix_fig) | true |
69f0d3267ab04d982a0959e1277fbb15953d8373 | Python | dr-dos-ok/Code_Jam_Webscraper | /solutions_python/Problem_136/2441.py | UTF-8 | 1,546 | 3.109375 | 3 | [] | no_license |
def main(input_address):
    """Read the contest input file and write every case result to D:\\output.txt.

    Fix: the original opened the output file without ever closing it; the
    `with` statement guarantees both handles are flushed and closed even if
    solve() raises.
    """
    with open(input_address, "r") as input_file, \
            open("D:\\output.txt", "w") as output_file:
        solve(input_file, output_file)
def solve(input_file, output_file):
    """Parse T test cases (one 'C F X' line each) and emit 'Case #i: answer'.

    The search is seeded with one farm's worth of cookies already banked:
    current = C cookies, elapsed = C / 2 seconds at the base rate of 2/s.
    """
    cases = input_file.readline()
    for i in range(int(cases)):
        parameters = input_file.readline().split()
        for t in range(3):
            parameters[t] = float(parameters[t])
        #print parameters
        output_file.write("Case #" +
            str(i+1) + ": " + str(checkSeconds(parameters, parameters[0],parameters[0]/2)) + "\n")
        #print "\n\n"
def checkSeconds(param, current, time, rate = 2):
    """Greedy simulation for the Cookie-Clicker-style problem.

    param   -- [C, F, X]: farm cost, extra rate per farm, cookies needed
    current -- cookies banked so far
    time    -- seconds elapsed so far
    rate    -- current production rate (cookies per second)

    Each step either banks another batch of C cookies at the current rate or
    buys a farm (raising the rate), whichever estimate looks cheaper; the
    final overshoot past X is refunded at the last production rate.
    """
    cost, gain, target = param[0], param[1], param[2]
    while current < target:
        remaining = target - current
        if remaining / rate <= (remaining + cost) / (rate + gain):
            # Finishing at the current rate beats buying: bank another batch.
            current += cost
            time += cost / rate
        else:
            # Buying a farm pays off: spend the time and raise the rate.
            time += cost / (rate + gain)
            rate += gain
    if current == target:
        return time
    elif current > target:
        # Refund the time spent producing the overshoot at the final rate.
        return time - ((current - target) / rate)
main("D:\\B-large.in")
| true |
623e7a20aaefad97f22603d3097fcb1ec0a9cea7 | Python | cybersaksham/Python-Tutorials | /42_class_methods.py | UTF-8 | 755 | 4.15625 | 4 | [] | no_license | class Employee:
    # Class attribute shared by every instance (the typo "leaevs" appears in
    # emp_det's runtime output string and is left untouched).
    leaves = 8
    def __init__(self, name, salary, role):
        self.name = name
        self.salary = salary
        self.role = role
    def emp_det(self):
        """Return a one-line description of this employee."""
        return f"Name is {self.name}, Salary is {self.salary}, Role is {self.role} & leaevs are {self.leaves}"
    @classmethod # This is class method i.e. it is applied to all instances
    def change_leaves(cls, newleaves): # Takes first argument as cls by default
        cls.leaves = newleaves
# Demo: mutating the class attribute via one instance affects every instance.
saksham = Employee("Saksham", 5000, "Programmer")
harry = Employee("Harry", 2500, "Instructor")
harry.change_leaves(34) # It would change for all instances now
print(f"Details of saksham are :-\n{saksham.emp_det()}")
| true |
d847e666d2ad5383e869993d7132b871ad235350 | Python | UrsaOK/Mallit | /Olio-ohjelmointi/Oliopallot.py | UTF-8 | 832 | 3.265625 | 3 | [] | no_license | from turtle import *
from math import *
import time
class Pallo(object):
    """A ball with a position; draws itself as a radius-30 turtle circle."""
    def __init__(self, x, y):
        self.x = x
        self.y = y
    def draw(self):
        # Move the (global) turtle without drawing, then draw the circle.
        penup()
        goto(self.x, self.y)
        pendown()
        circle(30)
class Pomppiva(Pallo):
    """A ball that falls under gravity and bounces off the floor (y == 0)."""
    def __init__(self, x, y, speedx=0, speedy=0):
        super(Pomppiva, self).__init__(x, y)
        self.speedx = speedx
        self.speedy = speedy
    def update(self):
        # bounce: reflect the vertical speed at the floor
        if self.y < 0:
            self.y = 0
            self.speedy *= -1
        # gravity
        self.speedy -= 0.1
        # Euler integration of the position
        self.x += self.speedx
        self.y += self.speedy
class Liikkuva(Pallo):
    """A ball that drifts one unit to the right per frame."""
    def update(self):
        self.x += 1
# Animation setup: hide the cursor and disable turtle drawing delays.
hideturtle()
speed(0)
delay(0)
pallot = []     # the balls ("pallot") being animated
pallot.append(Pomppiva(0, 100))
pallot.append(Liikkuva(50, 170))
pallot.append(Pomppiva(150, 300))
# Main loop: clear and redraw every ball, roughly 30 frames per second.
while True:
    clear()
    for p in pallot:
        p.update()
        p.draw()
time.sleep(1/30.0) | true |
3e3b8e32d8fa476ea74dd934b0f9093c84d84a28 | Python | YafengWangGitHub/YOLO-video-detection | /read.py | UTF-8 | 523 | 2.828125 | 3 | [] | no_license | import time
import pyttsx3
i = 0
history = []
engine = pyttsx3.init()
# Poll the detection label written by the detector process and speak each
# newly seen label aloud.
while True:
    # Fix: the original called open() without ever closing the handle,
    # leaking one file descriptor per loop iteration; `with` closes it
    # deterministically on every pass.
    with open('testfile.txt', 'r') as f:
        buf = f.read()
    if buf not in history:
        history.append(str(buf))
        if buf == 'person':
            engine.say("I see motherfucker")
            engine.runAndWait()
        else:
            engine.say("I see" + buf)
            engine.runAndWait()
    i += 1
    print(str(len(history)))
    # Forget seen labels every third pass so they can be announced again.
    if i>2:
        history.clear()
        i = 0
| true |
2614eecc02d48618671120d2cd3a090147c3b011 | Python | AliceHincu/FP-Assignment02 | /program.py | UTF-8 | 14,598 | 4.25 | 4 | [] | no_license | from math import sqrt
# ----- UI section ------
# write all functions that have input or print statements here
def read_real_part():
'''
Reads the real part of the complex number that was written
:return: The real part of the complex number (type:int)
'''
while True:
try:
real_part = int(input("Write the real part of the number: "))
return real_part
except ValueError:
print("Oops! This is not a natural number! Try again!")
def read_imaginary_part():
'''
Read the imaginary part of the complex number that was written
:return: The imaginary part of the complex number (type:int)
'''
while True:
try:
imaginary_part = int(input("Write the imaginary part of the number: "))
return imaginary_part
except ValueError:
print("Oops! This is not a natural number! Try again!")
def print_menu():
'''
Shows the available commands
:return: Nothing
'''
print("\nWelcome! Here are the available commands:")
print('\t+: adds a new complex number at the final of the list')
print('\tadd list: adds multiple complex numbers at the final of the list')
print('\t\tCondition: The real and imaginary parts have to be from Z')
print('\tshow: shows the current list of complex numbers')
print('\tFor displaying on the console the longest sequence that observes a given property:')
print('\t\tp1: The difference between the modulus of consecutive numbers is a prime number.')
print('\t\tp2: Sum of its elements is 10+10i ')
print('\t\tCondition: The sequence has to have at least two numbers')
print('\texit: exit this program')
print('\t ---- optionals ----')
print('\t-: delete a current complex number at the desired position')
print('\tmodify: modifies a current complex number at the desired position')
print('\thelp: shows the menu instructions again')
def read_command():
'''
Reads the command that the person wants to do and checks if it is good.
:return: The command (type: string)
'''
list_of_commands = ['+', '-', 'modify', 'show', 'help', 'exit', 'p1', 'p2', 'add list']
while True:
try:
command = input("\nWrite the command: ")
if command in list_of_commands:
return command
else:
raise ValueError
except ValueError:
print("Oops! That was not a valid command! Please try again (write help if you are stuck): ")
def say_goodbye():
'''
Displays a goodbye message when the program is done ^-^
:return: Nothing
'''
print("Goodbye! I hope you enjoyed it!")
def show_list(list):
'''
Shows the list of complex numbers
:param list: The list of complex numbers (type: list)
:return: Nothing
'''
print("\nHere are the complex numbers:")
if not list:
print("The list is empty!")
else:
for i in range(len(list)):
re = list[i][0]
im = list[i][1]
if re == 0:
if im == 0:
print('\tZ' + str(i) + '= 0')
else:
print('\tZ' + str(i) + ' = ' + str(im) + 'i')
elif im == 0:
print('\tZ' + str(i) + ' = ' + str(re))
else:
print('\tZ' + str(i) + ' = ' + str(re) + '+' + str(im) + 'i')
def print_delete_error():
'''
It's called when you can't delete an element because the list is empty
:return: Nothing
'''
print("Oops! You can't delete a number from an empty list!")
def print_modify_error():
'''
It's called when you can't modify an element because the list is empty
:return: Nothing
'''
print("Oops! You can't modify a number from an empty list!")
def read_delete_position(len):
'''
It checks if the position is a natural number smaller than the length of list.
:param len: The position that was written (type: int)
:return: The position that was written (type: int)
'''
while True:
try:
poz = int(input("From what position do you want the number to be deleted?(The counting starts from 0): "))
if poz < 0 or poz >= len:
raise ValueError
else:
return poz
except ValueError:
print("Oops! That was either not a natural number or a number which is bigger than the length! Try again: ")
def print_number(i):
'''
Displays what number are we reading
:param i: the index of the number (type: int)
:return: Nothing
'''
print("\nNumber " + str(i+1) + ':')
def read_index():
'''
Read how many numbers do we want to add
:return: how many numbers do we want to add
'''
while True:
try:
index = int(input("How many numbers do you want to read: "))
if index <= 0:
raise ValueError
else:
return index
except ValueError:
print("Oops! That was not a valid natural number! Try again: ")
def read_modify_position(len):
'''
It checks if the position is a natural number smaller than the length of list.
:param len: The position that was written (type: int)
:return: The position that was written (type: int)
'''
while True:
try:
poz = int(input("From what position do you want the number to be modified?(The counting starts from 0): "))
if poz < 0 or poz >= len:
raise ValueError
else:
return poz
except ValueError:
print("Oops! That was either not a natural number or a number which is bigger than the length! Try again: ")
def show_list_p(list, index):
'''
It prints the list of the longest sequence which respects a certain property
:param list: the list of the longest sequence (type: list)
:param index: from where the sequence starts in the original list (type: int)
:return: Nothing
'''
if not list:
print("The list is empty!")
else:
for i in range(len(list)):
re = list[i][0]
im = list[i][1]
if re == 0:
if im == 0:
print('\tZ' + str(index + i) + '= 0')
else:
print('\tZ' + str(index + i) + ' = ' + str(im) + 'i')
elif im == 0:
print('\tZ' + str(index + i) + ' = ' + str(re))
else:
print('\tZ' + str(index + i) + ' = ' + str(re) + '+' + str(im) + 'i')
def display_list(list, index):
'''
Shows the list of complex numbers that respects the property
:param list: The list of complex numbers that respects the property (type: list)
:param index: From where the sequence starts (type: int)
:return: Nothing
'''
print("\nHere is the longest list of complex numbers that respects the property:")
if not list:
print("\nThe list is either empty or it does not respect the property/condition!")
else:
show_list_p(list, index)
# ----- Function section -----
def read_number():
'''
Read the complex number that was written
:return: The complex number (type: list)
'''
real_part = read_real_part()
imaginary_part = read_imaginary_part()
complex_number = [real_part, imaginary_part]
return complex_number
def get_real(number):
'''
Returns the real part of the complex number
:param number: The complex number (type: list)
:return: The real part of the number (type: int)
'''
return number[0]
def get_imaginary(number):
'''
Returns the imaginary part of the complex number
:param number: The complex number (type: list)
:return: The imaginary part of the number (type: int)
'''
return number[1]
def set_real(number, x):
'''
It changes the real part of the number
:param number: The complex number (type: list)
:param x: The new value for the real part (type: int)
:return: The new number (type: list)
'''
number[0] = x
return number
def set_imaginary(number, x):
'''
It changes the imaginary part of the number
:param number: The complex number (type: list)
:param x: The new value for the real part (type: int)
:return: The new number (type: list)
'''
number[1] = x
return number
def add_number(list_complex):
'''
Adds a new complex number to the list
:param list_complex: The list of complex numbers (type: list)
:return: Nothing
'''
complex_number = read_number()
list_complex.append(complex_number)
def add_list(list_complex):
'''
We add multiple complex numbers
:param list_complex: the list of complex numbers (type: list)
:return: Nothing
'''
index = read_index()
for i in range(index):
print_number(i)
add_number(list_complex)
def delete_number(list):
'''
It deletes a number from the list
:param list: The list of complex numbers (type: list)
:return: Nothing
'''
if not list:
print_delete_error()
else:
length = len(list)
poz = read_delete_position(length)
del list[poz]
def modify_number(list):
'''
Modifies the value of a number from the list
:param list: The list of complex numbers (type: list)
:return: Nothing
'''
if not list:
print_modify_error()
else:
length = len(list)
poz = read_modify_position(length)
r = read_real_part()
i = read_imaginary_part()
set_real(list[poz], r)
set_imaginary(list[poz], i)
def find_module(number):
'''
It calculates the module of the complex number
:param number: The complex number (type: list)
:return: The module (type: float)
'''
r = get_real(number)
i = get_imaginary(number)
module = sqrt(r ** 2 + i ** 2)
return module
def verify_if_prime(value):
'''
Verifies if the number is prime. If it is type float => not prime
:param value: The number that has to be checked (type: float)
:return: true -> it is prime; false -> it is not prime; (type: bool)
'''
if not value.is_integer():
return False
if value == 0 or value == 1:
return False
if value == 2:
return True
if value % 2 == 0:
return False
value_root = int(sqrt(value))
for divisor in range(3, value_root+1, 2):
if value % divisor == 0:
return False
return True
def check_property_1(list):
'''
It makes a list of the numbers' modules.
Then, it checks if the difference between two consecutive modules is a prime number
It creates a new list which tells us what numbers respect the property.
1 means the number and the next number from the list are respecting the property.
:param list: The list of complex numbers (type: list)
:return: The list that tells us which numbers respects the property (type: list)
'''
length = len(list)
list_modules = []
for i in range(length): # i calculate the modules
list_modules.append(find_module(list[i]))
list_property = []
for i in range(length-1): # i check if the difference is a prime nr
dif = abs(list_modules[i] - list_modules[i+1])
if verify_if_prime(dif):
list_property.append(1)
else:
list_property.append(0)
list_property.append(0)
return list_property
def do_property_1(list):
'''
Returns the longest subsequence that checks the following property:
-The difference between the modulus of consecutive numbers is a prime number.
:param list: the list of complex numbers (type: list)
:return: the longest subsequence (type: list)
'''
p1 = check_property_1(list)
max_length = 0
max_p1 = []
max_index = 0
for i in range(len(p1)):
p1_final = []
if p1[i]:
p1_final.append(list[i])
for j in range(i + 1, len(p1)):
if p1[j]:
p1_final.append(list[j])
else:
p1_final.append(list[j])
break
if len(p1_final) > max_length:
max_p1 = p1_final
max_length = len(p1_final)
max_index = i
display_list(max_p1, max_index)
def do_property_2(list):
'''
Returns the longest subsequence that checks the following property:
-Sum of its elements is 10+10i
:param list: the list of complex numbers (type: list)
:return: the longest subsequence (type: list)
'''
max_length = 0
max_index = 0
p2_final = []
for i in range(len(list)):
s_i = get_imaginary(list[i])
s_r = get_real(list[i])
length = 1
for j in range(i+1, len(list)):
s_i += get_imaginary(list[j])
s_r += get_real(list[j])
length += 1
if s_i == 10 and s_r == 10:
if length > max_length:
p2_final = []
for k in range(i, j+1):
p2_final.append(list[k])
max_length = len(p2_final)
max_index = i
elif s_i > 10 or s_r > 10:
break
display_list(p2_final, max_index)
def do_command(cmd, list_complex):
'''
It executes the command
:param cmd: The command that was chosen (type: string)
:param list_complex: The list of complex numbers (type: list)
:return: Nothing
'''
if cmd == 'exit':
say_goodbye()
elif cmd == '+':
add_number(list_complex)
elif cmd == 'add list':
add_list(list_complex)
elif cmd == 'show':
show_list(list_complex)
elif cmd == 'help':
print_menu()
elif cmd == '-':
delete_number(list_complex)
elif cmd == 'modify':
modify_number(list_complex)
elif cmd == 'p1':
do_property_1(list_complex)
elif cmd == 'p2':
do_property_2(list_complex)
if __name__ == '__main__':
print_menu()
list_complex = [[7, 0], [0, 5], [3, 0], [7, 8], [3, 4], [3, 0], [4, 3], [0, 3], [4, 3], [0, 4]]
# list_complex =[[9, 1], [1, 9], [3, 4], [3, 0], [4, 3], [0, 3], [0, 2], [7, 8], [2, 4], [3, 3]]
# list_complex = [[1,1]]
done = False
while not done:
cmd = read_command()
do_command(cmd, list_complex)
if cmd == 'exit':
done = True
| true |
9869224a66f7f123cd28e519b3457fbce8b2e718 | Python | funnyletter/py220-online-201904-V2 | /students/Daniel_Carrasco/lesson08/assignment/tests/test_inventory.py | UTF-8 | 1,080 | 2.984375 | 3 | [] | no_license | #!/usr/bin/env python3
'''
Autograde Lesson 8 assignment
'''
import os
from pathlib import Path
import pytest
import inventory as l
def remove_file(file):
'''
Removing any data that exists
'''
file = Path.cwd().with_name('data') / file
try:
os.remove(file)
except OSError:
pass
def test_add_furniture(invoice_file, customer_name, item_code, item_description,
item_monthly_price):
'''
Testing adding furniture
'''
remove_file(invoice_file)
l.add_furniture(invoice_file, 'Emilia', 'LR04', 'Sofa', 50.00)
l.add_furniture(invoice_file, 'John', 'PS60', 'Chair', 150.00)
assert os.path.isfile(invoice_file)
with open(invoice_file, 'r') as csv_invoice:
rows = csv_invoice.readlines()
assert rows[0] == "Emilia, LR04, Sofa, 50.00\n"
assert rows[1] == "John, PS60, Chair, 150.00\n"
assert len(rows) == 2
remove_file(invoice_file)
def test_single_customer(customer_name, invoice_file):
'''
Testing the single customer function
'''
| true |
f5bf81fed0a12ac12b1a707a544fc417e5c9376a | Python | BiberTaggerProject/tagger | /corpus.py | UTF-8 | 6,848 | 3.234375 | 3 | [] | no_license | from os import walk, mkdir, path
from time import time
from collections import defaultdict
from text import Text
from errors import CorpusError
class Corpus:
"""
Used to manage corpora of CLAWS-tagged texts.
Example:
>>> c = Corpus('/home/mike/corpora/Mini-CORE_tagd_H')
>>> c.convert('/home/mike/corpora/Mini-CORE_tagd_H_BTT')
Arguments:
folder: Directory containing corpus. Can have one ore more level of subfolders.
encoding_in: Character set of corpus files e.g. UTF-8, ascii-us
"""
def __init__(self, folder, encoding_in='UTF-8'):
self.files = []
self.folder = folder
self.dirs = []
for dir_path, dir_names, file_names in walk(folder):
for fn in file_names:
self.files.append(path.join(dir_path, fn))
self.dirs.append(dir_path)
def convert(self, new_folder, ext='tec', stop_at=None, **kwargs):
"""Converts all CLAWS tagged texts in a directory to Biber tagged texts.
Arguments:
new_folder: Path to the new folder. New folder and subdirectories matching self.folder will be made if
they do not already exist.
Keyword arguments:
ext: File extension for new files.
stop_at: maximum number of files to convert
"""
t = time()
self.copy_dir_tree(new_folder)
for i, file_name in enumerate(self.files):
text = Text(file_name, **kwargs)
file_name = path.join(new_folder, file_name[len(self.folder) + 1:-3] + ext)
text.write(file_name)
if i == stop_at:
break
print('Converted', len(self.files), 'texts in', time() - t, 'seconds')
def copy_dir_tree(self, new_folder):
"""Makes new folder containing subfolders structured in the same way as the self.folder"""
for d in self.dirs:
d = new_folder + d[len(self.folder):]
if not path.exists(d):
mkdir(d)
def find(self, *token_tags, lowercase=True, whole_sent=False, sent_tail=False, save=False):
"""
Finds words and ngrams in a CLAWS tagged text.
Arguments:
token_tags: A tuple or tuples with token-tag pairs. Tuple item values must be str or NoneType.
Keyword Arguments:
lowercase: Makes the CLAWS tagged text tokens lowercase before comparing them with the token strings in token_tags
whole_sent: Returns the whole sentence of a match if true or only the matching token_tag pair if False.
Examples:
>>> c = Corpus('/home/mike/corpora/Mini-CORE_tagd_H')
Find a 'people' token with the 'NN' tag
>>> r = c.find(('people', 'NN'))
Find a 'people' token wih any tag followed by an 'in' token with any tag
>>> r = c.find(('people', None), ('in', None))
Find a 'people' token with a 'NN' tag followed by anything followed by any token with a 'VBZ' tag
>>> r = c.find(('people', 'NN'), (None, None), (None, 'VBZ'))
"""
matches = []
if [item for item in token_tags if type(item) != tuple or len(item) != 2]:
raise CorpusError("Token_tags must be tuples with two items having str or NoneType values")
for file_name in self.files:
text = Text(file_name, lowercase=lowercase)
for sent in text.sents:
match = []
ngram_ind = 0
for i, (word, tag) in enumerate(sent):
if lowercase: word = word.lower()
# word and tag must both match
if token_tags[ngram_ind][0] and token_tags[ngram_ind][1] and token_tags[ngram_ind] == (word, tag):
match.append((word,tag))
ngram_ind += 1
# word matches and no tag included in tuple for this item in token_tags
elif token_tags[ngram_ind][0] and token_tags[ngram_ind][1] is None and token_tags[ngram_ind][0] == word:
match.append((word, tag))
ngram_ind += 1
# tag matches and no word included in tuple for this item in token_tags
elif token_tags[ngram_ind][1] and token_tags[ngram_ind][0] is None and token_tags[ngram_ind][1] == tag:
match.append((word, tag))
ngram_ind += 1
elif token_tags[ngram_ind] == (None, None):
match.append((word, tag))
ngram_ind += 1
# No match
else:
match = []
ngram_ind = 0
# Adds to matches when finished
if ngram_ind == len(token_tags):
if whole_sent:
matches.append((file_name, sent))
elif sent_tail:
matches.append((file_name, sent[i:]))
else:
matches.append((file_name, match))
ngram_ind = 0
match = []
if save:
save_as = input('Save as: ')
output = '\n'.join(' '.join(tok + '_' + tag for tok, tag in line) for fn, line in matches)
with open(save_as, 'w', encoding='utf-8') as f:
f.write(output)
else:
return matches
def lex_freq(self, *tags, lowercase=True):
"""
Makes conditional frequency distribution of words and tags in a claws tagged text with tags as search string.
"""
freq_dist = {}
for file_name in self.files:
text = Text(file_name, lowercase=lowercase)
for sent in text.sents:
for word, tag in sent:
if tag in tags:
if not freq_dist.get(word, False):
freq_dist[word] = defaultdict(int)
freq_dist[word][tag] += 1
return freq_dist
def tag_freq(self, *words, lowercase=True):
"""
Makes conditional frequency distribution of words and tags in a claws tagged text with words as search string.
"""
freq_dist = {}
for file_name in self.files:
text = Text(file_name, lowercase=lowercase)
for sent in text.sents:
for word, tag in sent:
if word in words:
if not freq_dist.get(word, False):
freq_dist[word] = defaultdict(int)
freq_dist[word][tag] += 1
return freq_dist | true |
a3acabd84e0636478bfa6035091469f6c2348290 | Python | Adonais0/codeChallengeTembo | /CodeChallengeJune18.py | UTF-8 | 3,875 | 4.03125 | 4 | [] | no_license | # Without directly modifying the data structures, create a script in either
# python or javascript that cycles through all the parents and prints to the
# terminal the proper activities for their child's age group. When there are no
# more activities for that parent, print “Curriculum complete!”..
#
# (Make sure your script accounts for any edge cases in the provided data!)
parents = {
'Henry': {'childName': 'Calvin', 'age': 1},
'Ada': {'childName': 'Lily', 'age': 4},
'Emilia': {'childName': 'Petra', 'age': 2},
'Biff': {'childName': 'Biff Jr', 'age': 3},
'Milo': {}
}
activities = [
{
'age': 1,
'activity': [
'Go outside and feel surfaces.',
'Try singing a song together.',
'Point and name objects.'
]
},
{
'age': 2,
'activity': [
'Draw with crayons.',
'Play with soundmaking toys or instruments.',
'Look at family pictures together.'
]
},
{
'age': 3,
'activity': [
'Build with blocks.',
'Try a simple puzzle.',
'Read a story together.'
]
}
]
# - Personalize the message output to make it more friendly.
# - Allow users to input new activities & parents before executing the script.
while input('Do you wanna enter a new parent? (y/n)') == 'y':
newParent = input('Enter a new parent\'s name: ')
newChild = input(newParent + '\'s child\'s name is: ')
newAge = int(input(newChild + '\'s age is: '))
parents[newParent] = {'childName': newChild, 'age': newAge}
while input('Do you wanna enter some more activities? (y/n)') == 'y':
newActivity = input('Enter some new activity: ')
actAge = int(input(newActivity + ' is proper for kids at what age? '))
while not isinstance(actAge, int):
print ('please enter a valid age!')
actAge = input(newActivity + ' is proper for kids at what age? ')
ages = []
for act in activities:
ages.append(act['age'])
if actAge in ages:
list(filter(lambda activity: activity['age'] == actAge, activities))[0]['activity'].append(newActivity)
else:
activities.append({'age': actAge, 'activity': [newActivity]})
for parent in parents.keys():
print('++++++++++')
try:
print('Proper activities for ' + parent + '\'s kid: ')
for act in list(filter(lambda activity: activity['age'] == parents[parent]['age'], activities))[0]['activity']:
print(' - ' + act)
except:
if (parents[parent].keys()):
print ('There\'s no proper activity for ' + parent + '\'s kid!')
else:
print (parent + ' has no kid!')
continue
print('Curriculum complete!')
# Print one activity at a time per parent and continue cycling through until all parents have recieved all their activities.
print('=============ONE ACTIVITY PER PARENT================')
parentActivities = {}
for parent in parents.keys():
try:
actList = (list(filter(lambda activity: activity['age'] == parents[parent]['age'], activities))[0]['activity']).copy()
except:
actList = []
parentActivities[parent] = actList
while len(list(parentActivities)) != 0:
for parent in list(parentActivities):
if(len(parentActivities[parent]) != 0):
print(parent + ' \'s kid can: ' + parentActivities[parent].pop())
else:
parentActivities.pop(parent)
print(parent + '\'s kid\'s curriculum complete!')
print('++++++++++')
# Want to really shine and show us your chops? Work in some of these stretch
# goals using any tools or libraries you see fit.
# - Document your creation process with proper git workflow.
| true |
f2bb5d235513877a1c2ebc1a6a38b50488da5562 | Python | huangwenzi/qg_updataFile | /lib/svr_net_mgr.py | UTF-8 | 4,750 | 2.59375 | 3 | [] | no_license | import socket
import json
import threading
import select
import time
# 文件修改类型枚举
enum_update_file = 1 # 更新文件
enum_remove_file = 2 # 删除文件
# 发送指定数量字节
def send_data_len(c, send_len, data):
# 使用send(),最大可发送的数据也是int类型最大上限的值,32位的系统,int类型最高好象是65535,64KB左右.
one_len = 2048
sen_idx = 0
c.send(data)
# while True:
# if send_len - sen_idx > one_len:
# c.send(data[sen_idx : sen_idx+one_len])
# sen_idx += one_len
# else:
# c.send(data[sen_idx : ])
# sen_idx += len(data[sen_idx : ])
# print("send_len:" + str(sen_idx))
# break
# 监听socket
def listen_client(client_list, ser_socket):
inputs = [ser_socket,]
outputs = []
while True:
time.sleep(1)
rlist,wlist,eList = select.select(inputs,outputs,[],0.5)
# print("inputs:",inputs) # 查看inputs列表变化
# print("rlist:",rlist) # 查看rlist列表变化
# print("wlist:",wlist) # 查看rlist列表变化
# print("eList:",eList) # 查看rlist列表变化
for r in rlist:
if r == ser_socket: # 如果r是服务端
conn,address = r.accept()
inputs.append(conn) # 把连接的句柄加入inputs列表监听
client_list.append(conn)
print (address)
else:
# 尝试读取数据
client_data = None
try:
client_data = r.recv(1024)
# 下面可以添加处理
except :
inputs.remove(r)
client_list.remove(r)
class SvrNetMgr():
# 服务器socket
ser_socket = None
# 连接进来的客户端
client_list = []
# 初始化
def __init__(self, ip, port):
# 初始化监听的socket
self.serversocket = socket.socket()
self.serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.serversocket.bind((ip, port))
self.serversocket.listen(5)
# 分线程去监听接入的客户端
t1 = threading.Thread(target=listen_client, args=(self.client_list, self.serversocket))
t1.start()
# 给客户端发送文件
def send_file_to_client(self, file_path, src_path):
# 先把文件读取到内存
file_str = ""
# with open(src_path + file_path, 'r', encoding='UTF-8', errors='ignore') as f_in:
# file_str = f_in.read()
# send_obj = {"file_path":file_path, "file_str":file_str, "type": enum_update_file}
# send_str = json.dumps(send_obj)
# 二进制的格式打开文件
with open(src_path + file_path, 'rb') as f_in:
file_str = f_in.read()
# file_str = file_str.encode('utf-8', errors='ignore')
file_str_len = len(file_str)
send_obj = {"file_path":file_path, "type": enum_update_file, "len": file_str_len}
send_str = json.dumps(send_obj)
# 发送文件
for tmp_client in list(self.client_list):
try:
# 接收数据提示
begin_int = 1
send_byte = begin_int.to_bytes(4,byteorder='big', signed=False)
tmp_client.send(send_byte)
# 发送文件名和类型还有大小
# 下面这句不能少,不指定编码会失败
send_str = send_str.encode('utf-8', errors='ignore')
send_str_len = len(send_str)
send_byte = send_str_len.to_bytes(4,byteorder='big', signed=False)
tmp_client.send(send_byte)
tmp_client.send(send_str)
# 发送文件内容
# print(file_str)
print(send_obj)
send_data_len(tmp_client, file_str_len, file_str)
# tmp_client.send(file_str)
# time.sleep(0.1)
# tmp_client.send(send_str.encode('utf-8', errors='ignore'))
except :
self.client_list.remove(tmp_client)
# 客户端删除文件
def remove_file_to_client(self, file_path):
send_obj = {"file_path":file_path, "file_str":"", "type": enum_remove_file}
send_str = json.dumps(send_obj)
# 发送文件
for tmp_client in list(self.client_list):
try:
tmp_client.send(send_str.encode('utf-8', errors='ignore'))
except :
self.client_list.remove(tmp_client)
| true |
5db095330a9def3e96239124a0748e4f4111cb46 | Python | KKhushhalR2405/Face-X | /Recognition-Algorithms/Recognition using KNearestNeighbors/GUI.py | UTF-8 | 1,712 | 2.890625 | 3 | [
"MIT"
] | permissive | import cv2
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
from tkinter import Tk , Label , Frame
data = np.load("face_data.npy")
# Name = input("Whom you want to search: \n")
# print(data.shape, data.dtype)
X = data[:, 1:].astype(int)
y = data[:, 0]
model = KNeighborsClassifier()
model.fit(X, y)
cap = cv2.VideoCapture(0)
detector = cv2.CascadeClassifier("./haarcascade_frontalface_default.xml")
while True:
ret, frame = cap.read()
if ret:
faces = detector.detectMultiScale(frame,1.1,4)
for face in faces:
x, y, w, h = face
cut = frame[y:y+h, x:x+w]
fix = cv2.resize(cut, (100, 100))
gray = cv2.cvtColor(fix, cv2.COLOR_BGR2GRAY)
out = model.predict([gray.flatten()])
cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 0, 0), 2)
cv2.putText(frame, str(f"User Identified:{out[0]}"), (x, y - 10), cv2.FONT_HERSHEY_COMPLEX, 2, (255, 0, 0), 2)
cv2.imshow("My Face", gray)
cv2.imshow("My Screen", frame)
key = cv2.waitKey(1)
if key == ord("q"):
break
cap.release()
cv2.destroyAllWindows()
root = Tk()
root.geometry("400x200")
root.maxsize(400, 200)
root.minsize(350,180)
root.configure(background='Azure')
root.title("Recogniser")
my = Label(text="Image Recogniser Result",bg = "Azure",
fg='Black', font=('comicsansms', 19, 'bold'))
my.pack()
my3 = Label(text=f'User Identified: {out}',bg = "Beige", fg='Black',font=('comicsansms', 15),relief="sunken")
my3.pack(pady=50)
root.mainloop()
| true |
88a965d4e8f6bf5ada24dbcba7c167a2e67bb08f | Python | BillSeitz/wiki_ebook_maker.py | /wiki_ebook_maker.py | UTF-8 | 10,352 | 2.9375 | 3 | [] | no_license | """
wiki_ebook_maker.py by BillSeitz started Oct'2013
Goal: generate an EBook from set of wiki pages (MoinMoin)
Process
* update WikiGraph db (not in this code)
* define chapter_pages in index.txt file - store in ebook_directory that you make beside this code
* run other_pages_list() (on WikiGraph db server) to query WikiGraph db for related pages, add those to index.txt
* run index_titles_gen() to create spaced-out versions of chapter names
* manually review index.txt to cut down to list of pages to include; edit spaced-out text for odd cases (add punctuation, eliminate some caps/spaces, etc.)
* run pages_scrape() to scrape every page, save HTML files
* run pages_clean() to
* page_clean_headers(): strip headers/footers
* page_clean_body(): clean up messy HTML (pretty specific to MoinMoin style)
* page_clean_links(): convert HTTP links to in-links for pages included
* page_add_twin(): add link to online-twin-page at bottom of each page
* do something to fix weird one-word WikiNames? (no, just did a couple cases byhand)
* generate TableOfContents index.html, manually tweak (add sections breaks, header/footer, etc.) (not in this code)
* convert into EBook using CaLibre (EPub first, then KindLe)
"""
""" setup web.py stuff for db access """
import web
from web import utils
#import config
import os
space_name = 'WebSeitzWiki'
wiki_root = 'http://webseitz.fluxent.com/wiki/'
ebook_directory = 'PrivateWikiNotebook'
chapters_file = 'index.txt'
intro_page = 'HackYourLifeWithAPrivateWikiNotebookGettingThingsDoneAndOtherSystems'
our_path = os.getcwd()
def other_pages_list(): # must run on WikiGraph/db server
""" scrape WikiGraph for pages linked to chapter_pages to make candidate list to include in book"""
path = os.path.join(our_path, ebook_directory)
chapters_full = os.path.join(path, chapters_file)
in_full = open(chapters_full, 'r')
chapter_pages = [chapter.strip() for chapter in in_full] # as string to pass to db
print chapter_pages
in_full.close()
if location == 'linode':
pages_out = db.select('mentions', what = "DISTINCT page_mentioned", where = "space_name = $space_name AND page_name IN $chapter_pages AND page_mentioned in (SELECT name FROM pages WHERE space_name = $space_name)", vars={'space_name': space_name, 'chapter_pages': chapter_pages})
pages_in = db.select('mentions', what = "DISTINCT page_name", where = "space_name = $space_name AND page_mentioned IN $chapter_pages", vars={'space_name': space_name, 'chapter_pages': tuple(chapter_pages)})
else:
print '---- no db! ----'
return False
other_pages = []
for page in pages_out:
if (page.page_mentioned not in other_pages) and (page.page_mentioned not in chapter_pages):
other_pages.append(page.page_mentioned)
for page in pages_in:
if (page.page_name not in other_pages) and (page.page_name not in chapter_pages):
other_pages.append(page.page_name)
other_pages.sort()
output_full = os.path.join(path, 'other_pages.txt')
out_f = open(chapters_full, 'a')
for chapter in other_pages:
out_f.write(chapter + '\n')
print '-- Now review/change other_pages.txt --'
return other_pages
def index_titles_gen():
"""Take index.txt list of pages and generate spaced-versions
which I can then edit by hand for ongoing use"""
import re
chapters_full = os.path.join(our_path, ebook_directory, chapters_file)
in_full = open(chapters_full, 'r')
chapters = in_full.readlines()
out_full = open(chapters_full, 'w')
for chapter in chapters:
chapter = chapter.strip()
pat = '([a-z])([A-Z])'
chapter_expand = re.sub(pat, r'\1 \2', chapter)
out_full.write(chapter + ';' + chapter_expand + '\n')
def chapters_dict():
"""Read index.txt and parse into dictionary of pagenames and titles"""
print 'running chapters_dict()'
chapters_dict = {}
chapters_full = os.path.join(our_path, ebook_directory, chapters_file)
in_full = open(chapters_full, 'r')
chapters = in_full.readlines()
for chapter in chapters:
chapter = chapter.strip()
(pagename, page_title) = chapter.split(';')
chapters_dict[pagename] = page_title
return chapters_dict
def page_scrape(page_name, refetch_all=False):
""" grab one page's HTML and write to file """
import urllib2
output_path = os.path.join(our_path, ebook_directory, page_name + '.html')
if not refetch_all: # check for existing file
if os.path.exists(output_path):
print '-- skipping ', page_name
return
print 'scraping ', page_name
page_url = wiki_root + page_name
page_resp = urllib2.urlopen(page_url)
page_contents = page_resp.read()
out_f = open(output_path, 'w')
out_f.write(page_contents)
def pages_scrape(refetch_all=False):
""" grab all the pages and scrape them """
path = os.path.join(our_path, ebook_directory)
chapters_full = os.path.join(path, chapters_file)
in_full = open(chapters_full, 'r')
chapters = in_full.readlines()
for chapter_line in chapters:
chapter = chapter_line.strip().split(';')[0]
page_scrape(chapter, refetch_all)
def page_clean_headers(page_name, page_contents, chapters_dict = chapters_dict()):
""" clean 1 page's header and footers"""
prefix = """<div id="page" lang="en" dir="ltr">
<div dir="ltr" id="content" lang="en"><span class="anchor" id="top"></span>
<span class="anchor" id="line-1"></span>"""
suffix = """<span class="anchor" id="bottom"></span></div>"""
title = chapters_dict[page_name]
print 'title: ', title
prefix_pos = page_contents.find(prefix) + len(prefix)
if prefix_pos <= len(prefix):
print 'bad prefix_pos'
return page_contents
page_contents = page_contents[prefix_pos:]
suffix_pos = page_contents.find(suffix)
if suffix_pos < 10:
print 'bad suffix_pos'
page_contents = page_contents[0:suffix_pos]
header = """<html><head><meta http-equiv="Content-Type" content="text/html;charset=utf-8"><title>""" + title + """</title><link href="stylesheet.css" rel="stylesheet" type="text/css" /></head><body>\n"""
title_head = "<h1>" + title + "</h1>\n"
footer = "\n</body></html>"
page_contents = header + title_head + page_contents + footer
return page_contents
def page_clean_body(page_contents, chapters_dict = chapters_dict()):
    """Normalise body markup: collapse list-item wrappers, strip span tags,
    and drop paragraph class attributes."""
    import re
    # (pattern, replacement) pairs, applied in the same order as the
    # original cleanup sequence so the output is byte-identical
    substitutions = (
        ('<li><p[^>]*>', '<li>'),      # clean <li><p> case
        ('<li class="gap">', '<li>'),  # clean <li class="gap">
        ('<span[^>]*>', ''),           # remove opening span tags
        ('</span>', ''),               # remove closing span tags
        ('<p class[^>]*>', '<p>'),     # remove p class info
    )
    for pattern, replacement in substitutions:
        page_contents = re.sub(pattern, replacement, page_contents)
    return page_contents
def wikilog_link_clean(match, chapters_dict = chapters_dict()):
    """check link against list of pages in book, output correct link type

    Used as a re.sub replacement callback: receives the match for a whole
    '<a href="...">' opening tag and returns the rewritten tag.
    """
    link = match.group() # whole a-href-start tag
    prefix = '<a href="'
    # strip the leading '<a href="' and the trailing '">'
    page_name = link[len(prefix):-2]
    #print 'page_name', page_name
    if page_name in chapters_dict.keys():
        # page is part of the ebook: link to the local .html copy
        return '<a class="inbook" href="%s.html">' % (page_name)
    else:
        # otherwise link back to the online wiki
        return '<a class="wikilog" href="%s%s">' % (wiki_root, page_name)
def page_clean_links(page_contents, chapters_dict = chapters_dict()):
    """clean 1 page's links"""
    import re
    # elim 'nonexistent' hrefs -- unlink red (not-yet-created) wiki pages,
    # keeping only their anchor text
    pat = '<a class="nonexistent" href[^>]*>([^<]*)</a>'
    page_contents = re.sub(pat, r'\1', page_contents)
    # clarify interwiki links
    print 'convert interwiki links'
    pat = '<a class="interwiki" href="([^"]+)" title="([^"]+)">([^<]+)</a>'
    page_contents = re.sub(pat, r'<a class="http" href="\1">\2:\3</a>', page_contents)
    # convert WikiLog links to either http-online links or local.html links;
    # wikilog_link_clean is a re.sub callback invoked once per matched tag
    pat = '<a href="([^"]+)">'
    page_contents = re.sub(pat, wikilog_link_clean, page_contents)
    return page_contents
def page_add_twin(page_name, page_contents):
    """Insert a link to the page's online twin (plus an extra closing p)
    just before the closing body tag."""
    marker = '</body>'
    insert_at = page_contents.find(marker)
    twin_link = '<p><a class="wikilog" href="%s%s">(Online version of page)</a><p>' % (wiki_root, page_name)
    return page_contents[:insert_at] + twin_link + page_contents[insert_at:]
def page_clean(page_name, chapters_dict = chapters_dict()):
    """umbrella function to call multiple cleaning functions on a page

    Order matters: headers first, then body markup, then links, and
    finally the online-twin link is appended.  The cleaned HTML is
    written back over the original file.
    """
    page_path = os.path.join(our_path, ebook_directory, page_name + '.html')
    # context managers close both handles (the original leaked the read
    # handle and never closed out_f)
    with open(page_path, 'r') as in_f:
        page_contents = in_f.read()
    page_contents = page_clean_headers(page_name, page_contents, chapters_dict)
    page_contents = page_clean_body(page_contents, chapters_dict)
    page_contents = page_clean_links(page_contents, chapters_dict)
    page_contents = page_add_twin(page_name, page_contents)
    with open(page_path, 'w') as out_f:
        out_f.write(page_contents)
def pages_clean():
""" grab all the pages listed in index.txt and clean them """
for page_name in chapters_dict().keys():
print 'cleaning ', page_name
page_clean(page_name)
def pages_combine():
    """Take all the local HTML files and combine into single giant file for use in TwitterBot

    Each page is followed by a '\\r\\r' separator (including the last one,
    matching the original output).
    """
    # collect pieces in a list and join once: repeated string concatenation
    # is quadratic in the total size
    pieces = []
    for page_name in chapters_dict().keys():
        page_path = os.path.join(our_path, ebook_directory, page_name + '.html')
        with open(page_path, 'r') as in_f:
            pieces.append(in_f.read())
        pieces.append('\r\r')
    page_combined = ''.join(pieces)
    page_path = os.path.join(our_path, ebook_directory, 'PrivateWikiNotebook.txt')
    # with-block flushes and closes the output (the original never closed it)
    with open(page_path, 'w') as out_f:
        out_f.write(page_combined)
if __name__ == '__main__':
    # Pipeline stages in order; earlier stages are left commented out so a
    # single stage can be re-enabled and run on its own.
    #other_pages_list()
    #index_titles_gen()
    #page_scrape('HackYourLifeWithAPrivateWikiNotebookGettingThingsDoneAndOtherSystems')
    #page_scrape('PickingAWikiForYourPrivateNotebook')
    #pages_scrape()
    #page_clean_headers('TellYourLifeStoryThroughVariousFilters')
    #page_clean('PickingAWikiForYourPrivateNotebook')
    #pages_clean()
    pages_combine()
| true |
26baf45eece63a545c32889c79c387c6c137b7da | Python | chensivan/codemend | /codemend/demo/nl_query.py | UTF-8 | 1,652 | 2.609375 | 3 | [] | no_license | import sys
import whoosh
import whoosh.index
import whoosh.query
sys.path.append('../docstring_parse')
from consolidate import get_method, get_class
INDEX_DIR = 'index'
class NLQueryHandler:
    """Natural-language lookup against the prebuilt Whoosh indexes in INDEX_DIR.

    'fu'  index: searched per-function (documents carry a func_id field).
    'fau' index: searched per function-argument (func_id + arg fields).
    Usable as a context manager so the searchers are always released.
    """
    def __init__(self):
        # opens both on-disk indexes and keeps long-lived searchers;
        # callers must close() (or use `with`) to release them
        print 'Loading indexes...'
        self.index_fu = whoosh.index.open_dir(INDEX_DIR, 'fu', True)
        self.index_fau = whoosh.index.open_dir(INDEX_DIR, 'fau', True)
        print 'Indexes loaded.'
        self.searcher_fu = self.index_fu.searcher()
        self.searcher_fau = self.index_fau.searcher()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # always release searchers, even on error
        self.close()
    def close(self):
        """Release both Whoosh searchers."""
        self.searcher_fu.close()
        self.searcher_fau.close()
    def findFunction(self, query):
        """Free-text search of the function-level ('fu') index."""
        queryObj = self.createOrQuery(query)
        return self.searcher_fu.search(queryObj)
    def findArg(self, func_id, query):
        """Free-text search of the argument-level ('fau') index,
        restricted to documents whose func_id matches."""
        orQuery = self.createOrQuery(query)
        funcIdQuery = whoosh.query.Term('func_id', func_id)
        queryObj = whoosh.query.And([orQuery, funcIdQuery])
        return self.searcher_fau.search(queryObj)
    def createOrQuery(self, query):
        """OR together one Variations('utter', term) clause per
        whitespace-separated token of the query."""
        terms = query.split()
        return whoosh.query.Or([whoosh.query.Variations('utter', term) for term in terms])
if __name__ == "__main__":
    """Testing NLQueryHandler"""
    # smoke test: one function-level and one argument-level query,
    # printing the top five scored hits of each
    with NLQueryHandler() as nqh:
        results = nqh.findFunction('create horizontals bar plot')
        print len(results)
        for r in results[:5]:
            print r.score, r['utter'], r['func_id']
        results = nqh.findArg('matplotlib.axes.Axes.barh', 'add hatching pattern')
        print '-------------'
        print len(results)
        for r in results[:5]:
            print r.score, r['utter'], '-', r['arg']
| true |
40d82d96e3c61be491a6e56b34b886ad913e92a6 | Python | st0le/pyEuler | /7.py | UTF-8 | 164 | 2.625 | 3 | [] | no_license | #Project Euler 7
#http://projecteuler.net/problem=7
from utils import *
def solve():
return filter(is_prime,xrange(150000))[10000]
print "Solution #7 :",solve() | true |
b4fc232b5474a6d7b27d7f21645f8df08c6a0a9f | Python | sqbian/METABAT | /finalresult.py | UTF-8 | 10,819 | 2.53125 | 3 | [] | no_license | from Bio import SeqIO
import pandas as pd
import re
import os
import numpy as np
import pickle
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import minimum_spanning_tree
from scipy.sparse.csgraph import connected_components
from scipy.spatial.distance import cdist
import networkx as nx
import matplotlib.pyplot as plt
import pylab
from itertools import count
##Create membership matrix
###Need to change the directory which contains all of the binning result
###Run metabat N times
N = 200
print(N+1)
# NOTE(review): hard-coded absolute macOS path -- adjust per machine
files = os.listdir('/Users/bian0553/Downloads/finalresult/finalbin')
# assumes the only surplus entry is macOS's '.DS_Store'; files.index raises
# ValueError if it is absent -- TODO confirm
if len(files) > N:
    location = files.index('.DS_Store')
    del files[location]
num = 1
headers = ["Contig",'Membership']
meta = pd.DataFrame()
# one column per metabat run: meta['contigs'] plus columns 1..N of bin ids
for fn in files:
    handle = pd.read_csv("/Users/bian0553/Downloads/finalresult/finalbin/"+fn, names = headers,sep = '\t')
    meta['contigs']= handle['Contig']
    meta[num] = handle['Membership']
    num = num+1
# bin id 0 means "unbinned": treat it as missing
meta = meta.replace(0,np.nan)
##check if NA shows up in the last row of matrix.
lastrow = meta.iloc[meta.shape[0]-1][1:N+1]
##The last row cannot have NA
if(sum(pd.isnull(lastrow)) == N):
    ##get to know the first elements without NA showing up
    for i in range(meta.shape[0]):
        row = meta.iloc[i][1:N+1]
        if(sum(pd.isnull(row)) == 0):
            num = i
            break
    ##Move the row just found without NA showing to the last row
    meta.loc[meta.shape[0]] = meta.iloc[num]
    meta = meta.drop(num)
##get final membership
meta = meta.reset_index(drop = True)
##load the contig size document (note the directory changing)
contigsize = pd.read_csv("contigs.txt",sep = None,header =0,engine = 'python')
###################################do subsampling#####################################################
####If you get the membership data from the assembly.fa.subset, ignore this function.
###If get the membership data from the whole dataset and you want to do the subsampling
###then run the subsampling function
###Input the membership data (meta) and the file contains contigs size table. Return subsampled membership matrix
def subsampling(result, congtigsize):
    """Subsample the membership matrix per genome.

    Keeps genomes with 300..1000 contigs, draws 100 contigs per genome
    (with replacement, via np.random.choice), then trims each sample by
    repeatedly removing its largest contig until the sampled bases fit in
    330000 bp.  Returns the matching rows of `result` (which also gains a
    'genome' column as a side effect).

    Fixes vs. the original:
      * the body read the module-level `contigsize`, silently ignoring the
        (misspelled) `congtigsize` parameter -- it is now aliased and used;
      * `result.ix[...]` was removed in pandas 1.0; `.iloc` is used (the
        indices are positional);
      * writes into `gentable` use .loc instead of chained assignment;
      * dead locals (`total`, `new`, `size`) were removed.
    """
    contigsize = congtigsize  # honour the argument (see docstring)
    #get how many genomes in total
    genome = []
    for name in result['contigs']:
        a = re.sub('_[0-9]*-[0-9]*',"",name)
        m = re.search('[A-Za-z\|0-9\.]*_\[([A-Za-z_0-9]*)\]',a)
        genome.append(m.group(1))
    genome_sum = np.unique(genome)
    result['genome'] = genome
    #get genome table: genome name -> number of contigs
    columns = ['genome','number']
    index = np.arange(len(genome_sum))
    gentable = pd.DataFrame(columns=columns, index = index)
    num = 0
    for unique in genome_sum:
        gentable.loc[num, 'genome'] = unique
        gentable.loc[num, 'number'] = genome.count(unique)
        num = num+1
    # keep only genomes with a moderate contig count
    genome = []
    for num in range(len(gentable['genome'])):
        if gentable['number'][num] <= 1000 and gentable['number'][num] >= 300:
            genome.append(gentable['genome'][num])
    # draw 100 contigs (with replacement) per surviving genome
    sample1 = {}
    for gen in genome:
        loc = np.random.choice(np.where(result['genome'] == gen)[0], 100)
        contig = []
        for i in loc:
            contig.append(result['contigs'][i])
        sample1[gen] = contig
    ##get size for each sampled contig
    sizedic = {}
    num = 0
    for key in sample1:
        size = []
        for item in sample1[key]:
            index = list(contigsize['Name']).index(item)
            size.append(contigsize['Size'][index])
        sizedic[num] = size
        num = num+1
    ##renumber the samples 0..k-1 to align with sizedic
    sample = {}
    num = 0
    for key in sample1:
        sample[num] = sample1[key]
        num = num + 1
    # trim each sample: drop the largest contig until <= 330000 bp total
    for key in sizedic:
        count = sum(sizedic[key])
        while (count > 330000):
            deletloc = sizedic[key].index(max(sizedic[key]))
            del sample[key][deletloc]
            sizedic[key].remove(max(sizedic[key]))
            count = sum(sizedic[key])
    # gather the surviving contig names and select their rows positionally
    newitem = []
    for key in sample:
        for item in sample[key]:
            newitem.append(item)
    loc = []
    for item in newitem:
        loc.append(list(result['contigs']).index(item))
    test = result.iloc[loc]
    test = test.reset_index(drop = True)
    return test
#newdata = subsampling(meta,contigsize)
####Then use 'newdata' as the new generated membership matrix in the following steps
########################################################################################
### meta is the membership matrix
def coassociation_matrix(iter, meta):
    """Co-association matrix for clustering run `iter`.

    Entry (i, j) is 1.0 when contigs i and j share the same (non-NaN)
    cluster label in column `iter` of `meta`, else 0.
    """
    labels = meta[iter].unique()
    row_idx = []
    col_idx = []
    # every ordered pair of members of the same cluster gets an entry
    for label in labels[~np.isnan(labels)]:
        members = np.where(meta[iter] == label)[0]
        for i in members:
            for j in members:
                row_idx.append(i)
                col_idx.append(j)
    ones = np.ones(len(row_idx))
    return csr_matrix((ones, (row_idx, col_idx)), dtype='float')
def compresult(coassociation_matrix,t,membership_matrix):
    """Cut the co-association MST at threshold t; return connected
    components as {cluster index: [contig names]}.

    NOTE: the first parameter shadows the module-level
    coassociation_matrix() function.  Edge weights are NEGATED association
    frequencies (so minimum_spanning_tree keeps the strongest links);
    callers therefore pass t <= 0 (e.g. -0.5), and any edge numerically
    greater than t (i.e. association weaker than |t|) is removed.
    """
    mst = minimum_spanning_tree(-coassociation_matrix)
    mst = mst.toarray()
    mst[mst>t] = 0
    # NOTE(review): this scipy result is never used; the components that
    # are returned come from networkx below.
    number_of_clusters, labels = connected_components(mst)
    G = nx.from_scipy_sparse_matrix(csr_matrix(mst))
    comp = list(nx.connected_components(G))
    binning_test = {}
    for i in range(len(comp)):
        value = []
        for index in comp[i]:
            value.append(membership_matrix['contigs'][index])
        binning_test[i] = value
    return binning_test
def getsize(binning_test, contigsize):
    """Map each multi-contig cluster to the list of its contigs' sizes.

    Singleton clusters are skipped; surviving clusters are renumbered
    0..k-1 in iteration order (same as the original).

    Performance fix: the original rebuilt list(contigsize['Name']) and did
    a linear .index() scan for EVERY contig (O(n*m)); a one-time
    name -> size table makes each lookup O(1).  Assumes contig names are
    unique in `contigsize` (the original took the first occurrence).
    """
    size_by_name = dict(zip(contigsize['Name'], contigsize['Size']))
    sizedic = {}
    num = 0
    for key in binning_test:
        members = binning_test[key]
        if len(members) > 1:
            sizedic[num] = [size_by_name[item] for item in members]
            num += 1
    return sizedic
def cleanbin(binning_test):
    """Reduce each cluster's contig names to bare genome names.

    Per name: strip the trailing '_<start>-<end>' coordinate suffix, then
    extract the genome name from the '[...]' brackets.  Singleton clusters
    are dropped and the survivors are renumbered 0..k-1.
    """
    genome_pattern = re.compile(r'[A-Za-z\|0-9\.]*_\[([A-Za-z_0-9]*)\]')
    cleaned = {}
    for key, values in binning_test.items():
        genomes = []
        for value in values:
            stripped = re.sub('_[0-9]*-[0-9]*', "", value)
            genomes.append(genome_pattern.search(stripped).group(1))
        cleaned[key] = genomes
    newbin = {}
    for genomes in cleaned.values():
        if len(genomes) > 1:
            newbin[len(newbin)] = genomes
    return newbin
def genomesize(binning, sizedic):
    """Per cluster: total contig size of each distinct genome.

    Output lists are ordered by sorted (np.unique) genome name, aligned
    with other consumers of np.unique(binning[key]).
    """
    total = {}
    for key in binning:
        names = np.asarray(binning[key])
        sizes = sizedic[key]
        per_genome = []
        for genome in np.unique(names):
            positions = np.where(names == genome)[0]
            per_genome.append(sum(sizes[i] for i in positions))
        total[key] = per_genome
    return total
##input size dictionary and cleaned binning dictionary
def getrecall(total, binning):
    """Recall per cluster: size of the cluster's dominant genome divided by
    that genome's total size summed over every cluster containing it.

    Assumes `total` and `binning` share integer keys 0..k-1 (as produced
    by cleanbin/genomesize) and that total[key][i] corresponds to the i-th
    entry of np.unique(binning[key]).

    Bug fix: the original indexed a Python list with a whole numpy array
    (total[key][loc] where loc = np.where(...)[0]), which raises a
    TypeError on modern numpy; a scalar index is used instead.
    """
    # index (within the sorted unique-genome list) of the largest genome
    maxloc = [total[key].index(max(total[key])) for key in total]
    # dominant (largest-size) genome name per cluster
    genome = []
    for key in binning:
        allgen = np.unique(np.asarray(binning[key]))
        genome.append(allgen[maxloc[key]])
    # total size of each dominant genome across every cluster containing it
    gensize = []
    for gen in genome:
        size = 0
        for key in binning:
            allgen = np.unique(np.asarray(binning[key]))
            if gen in allgen:
                loc = int(np.where(allgen == gen)[0][0])  # scalar index
                size = size + total[key][loc]
        gensize.append(size)
    return [float(max(total[key])) / float(gensize[key]) for key in total]
def getprecision(total):
    """Precision per cluster: dominant genome size over total cluster size."""
    return [float(max(sizes)) / float(sum(sizes)) for sizes in total.values()]
def gettable(genedic):
    """Contingency table: one row per reference genome, one column per
    multi-member cluster, counting how many cluster members hit each
    reference.

    NOTE(review): relies on the module-level `contigsize` having a 'New'
    column, which the loading code above does not create -- confirm the
    contigs.txt schema.
    """
    df = pd.DataFrame()
    df['Reference'] = contigsize['New'].unique()
    num = 0
    for key in genedic:
        if len(genedic[key]) > 1:
            df[num] = np.zeros(len(contigsize['New'].unique()))
            for item in genedic[key]:
                loc = np.where(item == df['Reference'])[0]
                # NOTE(review): chained assignment is deprecated in pandas;
                # df.loc[loc, num] += 1 is the safe form
                df[num][loc] = df[num][loc] + 1
            num = num+1
    return df
###get coassociation matrix: c accumulates co-association counts over all N runs
c = coassociation_matrix(1,meta)
for i in range(2,N+1):
    c = c + coassociation_matrix(i,meta)
########plot the relationship between the F1 mean score with each time metabat running.########
index = range(1,N+1)
fscore = []
finalf = []
for i in index:
    # Bug fix: the original reassigned `c` here, clobbering the cumulative
    # matrix built above, so the later `c = c/N` normalised only run N's
    # matrix; use a separate name for the per-run matrix.
    c_run = coassociation_matrix(i,meta)
    binning_test = compresult(c_run,0,meta)
    sizedic = getsize(binning_test,contigsize)
    binning = cleanbin(binning_test)
    total = genomesize(binning, sizedic)
    precision = np.asarray(getprecision(total))
    recall = np.asarray(getrecall(total,binning))
    f1score = 2*(precision*recall)/(precision+recall)
    fscore.append(f1score.mean())
    finalf.append(max(fscore))  # running best-so-far F1
    print(i)
fig = plt.figure()
ax = fig.add_subplot(111)
plt.plot(index,finalf)
plt.xlabel('each time of metabat run')
plt.ylabel('F1 score')
plt.title('Plot for F1 score with each time metabat')
x = fscore.index(max(finalf))+1
y = max(finalf)
xy=(x,y)
ax.annotate('(%s, %s)' % xy, xy=xy, textcoords='data')
plt.grid(True)
plt.savefig("onetimef1score.png")
#######################plot the curves: sweep the cut threshold on the averaged matrix
c = c/N
trange = np.arange(0.1,1.0,0.1)
fscore = []
roc = []
poc = []
for t in trange:
    binning_test = compresult(c,-t,meta)
    sizedic = getsize(binning_test,contigsize)
    binning = cleanbin(binning_test)
    total = genomesize(binning, sizedic)
    precision = np.asarray(getprecision(total))
    recall = np.asarray(getrecall(total,binning))
    f1score = 2*(precision*recall)/(precision+recall)
    fscore.append(f1score.mean())
    roc.append(recall.mean())
    poc.append(precision.mean())
    print(t)
plt.plot(trange,roc,label = 'recall')
plt.plot(trange,poc, label = 'precision')
plt.plot(trange,fscore,label = 'f1 score')
plt.xlabel('threshold')
plt.ylabel('score')
plt.title('Score VS Threshold choosing')
plt.grid(True)
plt.legend(loc = 3)
plt.savefig("score.png")
####check the relationship between f1 score and the replication numbers
##This is computationally expensive. Will run couple days
c = coassociation_matrix(1,meta)
binning_test = compresult(c,0,meta)
sizedic = getsize(binning_test,contigsize)
binning = cleanbin(binning_test)
total = genomesize(binning, sizedic)
precision = np.asarray(getprecision(total))
recall = np.asarray(getrecall(total,binning))
f1score = 2*(precision*recall)/(precision+recall)
score = []
score.append(f1score.mean())
for i in range(2,N+1):
    print(i)
    c = c + coassociation_matrix(i,meta)
    newc = c/i
    trange = np.arange(0.1,1.0,0.1)
    fscore = []
    for t in trange:
        print(t)
        # Bug fix: threshold the NORMALISED matrix (newc); the original
        # passed the raw count matrix `c`, so fractional thresholds in
        # (0.1..0.9) were compared against integer counts.
        binning_test = compresult(newc,-t,meta)
        sizedic = getsize(binning_test,contigsize)
        binning = cleanbin(binning_test)
        total = genomesize(binning, sizedic)
        precision = np.asarray(getprecision(total))
        recall = np.asarray(getrecall(total,binning))
        f1score = 2*(precision*recall)/(precision+recall)
        fscore.append(f1score.mean())
    score.append(max(fscore))
plt.plot(range(1,N+1),score,label = 'f1 score')
plt.xlabel('replication')
plt.ylabel('F1 score')
plt.title('Score VS Replication number')
plt.grid(True)
plt.savefig("scoreandrep.png")
| true |
6f112731d1fed9bccd8aa8b0ee089610b6b6a661 | Python | Lolization/MAchtung | /server.py | UTF-8 | 3,887 | 2.609375 | 3 | [] | no_license | import time
from socket import socket, AF_INET, SOCK_STREAM, error
from _thread import *
from typing import Union
import pickle
from room import Room
from game import Game
from globe import *
from account import Account
# TODO - proper game cycle:
# TODO - 1. points counter
# TODO - 2. menu
# TODO - 3. messages to the user (you win, you lose, etc.)
# TODO - create an .exe file
rooms = []        # every live Room, in creation order
s = None          # listening server socket, set by create_socket()
lobby_conns = []  # client connections currently in the lobby (no room)
room_cons = {}    # room id -> list of member client connections
def main():
    """Accept loop: handshake each new client and hand it to a worker thread.

    SECURITY NOTE(review): credentials arrive via pickle.loads on raw
    socket data -- unpickling untrusted input allows arbitrary code
    execution; a plain serialisation format would be safer.
    """
    # TODO - organize, annoy Termiland
    create_socket()
    while True:
        conn, address = s.accept()
        lobby_conns.append(conn) # TODO: Email and pass thing verification
        print("Connected to:", address)
        username, password = pickle.loads(conn.recv(1048))
        print(username, password, " entered")
        # TODO: Check if account already exists (Wrong pass, get info, etc.)
        account = Account(username, password)
        start_new_thread(threaded_client, (conn, account))
def create_socket():
    """Create the listening TCP socket on 0.0.0.0:PORT (PORT comes from
    globe) and store it in the module-level `s`."""
    global s
    s = socket(AF_INET, SOCK_STREAM)
    try:
        s.bind(("0.0.0.0", PORT))
    except error as e:
        # report the failure; the original evaluated str(e) and threw the
        # result away, silently continuing after a failed bind
        print("Socket bind failed:", e)
    s.listen(2)
    print("Waiting for a connection, Server Started")
def get_room(identifier):
    """Return the Room whose id equals `identifier`, or None if absent."""
    return next((room for room in rooms if room.id == identifier), None)
def create_room(connection, acc):
    """Create a Room for `acc`, broadcast it to everyone still in the
    lobby, and register `connection` as the room's first member socket."""
    new_room = Room()\
        .add_account(acc)
    # the creator leaves the lobby...
    lobby_conns.remove(connection)
    # ...and every remaining lobby client learns about the new room
    for con in lobby_conns:
        con.sendall(pickle.dumps(new_room))
    connection.sendall(pickle.dumps(new_room))
    rooms.append(new_room)
    room_cons[new_room.id] = [connection]
    return new_room
def join_room(connection: socket, acc: Account, room_id: int) -> Room:
    """Add `acc` to room `room_id`: notify existing members of the
    newcomer, send the newcomer the pre-join member list, then register
    the socket with the room."""
    room = get_room(room_id)  # type: Room
    # existing members learn about the joining account first...
    for con in room_cons[room_id]:  # type: socket
        con.sendall(pickle.dumps(acc))
    # ...then the joiner receives the membership as it was before joining
    connection.sendall(pickle.dumps(room.accounts))
    room.add_account(acc)
    room_cons[room_id].append(connection)
    return room
def threaded_client(conn: socket, account: Account) -> None:
    """Per-client worker: lobby phase (join/create a room), ready phase,
    then the game loop relaying snake positions between players.

    NOTE(review): pickle.loads on socket data is unsafe with untrusted
    clients; `while not room.running: pass` is a busy-wait spin.
    """
    global s
    while True:
        # lobby phase: send the account plus the current room list
        conn.sendall(pickle.dumps((account, rooms)))
        room = None  # type: Union[None, Room]
        lobby = True
        while lobby:
            msg = pickle.loads(conn.recv(1042))
            if msg:
                action, placeholder = msg
                if action == "Join":
                    room_id = placeholder
                    room = join_room(conn, account, room_id)
                    lobby = False
                elif action == "Create":
                    room = create_room(conn, account)
                    lobby = False
                else:
                    print("PLEASE HELP ME")
        # ready phase: wait for this client's "ready", then start the room
        # once every member is ready
        msg = pickle.loads(conn.recv(1024))
        while msg is None:
            msg = pickle.loads(conn.recv(1024))
        if msg == "ready":
            account.ready = True
            if room.is_ready():
                for con in room_cons[room.id]:
                    con.sendall(pickle.dumps("ready"))
                room.running = True
        while not room.running:
            pass
        print("round started")
        # NOTE(review): the comprehension variable shadows the `account`
        # parameter (harmless in py3, but confusing)
        players = [account.player for account in room.accounts]
        player_num = room.accounts.index(account)
        if room.game is None:
            game = Game(players)
            room.game = game
        if room.game.current_round is None:
            current_round = room.game.create_round()
        else:
            current_round = room.game.current_round
        # every snake except this player's own
        initial_players = []
        for i in range(len(current_round.snakes)):
            if i != player_num:
                initial_players.append(current_round.snakes[i])
        if not current_round.start:
            current_round.start_game()
            print("started game")
        message = (current_round.snakes[player_num], initial_players)
        print(message)
        time.sleep(1)
        conn.sendall(pickle.dumps(message))
        # game loop: receive this player's new position, reply with every
        # opponent's head position
        while True:
            try:
                reply = []
                data = pickle.loads(conn.recv(1042))
                if data == "lost":
                    break
                current_round.snakes[player_num].add(data)
                if not data:
                    print("Disconnected")
                    break
                else:
                    for i in range(len(current_round.snakes)):
                        if i != player_num:
                            reply.append(current_round.snakes[i].head)
                    conn.sendall(pickle.dumps(reply))
            except error as e:
                print(e)
                break
        # round over: back to the lobby
        lobby_conns.append(conn)

if __name__ == "__main__":
    main()
| true |
f4f0373a5b79e00e7c3e8040000cb13877726083 | Python | Vincentlz/AnimeArtifactPro | /Utils/CrawlInterfaces/CrawlImpl_05.py | UTF-8 | 3,527 | 2.640625 | 3 | [
"Apache-2.0"
] | permissive | import requests
import re
import json
import threading
from Utils.CrawlUtil import CrawlUtil
"""
爬取资源网
http://www.zuidazy4.com/
"""
class CrawlImpl_05(CrawlUtil):
    """CrawlUtil implementation scraping zuidazy4.com search/detail pages
    with regexes over the raw HTML."""
    URL = 'http://www.zuidazy4.com/index.php'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4287.0 Safari/537.36 Edg/88.0.673.0'
    }
    def search(self, searchword):
        """Search the site for `searchword`; return (as a JSON-ish string)
        up to 16 results with url/cover/title/latest/area fields."""
        url = 'http://www.zuidazy4.com/index.php'
        params = {
            'm': 'vod-search',
            'wd': searchword,
            'submit': 'search'
        }
        resp = requests.get(url=url, params=params, headers=self.headers)
        txt = resp.text
        reg = '<li><span class="tt"></span><span class="xing_vb4"><a href="(.*?)" target="_blank">(.*?)<span>(.*?)</span></a></span> <span class="xing_vb5">(.*?)</span> <span class="xing_vb6">(.*?)</span></li>'
        res = re.findall(reg, txt)
        if len(res) > 16:
            res = res[0:16]
            pass
        result = []
        for item in res:
            result.append({
                'url': 'http://www.zuidazy4.com/' + item[0],
                'cover': 'cover',
                'title': item[1],
                'latest': item[2],
                'area': item[3],
            })
            pass
        # fetch cover-image URLs concurrently, one thread per result
        i = 1
        threads = []
        for item in result:
            thread_get_cover = threading.Thread(target=self._getCover, name='thread_get_cover', args=(result, i))
            threads.append(thread_get_cover)
            thread_get_cover.start()
            i += 1
            pass
        for t in threads:
            t.join()
            pass
        # NOTE(review): naive quote swapping to fake JSON; json.dumps(result)
        # would be the robust way to serialise
        return result.__str__().replace('\'', '\"')
        pass
    def parseSearchResult(self, jsonobj):
        """Normalise parsed search results into display dicts (adds
        placeholder 'time'/'stars' fields)."""
        # print('jsonobj', jsonobj)
        print('进入解析')
        result = []
        for item in jsonobj:
            result.append({
                'url': item['url'],
                'cover': item['cover'],
                'title': item['title'],
                'latest': item['latest'],
                'area': item['area'],
                'time': '时间',
                'stars': '演员'
            })
            pass
        return result
        pass
    def detail(self, func, url):
        """Scrape a detail page, returning {episode number: [name, link]}.
        NOTE(review): the `func` parameter is unused."""
        result = {}
        resp = requests.get(url=url, headers=self.headers)
        txt = str(resp.text)
        # strip whitespace so the regexes can match the collapsed markup
        txt = txt.replace(' ', '').replace('\n', '')
        reg = '<divid="play_1">(.*?)</div'
        res = re.findall(reg, txt)[0]
        reg = r'<li><inputtype="checkbox"name="copy_sel"value=".*?"checked=""/>(.*?)\$(.*?)</li>'
        res = re.findall(reg, res)
        for i in range(1, len(res) + 1):
            value = [res[i - 1][0], res[i - 1][1]]
            result.update({i: value})
            pass
        return result
    def getVideoUrl(self, url):
        # the url is already a direct m3u8 link, so return it as-is
        return url
        pass
    def _getCover(self, result, index):
        """Thread worker: fill in result[index-1]['cover'] in place."""
        result[index - 1]['cover'] = self.getCover(result[index - 1]['url'], 3)
        pass
    def getCover(self, url, timeout):
        """Scrape the lazy-loaded cover image URL from a detail page.
        NOTE(review): the `timeout` parameter is unused."""
        txt = requests.get(url=url, headers=self.headers).text
        txt = txt.replace(' ', '').replace('\n', '')
        reg = r'<imgclass="lazy"src="(.*?)"alt='
        res = re.findall(reg, txt)[0]
        return res
        pass
if __name__ == '__main__':
    # manual smoke test: run one search against the live site
    crawl = CrawlImpl_05()
    crawl.search('legal')
    pass
| true |
3b491ecb58f61ebc2e40d9aaeac40026aedc3f2e | Python | RafaelBroseghini/Pythonista | /04-Algorithms/Infix to Postfix Notation/infix_to_postfix.py | UTF-8 | 1,267 | 3.96875 | 4 | [] | no_license | #!/usr/bin/env python
"""
Implementation of the conversion from infix
to postfix notation using a stack.
"""
from pythonds.basic.stack import Stack
def convert(expression: str) -> str:
    """Convert an infix expression to postfix (RPN) notation.

    Operands are single alphanumeric characters (the original accepted
    only A-D); operators are + - * / with the usual precedence, and
    parentheses group subexpressions.  Tokens are separated by single
    spaces in the output.

    Bug fix: the original drained the operator stack only while it held
    MORE than one entry (`opstack.size() > 1`), so e.g. "A*B+C" produced
    "A B C + *" instead of "A B * C +".  The standard shunting-yard pop
    condition is used instead.  The pythonds Stack was replaced with a
    plain list, removing the third-party dependency.
    """
    output = []
    opstack = []
    # "(" gets the lowest precedence so it is never popped by an operator
    precedence = {"(": 1, "-": 2, "+": 2, "/": 3, "*": 3}
    for token in expression:
        if token.isalnum():
            output.append(token)
        elif token == "(":
            opstack.append(token)
        elif token == ")":
            # pop operators back to (and discarding) the matching "("
            top = opstack.pop()
            while top != "(":
                output.append(top)
                top = opstack.pop()
        elif token in precedence:
            # pop every stacked operator with >= precedence before pushing
            while opstack and precedence[opstack[-1]] >= precedence[token]:
                output.append(opstack.pop())
            opstack.append(token)
    # flush the remaining operators
    while opstack:
        output.append(opstack.pop())
    return " ".join(output)
def main():
    """Demo entry point: convert one sample expression and print it."""
    print(convert("(A+B)*(C+D)"))
if __name__ == "__main__":
    main()
| true |
44fcfb8a3ad6989b97d061b1ff0bdc12f571129f | Python | hill-a/python-runtime-check | /runtime_check/check_type.py | UTF-8 | 4,449 | 3.390625 | 3 | [
"LicenseRef-scancode-public-domain"
] | permissive | """
This module is used for type checking
"""
from typing import List, Union, Dict, Tuple, Any, Set, TypeVar, Callable, Mapping, Iterator, Iterable
import numpy as np
DEEP = False
class _TypeCheckerMeta(type):
    """
    Meta class used for the TypeChecker[] notation, also contains the checking code.
    """
    @classmethod
    def _check_type(mcs, key, val): #TODO: add the remaining typing objects (generator, ...)
        """
        Checks whether a value is of a specific type.
        :param val: (Any)
        :param key: (Type or Typing object)
        :return: (bool) is of type
        """
        if key == Any:
            return True
        # NOTE: typing has no stable public runtime classes; comparing
        # type(key) against type(Union) is fragile across Python versions --
        # TODO confirm against the targeted version.
        elif type(key) == type(Union):
            return any([mcs._check_type(k, val) for k in key.__args__])
        elif isinstance(key, TypeVar):
            return any([mcs._check_type(k, val) for k in key.__constraints__])
        elif issubclass(key, List):
            valid = isinstance(val, List)
            # DEEP toggles recursive element checks for parameterised containers
            if DEEP and valid and key.__args__ is not None:
                return all([mcs._check_type(key.__args__[0], v) for v in val])
            else:
                return valid
        elif issubclass(key, Set):
            valid = isinstance(val, Set)
            if DEEP and valid and key.__args__ is not None:
                return all([mcs._check_type(key.__args__[0], v) for v in val])
            else:
                return valid
        elif issubclass(key, Dict):
            valid = isinstance(val, Dict)
            if DEEP and valid and key.__args__ is not None:
                return all([mcs._check_type(key.__args__[0], k) and
                            mcs._check_type(key.__args__[1], v) for (k, v) in val.items()])
            else:
                return valid
        elif issubclass(key, Tuple):
            # length must match the parameterisation for fixed-size tuples
            valid = isinstance(val, Tuple) and (key.__args__ is None or len(key.__args__) == len(val))
            if DEEP and valid and key.__args__ is not None:
                return all([mcs._check_type(k, v) for k, v in zip(key.__args__, val)])
            else:
                return valid
        elif type(key) == type(Callable): # will not do in depth checking, only shallow.
            return callable(val)
        elif issubclass(key, Mapping): # will not do in depth checking, only shallow.
            # NOTE(review): `map` is the builtin iterator type, not a mapping;
            # this likely should be isinstance(val, Mapping).
            return isinstance(val, map)
        elif issubclass(key, Iterator): # will not do in depth checking, only shallow.
            return isinstance(val, Iterator)
        # NOTE(review): `key is None` is the idiomatic identity test here
        elif key == type(None) or key == None:
            return val is None
        elif val is None:
            return False
        else:
            try:
                return isinstance(val, key)
            except Exception as ex: # pragma: no cover
                print("Error: occured when comparing {} to class {}".format(val, key))
                raise ex
    @classmethod
    def _validater(mcs, key):
        """
        Returns a checking function that checks that a value is allowed by key.
        :param key: (Type or Typing object)
        :return: (callable) function that takes value and will raise an error if not valid
        """
        def check(val):
            """
            Checks that val is valid, will raise an error if not valid.
            :param val: (Any)
            """
            if not mcs._check_type(key, val):
                raise TypeError("Expected {}, got {}".format(key, val.__class__))
        return check
    def __getitem__(mcs, key):
        # TypeChecker[a, b] -> validate against Union[a, b]
        if isinstance(key, (Tuple, List, Set)):
            return mcs._validater(Union[tuple(key)])
        else:
            return mcs._validater(key)
class TypeChecker(object, metaclass=_TypeCheckerMeta):
    """
    Class used to check whether a value is of a specific type.
    ex:
    TypeChecker[int, float](0)
    TypeChecker.numpy_array(numpy.arange(10))
    you may use typing.Union[int, float] for multiple valid types
    or List[int], Dict[str, int], Optional[int].
    """
    @classmethod
    def scalar(cls, val):
        """
        Checks whether val is a number (int or float); raises TypeError otherwise.
        """
        cls._validater(Union[int, float])(val)
    @classmethod
    def numpy_array(cls, val):
        """
        Checks whether val is a numpy array; raises TypeError otherwise.
        """
        cls._validater(np.ndarray)(val)
    @classmethod
    def iterable(cls, val):
        """
        Checks whether val is an Iterable (or numpy array); raises TypeError otherwise.
        """
        cls._validater(Union[np.ndarray, Iterable])(val)
| true |
dfe9c476a61b9b851cecd924167fc159ffa494e7 | Python | qchehuang/python_practice | /requests/main.py | UTF-8 | 244 | 2.625 | 3 | [] | no_license | from urllib import request
def visit_baidu():
    """Fetch the Baidu front page and print its decoded HTML."""
    URL = "http://www.baidu.com"
    # context manager guarantees the HTTP response is closed even if
    # read/decode raises (the original leaked the connection)
    with request.urlopen(URL) as req:
        html = req.read()
    html = html.decode("utf_8")
    print(html)
if __name__ == '__main__':
    # fetch and dump the page when run as a script
    visit_baidu()
print('test') | true |
37b5cd491e0cf5084ce8117848edd39b22246392 | Python | GauDelpech/Projet_M1 | /Question2_3.py | UTF-8 | 2,052 | 2.984375 | 3 | [] | no_license | from Kabuki import Kabuki
import matplotlib.pyplot as plt
import pygame
import sys
toto = Kabuki(0, 0)
def differe(tortue=None, pas=0.1, nb_pas=1000):
    """Offline (deferred) simulation: drive the robot for nb_pas time
    units, then plot its trajectory with matplotlib.

    tortue: the Kabuki to drive; a fresh Kabuki() is created when omitted.
            (Fixes the original's `tortue=Kabuki()` default -- a mutable
            default instance shared by every call.)
    pas:    integration time step.
    nb_pas: total simulated duration (i advances by `pas` per iteration).
    """
    if tortue is None:
        tortue = Kabuki()
    i = 0
    vt = 0.1
    vr = -vt
    while i <= nb_pas:
        # Bug fix: the original commanded the module-level `toto` while
        # stepping `tortue`; drive the robot that was actually passed in.
        tortue.control(vt, vr)
        tortue.new_pos(pas)
        i += pas
    plt.plot(tortue.histx, tortue.histy)
    plt.title('Trajectoire du robot pour vtrans =' + str(vt) + ' et vrot = ' + str(vr))
    plt.show()
def realtime(tortue=None, pas=0.1):
    """Real-time pygame animation (30 fps): draws the robot as a circle
    plus the trail of every past position.

    tortue: the Kabuki to animate; a fresh Kabuki() is created when omitted
            (fixes the original's shared mutable default `tortue=Kabuki()`).
    pas:    simulation time step per displayed frame.
    """
    if tortue is None:
        tortue = Kabuki()
    pygame.init()  # initialise pygame
    clock = pygame.time.Clock()  # clock used to cap the frame rate
    fenetre = pygame.display.set_mode((1920, 1080), pygame.RESIZABLE)  # display window
    fond = pygame.Surface(fenetre.get_size())  # white background surface
    fond.fill((255, 255, 255))  # fill the surface with white
    traine = pygame.Surface(fenetre.get_size())  # surface holding the trail
    traine.set_colorkey((0, 0, 0))  # make the surface's black pixels transparent
    while 1:  # display loop
        tortue.new_pos(pas)  # compute one simulation step
        fenetre.blit(fond, (0, 0))  # paint the background
        for j in range(len(tortue.histx)):  # rebuild the trail
            x = int(tortue.histx[j])
            y = int(tortue.histy[j])
            traine.set_at((int(x/5)+960, -int(y/5)+540), tortue.color)  # plot the trail point
        fenetre.blit(traine, (0, 0))  # paint the trail over the background
        pygame.draw.circle(fenetre, tortue.color, (int(tortue.x/5+960), int(-tortue.y/5+540)), int(tortue.ecart/10))
        # circle representing the robot
        pygame.display.flip()  # present the frame
        clock.tick(30)  # cap at 30 fps
        for event in pygame.event.get():  # quit cleanly when the window is closed
            if event.type == pygame.QUIT: sys.exit()
#realtime(toto)
differe(toto) | true |
e927e8149bce3b44b40d258e4a991128f6256406 | Python | geekylink/coin-scripts | /cloudy/BotLogic.py | UTF-8 | 1,089 | 2.90625 | 3 | [] | no_license | currentBuy = 0
# NOTE(review): everything below is PSEUDOCODE, not runnable Python -- it
# mixes C-style braces, `decision--`/`Factor++` increment operators,
# lower-case `true`, missing colons on `while`/`def`, and undefined names
# (time, math, Cryptsy, buy, sell, LastSell, Buy, Sell). It documents the
# intended DOGE/BTC trading heuristic only and must be rewritten before use.
currentSell = 0
currentBuyVol = 0
currentSellVol = 0
LastBought = 0
LastSold = 0
Factor = 0
c = Cryptsy()
c.update_markets()
while (true)
    time.sleep(30.0)
    c.update_orders_by_market(c.markets["DOGE/BTC"])
    update()
def update()
    currentBuy = c.markets["DOGE/BTC"].buy_orders[0].price
    currentSell = c.markets["DOGE/BTC"].sell_orders[0].price
    currentBuyVol = c.markets["DOGE/BTC"].total_buys_above(float(buy)))
    currentSellVol = c.markets["DOGE/BTC"].total_sells_below(float(sell)))
    checkNumbers()
def checkNumbers()
    decision = 0
    if (math.fabs(currentBuy - currentSell) > 2) {
        decision--
    }
    if (math.fabs(currentBuy - currentSell) < 2) {
        decision++
    }
    if (currentBuyVol > currentSellVol) {
        decision--
    }
    if (currentBuyVol < currentSellVol) {
        decision++
    }
    if (currentSell > LastBought) {
        decision--
    }
    if (currentBuy > LastSell) {
        decision++
    }
    decision = decision + Factor
    Decide(decision)
def Decide(num)
    if (num > 0) {
        Factor = 0
        Buy(CurrentBuy)
    }
    if (num < 0) {
        Factor = 0
        Sell(CurrentSell)
    }
    if (num == 0) {
        Factor++
    }
| true |
aae05045c8ba4975a4dc752cc9ec45f8f8113d2f | Python | bourbon08/my_projecteuler | /1-100/3-ok/compare.py | UTF-8 | 481 | 3.328125 | 3 | [] | no_license | import math
def mys(n):
    """Largest prime factor of n.

    Bug fix: the original returned n // smallest_factor, i.e. the largest
    *divisor*, which is not necessarily prime (mys(12) gave 6, not 3).
    Dividing out the smallest factor and recursing yields the largest
    prime factor, matching otherss().
    """
    i = 2
    while i * i <= n:
        if n % i == 0:
            return mys(n // i)
        i += 1
    return n
def otherss(n):
    """Largest prime factor of n by trial division, dividing out each
    factor as it is found.  (Quirk preserved: returns 2 for n < 3.)"""
    divisor = 2
    best = 2
    while divisor * divisor <= n:
        while n % divisor == 0:
            best = divisor
            n //= divisor
        divisor += 1
    # whatever remains above the best factor found is itself prime
    return n if n > best else best
return largest_prime
# Compare the two implementations over 0..999 and report any disagreement.
for i in range(1000):
    if mys(i)!=otherss(i):
print(str(i)+" "+str(mys(i))+" "+str(otherss(i))) | true |
eb81e9688be45b7f837b179a98cefab82a203feb | Python | thran/the_code | /adventOfCode/2018/19/19fake.py | UTF-8 | 103 | 3.015625 | 3 | [] | no_license | n = 10551389
# n = 989
# Sum of all divisors of n (including n itself).  The original tried every
# a in 1..n with a float test (n / a == n // a), ~10.5M divisions; pairing
# divisors up to sqrt(n) is O(sqrt n) and avoids float comparison entirely.
n = 10551389
s = 0
a = 1
while a * a <= n:
    if n % a == 0:
        s += a
        if a != n // a:
            s += n // a  # the paired divisor
    a += 1
print(s)
| true |
34e890d773fe463af8f7a29f28c3859e6e1db0cc | Python | nanaky92/MongoDB-course | /W1/handling_form_content.py | UTF-8 | 401 | 2.671875 | 3 | [] | no_license | @bottle.post("/favourite_fruit")
def favourite_fruit():
    """Handle the fruit-form POST: echo the submitted fruit (or a fallback
    message) into the fruit_selection template."""
    fruit = bottle.request.forms.get("fruit")
    # `fruit` is None when the field is absent and "" when left blank;
    # treat both as "nothing selected" (replaces the `fruit==None` test)
    if not fruit:
        fruit = "No fruit selected"
    return bottle.template("fruit_selection", {"fruit": fruit})
<form action="/favourite_fruit" method="POST">
    <!-- method="POST" is required: the handler reads bottle.request.forms,
         which is only populated for POST submissions (the original had a
         duplicate action="POST" attribute and so defaulted to GET). -->
    What is your favourite fruit
    <input type="text" name="fruit" size=40 value=""><br>
    <input type="submit" value="Submit">
</form>
| true |
27f7a8d19944a1081b7010dabae4cc2c1cdf1b61 | Python | SathvikPN/beginPython | /starter-exercise/Prime-check.py | UTF-8 | 213 | 3.8125 | 4 | [] | no_license | #Prime Check
def PrimeCheck(n):
    """Return "Prime" if n is prime, otherwise "NOT prime".

    Fixes the original range(2, n // 2) bound: for n = 4 that range is
    empty, so 4 was reported prime.  Also rejects n < 2, which are not
    prime by definition.  Checking up to sqrt(n) is sufficient because
    any divisor above it pairs with one below it.
    """
    if n < 2:
        return ("NOT prime")
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return ("NOT prime")
    return ("Prime")
# Interactive driver: read one integer from stdin and print the verdict.
n = int(input("Number: "))
print(n,"is",PrimeCheck(n))
| true |
9df2e9251e02c086ef219b03c6361d936e57df30 | Python | arnemagnus/_oldversion_physicsproject | /calculation_scripts/hpc-stuff/integrator_efficiency/numerical_integrators/__init__.py | UTF-8 | 2,592 | 3.046875 | 3 | [] | no_license | """ This module contains a variety of numerical integration schemes,
including both single-, multi- and adaptive timestep methods.
The various integrators have different stability properties and
accuracy levels, which means that they are suitable for problems
of different complexities and required levels of precisions.
With the single- and multistep methods, the price one pays for
increasing precision is, put simply, more floating-point
operations. This increases computation time, and increases the
numerical errors in the proposed solutions.
Adaptive timestep methods, on the other hand, can be trickier to
parallellize than their fixed timestep siblings. This is because
two different trajectories are generally traversed with two
different time steps.
All the numerical integrators have the same structure:
def scheme(t, x, h, f, atol, rtol):
[...]
return _t, _x, _h
where t: Current time level
x: Current coordinates, array-like
h: Current time increment (fixed for fixed stepsize
methods, generally variable in adaptive stepsize
methods)
f: Function handle for the derivatives (the RHS of the ODE
system), function signature: f = f(t, x)
atol: Absolute tolerance level (OPTIONAL, AND THAT ONLY FOR
ADAPTIVE STEPSIZE METHODS)
rtol: Relative tolerance level (OPTIONAL, AND THAT ONLY FOR
ADAPTIVE STEPSIZE METHODS)
_t: New time level (fixed stepsize integrators always
take one step forwards, whereas adaptive stepsize
integrators do not if the trial solution is
rejected, returning instead the current time level,
unaltered)
_x: Approximation of the coordinates at the new time level
(always the case for fixed stepsize integrators,
not the case for addaptive stepsize integrators if
the trial solution is rejected, returning instead
the current coordinates, unaltered)
_h: New time increment (unaltered in fixed stepsize
methods, generally increased or decreased in
adaptive stepsize methods)
"""
# Changelog:
# 2017-09-19: File created
#
# Written by Arne Magnus T. Løken as part of a specialization
# project in physics at NTNU, fall 2017.
import numerical_integrators.adaptive_step
import numerical_integrators.multistep
import numerical_integrators.single_step
| true |
8099074bad3e93465748e68cc00f20a1315a1ac4 | Python | biserx/QueueMonitoringService | /main.py | UTF-8 | 8,756 | 2.53125 | 3 | [] | no_license | import urllib, urllib.request, urllib.error, urllib.parse
import time, datetime
import xml.etree.ElementTree as ET
import re
import sys,json
# Source of truth: XML published by the local ticketing machine.
inputXML = "http://192.168.0.2/path/to/xml/with/data.xml"
# Remote endpoints receiving the window configuration and state updates.
configURL = "http://remote.server.com/config.php"
uploadURL = "http://remote.server.com/upload.php"
# Time when the service may reset all counters (when the office is closed)
dailyResetAt = 4 # at hours / 24h format
# The service is polling-based; this is the pause between two polls
sleepDelay = 5 # seconds to wait between two iterations
# When nothing changes in the office (one client spending a long time at a window)
# this forces an upload anyway, so the server knows the service is still alive
forceUpdateInterval = 30 # seconds
# Statistic window: clients-per-hour derived from at most the last 20 samples
statisticInterval = 3600 # seconds
statisticMaxSamplesCount = 20 # samples count
# Mutable module-level state, (re)initialized by resetSystemVariables().
systemStartupTimestamp = time.time()
lastUpdateTimestamp = 0
windowsCount = 0
windows = {}
# ---------------------------------------------------------------
# Classes
class Window:
    """State for one service window parsed from the ticketing machine XML.

    All state is exposed as plain public attributes.  The original class
    also declared accessor methods named identically to the attributes;
    the assignments in __init__ shadowed them on every instance, so the
    methods were unreachable dead code (and would have failed if ever
    called via the class).  They have been removed.
    """
    def __init__(self,name,min,max):
        self.name = name        # window label (bytes; getConfigJSON decodes it)
        self.min = min          # lowest ticket number served by this window
        self.max = max          # highest ticket number served by this window
        self.ticket = ""        # ticket currently being served
        self.queue = 0          # number of people waiting
        # Unix time (seconds) of the last ticket change, via module-level str2int.
        self.timestamp = str2int('%d' % round(time.time()))
        self.statistic_data = []  # durations (s) of the most recent services
        self.statistic = 0        # derived clients-per-statisticInterval rate
# ---------------------------------------------------------------
# System
# Returns 1 if it is time to reset the system
def isDailySystemRestart():
    """Return 1 while inside the ten-second daily reset window, else -1."""
    global dailyResetAt
    now = datetime.datetime.now()
    window_start = now.replace(hour=dailyResetAt, minute=0, second=0, microsecond=0)
    window_end = now.replace(hour=dailyResetAt, minute=0, second=10, microsecond=0)
    return 1 if window_start < now < window_end else -1
# Resets all system settings
def resetSystemVariables():
    """Reset all counters, rebuild the window list from the machine's XML,
    and push the fresh configuration to the server.

    Exits the process on any failure.
    """
    global systemStartupTimestamp, lastUpdateTimestamp
    global windowsCount, windows
    # NOTE(review): statisticData is never defined or used anywhere else —
    # likely a vestigial name; kept for now to avoid surprises.
    global statisticData
    print ("Restarting system now...")
    systemStartupTimestamp = time.time()
    lastUpdateTimestamp = 0
    windowsCount = 0
    windows = {}
    try:
        xml = getCurrentState(inputXML)
        if(xml == -1):
            print ("Unable to retrieve XML error!")
            sys.exit()
        result = configureWindows(xml);
        if (result < 0):
            print("Processing XML error: " + str(result));
            sys.exit()
        uploadConfiguration()
    except Exception:
        # Narrowed from a bare `except:` — the bare clause also caught the
        # SystemExit raised by the sys.exit() calls above, so failures were
        # re-reported with the generic message below instead of their own.
        print ("Crap happend. Exit.")
        sys.exit()
# Function for parsing strings into integers
def str2int(s):
    """Extract the digits of s as an int, negated when s starts with '-'.

    e.g. "2021-04-29" -> 20210429, "-12" -> -12.  Raises IndexError on an
    empty string (callers always pass non-empty attribute/text values).
    The parameter was renamed from `str`, which shadowed the builtin, and
    the regex is now a raw string so the escape is explicit.
    """
    sign = -1 if s[0] == '-' else 1
    return sign * int(re.sub(r"\D", "", s))
# Returns json string for server configuration
def getConfigJSON():
    """JSON payload describing every window's static configuration."""
    global windows
    entries = [{'id': wid,
                'name': windows[wid].name.decode("utf-8"),
                'min': windows[wid].min,
                'max': windows[wid].max} for wid in windows]
    return json.dumps(entries)
# Returns json string for server update
def getUpdateJSON():
    """JSON payload with every window's current ticket, queue and statistics."""
    global windows
    entries = [{'id': wid,
                'ticket': windows[wid].ticket,
                'queue': windows[wid].queue,
                'statistic': windows[wid].statistic,
                'timestamp': windows[wid].timestamp} for wid in windows]
    return json.dumps(entries)
# ---------------------------------------------------------------
# Communication to local ticketing machine
# Function for fetching xml from remote url
def getCurrentState(url):
    """GET `url` and return the open HTTP response object, or -1 on failure.

    The exception clause is narrowed from a bare `except:` so that
    KeyboardInterrupt/SystemExit are no longer silently converted to -1.
    """
    try:
        request = urllib.request.Request(url)
        return urllib.request.urlopen(request, timeout=3)
    except Exception:
        return -1
# These functions work with predefined XML structure
# by the ticketing machine in the waiting room of Student Center.
# -2 : caught error
# -1 : no windows detected
# 0 : no changes
def configureWindows(xml):
    """Build the module-level `windows` dict from the machine's XML.

    Returns 0 on success, -1 when no <rgroup> elements were found, -2 on
    any parse/processing error.  Each window's name is stored as UTF-8
    bytes (getConfigJSON decodes it back).  Note: the locals `id`, `min`
    and `max` intentionally mirror the XML fields but shadow builtins.
    """
    global windows, windowsCount
    try:
        tree = ET.parse(xml)
        root = tree.getroot()
        lst = root.findall("rgroups/rgroup")
        if (len(lst) > 0):
            windowsCount = len(lst)
        else:
            return -1
        for rgroup in lst:
            id = str2int(rgroup.get("id"))
            # Defaults used when the corresponding child element is absent.
            name = "n/a"
            min = "n/a"
            max = "n/a"
            for item in rgroup:
                if (item.tag == "name"):
                    name = item.text.encode('utf-8')
                if (item.tag == "min"):
                    min = item.text
                if (item.tag == "max"):
                    max = item.text
            windows[id] = Window(name, min, max)
        return 0
    except:
        return -2
# -1 : caught error
# 0 : no changes
# >0 : number of changes
def updateWindowsState(xml):
    """Refresh every window's queue/ticket from the XML snapshot.

    Returns the number of windows whose served ticket changed, or -1 on
    any parse/processing error.  Mutates the module-level `windows`
    objects in place, including their rolling service-time statistics.
    """
    global windows
    try:
        changes = 0;
        tree = ET.parse(xml)
        root = tree.getroot()
        lst = root.findall("rgroups/rgroup")
        for rgroup in lst:
            id = str2int(rgroup.get("id"))
            nextTicketOnVendingMachine = "0"
            for item in rgroup:
                if (item.tag == "next"):
                    nextTicketOnVendingMachine = item.text # temp
                if (item.tag == "queue"):
                    if (item.text) :
                        windows[id].queue = str2int(item.text)
                    else :
                        windows[id].queue = 0
            # Currently served ticket = next ticket to be issued minus queue
            # length.  Tickets are either all digits ("042") or have a single
            # letter prefix ("A42"), handled by the two branches below.
            ticket = "XXX"
            if (nextTicketOnVendingMachine.isdigit()):
                ticket = str(str2int(nextTicketOnVendingMachine) - windows[id].queue).zfill(3)
            else:
                ticket = nextTicketOnVendingMachine[0:1] + str(str2int(nextTicketOnVendingMachine[1:3]) - windows[id].queue).zfill(2)
            # Very important when too many people in the queue so the numbers starts from beginning
            # Works only for one looping
            if (ticket < windows[id].min):
                if (nextTicketOnVendingMachine.isdigit()):
                    ticket = str(str2int(ticket) + str2int(windows[id].max) - str2int(windows[id].min)).zfill(3)
                else:
                    ticket = str(str2int(ticket[1:3]) + str2int(windows[id].max[1:3]) - str2int(windows[id].min[1:3])).zfill(2)
            if (windows[id].ticket != ticket):
                # Ticket advanced: record the service duration since the last
                # change and refresh the clients-per-interval statistic.
                windows[id].ticket = ticket
                windows[id].statistic_data.append(str2int('%d' % round(time.time())) - windows[id].timestamp)
                if (len(windows[id].statistic_data) > statisticMaxSamplesCount):
                    windows[id].statistic_data.pop(0)
                if (sum(windows[id].statistic_data) != 0):
                    windows[id].statistic = round( statisticInterval * len(windows[id].statistic_data) / sum(windows[id].statistic_data) , 2)
                else:
                    windows[id].statistic = 0
                windows[id].timestamp = str2int('%d' % round(time.time()))
                changes = changes + 1
        return changes
    except:
        return -1
# ---------------------------------------------------------------
# Communication to server
def uploadConfiguration():
    """Push the window configuration to the server, retrying until it succeeds."""
    global configURL
    payload = getConfigJSON()
    while True:
        status = uploadToServer(configURL, payload)
        if status == 0:
            print("Configuration uploaded sucessfuly!")
            return 0
        print("Configuration upload failed: " + str(status) + ". Repeat.")
def uploadData():
    """Upload the current window state, giving up after three failed retries.

    On success, records the upload time in lastUpdateTimestamp so the main
    loop can enforce forceUpdateInterval.
    """
    global lastUpdateTimestamp, uploadURL
    payload = getUpdateJSON()
    failures = 0
    while True:
        status = uploadToServer(uploadURL, payload)
        if status == 0:
            lastUpdateTimestamp = time.time()
            print("Data upload done")
            break
        if failures >= 3:
            print("Skip this upload..")
            break
        failures += 1
        print("Upload failed: " + str(status) + ". Repeat.")
def uploadToServer(url, data):
    """POST `data` (urlencoded under the key 'data') to `url`.

    Returns 0 on an "OK" reply, the "ERROR..." prefix (up to ';') on an
    error reply, -1 on any request failure, and otherwise the raw
    response text.
    """
    values = {'data' : data}
    try:
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')
        request = urllib.request.Request(url)
        response = urllib.request.urlopen(request, data, timeout=3)
    except:
        # NOTE(review): bare except also hides KeyboardInterrupt; consider
        # narrowing to Exception.
        return -1
    rcvData = response.read().decode('utf-8')
    if (rcvData[0:2] == "OK"):
        return 0
    elif (rcvData[0:5] == "ERROR"):
        return rcvData[0:rcvData.find(";")]
    return rcvData
# ---------------------------------------------------------------
# Main
# At the very beginning it's necessary to reset all system variables
# Main polling loop: runs forever, re-reading the machine's XML every
# sleepDelay seconds and uploading when something changed (or when
# forceUpdateInterval has elapsed, as a heartbeat).
resetSystemVariables()
while(1):
    # Reset system variables if it is specified time
    if (isDailySystemRestart() == 1) :
        resetSystemVariables()
    # Fetch new state from the ticketing machine
    xml = getCurrentState(inputXML)
    # If data received
    if (xml != -1):
        # Update windows with new state
        changes = updateWindowsState(xml)
        print("Changes: " + str(changes))
        # Check if changes exist or is it time to update anyway
        if (changes > 0 or abs(time.time() - lastUpdateTimestamp) > forceUpdateInterval):
            # Upload (new) data
            uploadData()
    else:
        print("Invalid xml data");
    # Sleep for some period
    time.sleep(sleepDelay)
| true |
9d5b24cbfe70cbf2eea5cae2f83feef55b56b36e | Python | enigmatic-cipher/basic_practice_program | /Q66) WAP to calculate body mass index.py | UTF-8 | 198 | 3.671875 | 4 | [] | no_license | print("Body Mass Index Calculator")
# Heights in metres are fractional (e.g. "1.75"); the original int() both
# truncated whole values and crashed with ValueError on decimal input.
height = float(input("Enter your height in Meter: "))
weight = float(input("Enter your weight in kg: "))
# BMI = weight (kg) divided by the square of height (m).
bmi = (weight / height ** 2)
print(f"Your BMI is {bmi}")
| true |
18ea0abdffdcdfc878900ee57cb4fb9bf8151ef1 | Python | GithuiVictor/SkaeHub-Developer-Program-Boot-Camp | /DAY 2/challenge3.py | UTF-8 | 486 | 4.0625 | 4 | [] | no_license | import random
# Guess-the-number game: pick a secret in 1..200, then prompt until matched.
number = random.randint(1, 200)
win = False  # NOTE(review): never set True; the loop only exits via break
attempt = 0
print("Your hit:", number)
while win == False:
    guess = int(input("Enter your guess: "))
    attempt += 1
    if guess == number:
        print('You won!')
        print('Number of attempts: ', attempt)
        break
    else:
        # int() here is redundant — guess is already an int.
        if number > int(guess):
            print('Your guess was low, Please enter a higher number')
        else:
print('Your guess was high, Please enter a lower number') | true |
3949027c8b19170bcce1203ba5da06a11e2b0a21 | Python | amod99/RegressionEnsemble | /src/productionize_regensemble.py | UTF-8 | 1,642 | 2.78125 | 3 | [
"MIT"
] | permissive | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 2 18:02:31 2019
@author: Amod Agashe
This script executes xgboost model for predicting variable 'y'
"""
# lets import all required modules
import numpy as np
import sys
import xgboost
import pandas as pd
from sklearn.externals import joblib
import pickle
import time
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
# Command-line arguments: artifact directory (argv[1]) and test CSV (argv[2]).
base_dir=sys.argv[1]
mxy=839  # scaling constant applied to the target column 'y' at training time
test_file=sys.argv[2]
def custom_accr(arrdiff, mxy):
    """Percent of entries in `arrdiff` within the scaled 3-unit tolerance 3/mxy."""
    within = arrdiff[arrdiff <= 3 / mxy]
    return len(within) * 100 / len(arrdiff)
# Load the column list and the fitted scaler/model artifacts saved at
# training time.  NOTE(review): the file handle below is never closed, and
# sklearn.externals.joblib is deprecated — prefer the standalone joblib.
col_names=open(base_dir+'colnames.pkl','rb')
col_names=pickle.load(col_names)
#print(col_names)
minmaxwts=joblib.load(base_dir+'scaler_model.pkl')
#minmaxwts=pickle.load(minmaxwts)
xgb_model=joblib.load(base_dir+'xgb_model.pkl')
#xgb_model=pickle.load(xgb_model)
### filter only required features ###
test_data=pd.read_csv(test_file)
test_data_x=test_data[col_names]
test_data_y=test_data['y']/mxy  # target scaled by mxy, matching training
## fillna ##
test_data_x=test_data_x.fillna(test_data_x.mean())
### apply minmaxscaler ###
test_data_x=pd.DataFrame(minmaxwts.transform(test_data_x),columns=col_names)
# Time only the predict call.
start_time=time.time()
test_preds=xgb_model.predict(test_data_x)
end_time=time.time()
total_time=end_time-start_time
accr=custom_accr(np.abs(test_preds-test_data_y),mxy)
print("Rmse for test data: ",np.sqrt(mean_squared_error(test_preds,test_data_y)),\
      "Accuracy for test data: ",accr," Time for execution " , total_time,"seconds")
#write results to txt file
test_preds=test_preds*mxy  # un-scale predictions back to original units
with open(base_dir+'predictions_output.txt', 'w') as f:
    for item in test_preds:
        f.write("%s\n" % item)
| true |
da95696e91ab565bb33e15b4fec224dafea20674 | Python | larsh101/FinalDAO | /util/list.py | UTF-8 | 192 | 2.578125 | 3 | [] | no_license | import os
from os import listdir
from os.path import isfile, join
def go():
dir_path = os.path.dirname(os.path.realpath(__file__))
cwd = os.getcwd()
print(dir_path)
print(cwd) | true |
0c49e44833c25e6058f23c50a09beef0eb91bd44 | Python | arnmishra/CSAir | /tests/test_graph.py | UTF-8 | 2,783 | 3.28125 | 3 | [] | no_license | """ Tests for the Graph in graph.py
PyCharm Error Code Documented: https://youtrack.jetbrains.com/issue/PY-20171
"""
import unittest
from framework.graph import *
class TestGraph(unittest.TestCase):
    """Unit tests covering node CRUD and connection handling on Graph."""
    def test_add_nodes(self):
        """ Test that a node can be added and gotten correctly. """
        graph = Graph()
        node = Node("test_data")
        graph.add_node("test_key", "test_data")
        test_node = graph.get_node("test_key")
        self.assertEqual(node.get_data(), test_node.get_data())
    def test_add_connection(self):
        """ Test that connections can be created properly. """
        graph = Graph()
        start_key = "first_test_key"
        start_data = "first_test_data"
        end_key = "second_test_key"
        end_data = "second_test_data"
        graph.add_node(start_key, start_data)
        graph.add_node(end_key, end_data)
        graph.add_connection(start_key, end_key, 0)
        start_node = graph.get_node(start_key)
        connected = start_node.get_connected_nodes()
        self.assertEqual(len(connected), 1)
        self.assertTrue(end_key in connected)
    def test_delete_connection(self):
        """ Test that a connection can be successfully deleted between nodes. """
        graph = Graph()
        start_key = "first_test_key"
        start_data = "first_test_data"
        end_key = "second_test_key"
        end_data = "second_test_data"
        graph.add_node(start_key, start_data)
        graph.add_node(end_key, end_data)
        graph.add_connection(start_key, end_key, 0)
        graph.delete_connection(start_key, end_key)
        all_nodes = graph.get_all_nodes()
        # After deletion, no node on either side should list a neighbour.
        for key in all_nodes:
            node = all_nodes[key]
            self.assertEqual(len(node.get_connected_nodes()), 0)
    def test_delete_node(self):
        """ Test that a node can be deleted correctly. """
        graph = Graph()
        start_key = "first_test_key"
        start_data = "first_test_data"
        end_key = "second_test_key"
        end_data = "second_test_data"
        graph.add_node(start_key, start_data)
        graph.add_node(end_key, end_data)
        graph.add_connection(start_key, end_key, 0)
        graph.delete_node(end_key)
        all_nodes = graph.get_all_nodes()
        self.assertTrue(len(all_nodes) == 1)
        deleted_node = graph.get_node(end_key)
        self.assertFalse(deleted_node)
    def test_modify_node(self):
        """ Test that a node's data can be modified. """
        graph = Graph()
        key = "first_test_key"
        data = "first_test_data"
        modified_data = "second_test_data"
        graph.add_node(key, data)
        graph.set_node(key, modified_data)
        node = graph.get_node(key)
        self.assertEqual(node.get_data(), modified_data)
| true |
5c1a21c51443415222d073870c34dca9c87bf8d4 | Python | jam-web/UsingLists | /main.py | UTF-8 | 223 | 2.546875 | 3 | [] | no_license | websites = ["www.google.com","www.youtube.com","www.dropbox.com","www.quora.com","www.reddit.com","www.instagram.com"]
# Replace the third entry (www.dropbox.com) with Yahoo, then append Facebook.
websites.pop(2)
websites.insert(2,"www.yahoo.com")
websites.append("www.facebook.com")
print(websites) | true |
185017b69253da0628016a388f8fa21fc03e837d | Python | dear-sera/Asia_Ai_Project | /2nd_mini_project/RPA_code/main.py | UTF-8 | 6,826 | 2.59375 | 3 | [] | no_license | from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
import datetime
import requests
import fnmatch
import time
import re
import os
def get_soup(url):
    """Fetch `url` and return its HTML parsed into a BeautifulSoup tree."""
    response = requests.get(url)  # Response object carrying the HTML body
    return BeautifulSoup(response.text, 'html.parser')
#################################################################################
def get_df_data_on_site(soup):
    """Scrape the filing table: return (header labels, list of row-value lists)."""
    header_cells = soup.select('#filing_table > thead td')
    col = [cell.text for cell in header_cells]
    body_rows = soup.select('#filing_table > tbody tr')
    data = [[cell.text for cell in row.select('td')] for row in body_rows]
    return col, data
#################################################################################
def refine_df_and_version(col, data):
    """Clean the scraped filing table and derive a version stamp.

    Splits TransactionDate into a 'YY-MM-DD...' date and a trailing
    transaction-type word, normalizes ReportedDateTime to 'YY-MM-DD HH:mm',
    and returns (df, version) where version comes from the LAST row's
    reported time.  Raises if the table is empty (loop vars unbound).
    """
    df = pd.DataFrame(data, columns=col)
    p_date = re.compile(r'.{10}')       # first 10 chars: the raw date
    p_word = re.compile(r'[a-zA-Z]*$')  # trailing letters: the type code
    try:
        drop_col = ['Filing']
        df.drop(drop_col, axis=1, inplace=True)
    except:
        pass  # the table sometimes has no 'Filing' column
    df['Type'] = df['TransactionDate']
    for i in range(len(df)):
        m_date = p_date.search(df.iloc[i]['TransactionDate'])
        m_word = p_word.search(df.iloc[i]['TransactionDate'])
        Y, M, D, h, m = split_datetime(df['ReportedDateTime'].iloc[i])
        # NOTE(review): chained-indexing assignment can trigger pandas'
        # SettingWithCopyWarning; df.iloc[i, col] would be safer.
        df['Type'].iloc[i] = m_word.group()
        df['TransactionDate'].iloc[i] = m_date.group()[2:]
        df['ReportedDateTime'].iloc[i] = '{}-{}-{} {}:{}'.format(Y, M, D, h, m)
    # Uses the loop variables left over from the final (newest) row.
    version = '.{}.{}.{}.{}.{}'.format(Y, M, D, h, m)
    return df, version
#################################################################################
def set_initial_DB(name, form):
    """Scrape one company's filings and save a fresh versioned Excel file.

    `form` is the SEC CIK inserted into the module-level url_form template;
    `name` becomes part of the output filename (lowercased).
    """
    print('making dataframe.... {}....'.format(name))
    soup = get_soup(url_form.format(form))
    col, data = get_df_data_on_site(soup)
    df, version = refine_df_and_version(col, data)
    dir_DB = 'D:/AI/pjt2/DB'
    filename = 'df_{}{}.xlsx'.format(name.lower(), version)
    print('saving to DB as \'{}\''.format(filename))
    df.to_excel('{}/{}'.format(dir_DB, filename), index=False)
#################################################################################
def split_datetime(datetype): # ex) '2021-04-2911:20 pm' / '2021-04-280:02 am'
    """Split 'YYYY-MM-DDh:mm am/pm' into ('YY','MM','DD','HH','mm') 24-hour strings.

    Fixes the 12-o'clock cases: the original mapped '12 pm' to hour '24'
    and left '12 am' as '12'; in 24-hour time noon is 12 and midnight is 00.
    All other hours behave exactly as before (including the legacy '0 pm'
    -> '00' quirk).
    """
    date_part = datetype[2:10]
    time_part = datetype[10:-3]
    meridiem = datetype[-2:]
    Y, M, D = date_part.split('-')
    h, m = time_part.split(':')
    if meridiem[0] == 'p':
        if h == '0':
            h = '00'          # legacy quirk preserved from the original
        elif h != '12':       # 12 pm is already 12:xx in 24-hour time
            h = str(int(h) + 12)
    else:
        if h == '12':         # 12 am is midnight
            h = '00'
        elif len(h) == 1:
            h = '0' + h       # zero-pad single-digit morning hours
    return Y, M, D, h, m
#################################################################################
def df_name(name):
    """Return the module-level DataFrame bound to the variable 'df_<name>'.

    The original looked the name up in locals(), but a function's locals()
    never contains module-level bindings, so every call raised KeyError;
    globals() is the scope where the df_* variables would actually live.
    """
    return globals()['df_{}'.format(name.lower())]
#################################################################################
def df_from_db(tic, web_df):
    """Load the cached Excel sheet whose filename contains `tic`.

    Returns (cached_df, '') on a cache hit, else (web_df, '[NEW DATA]\n').
    Drops the original's unused compiled regex and the duplicate
    os.listdir call.
    """
    dirname = 'D:/AI/pjt2/DB'
    for db_filename in os.listdir(dirname):
        if fnmatch.fnmatch(db_filename, '*{}*'.format(tic)):
            return pd.read_excel('{}/{}'.format(dirname, db_filename)), ''
    return web_df, '[NEW DATA]\n'
#################################################################################
def version_from_db(tic):
    """Version fields parsed from the cached filename for ticker `tic`.

    Filenames look like 'df_<tic>.YY.MM.DD.HH.mm.xlsx'; the slice extracts
    the dotted timestamp.  NOTE(review): if nothing matches, this returns
    fields from whatever file the loop saw last (or raises NameError on an
    empty directory).
    """
    p = re.compile(tic)
    dirname = 'D:/AI/pjt2/DB'
    filenames = os.listdir(dirname)
    for filename in filenames:
        if p.search(filename): break
    return filename[-19:-5].split('.')
#################################################################################
def version_from_web(df):
    """['YY','MM','DD','HH','mm'] from the last row's 'YY-MM-DD HH:mm' stamp."""
    stamp = df.iloc[-1]['ReportedDateTime']
    pieces = stamp.split(' ')
    return pieces[0].split('-') + pieces[1].split(':')
#################################################################################
def update_new_data(web_df, tic):
    """Merge freshly scraped rows into the cached frame for ticker `tic`.

    Returns (df, log_text, changed_flag).  NOTE(review): the concatenated
    frame is bound to `df_df`, which is never used — the function returns
    the unmerged db_df, so new rows are counted but not actually appended.
    The loop variable `datetime` also shadows the imported datetime module
    inside this function.
    """
    i = 0
    db_df, note = df_from_db(tic, web_df)
    if version_from_db(tic) != version_from_web(web_df):
        # Walk the web rows newest-first until the cached tail row is found.
        for datetime in web_df[::-1]['ReportedDateTime']:
            if db_df.iloc[-1]['ReportedDateTime'] == datetime:
                df_df = pd.concat([db_df, web_df.iloc[-(i + 1):-1]])
                break
            else:
                i += 1
        return db_df, note + txt_log(tic, i), 1
    else:
        return db_df, note + txt_log(tic, i), 0
#################################################################################
def save_db(df, name, version):
    """Save df to the DB folder as 'df_<name><version>.xlsx'.

    If an older file for the same name exists, it is renamed to the new
    versioned filename first, then overwritten with the fresh data.
    """
    dir_DB = 'D:/AI/pjt2/DB'
    newname = 'df_{}{}.xlsx'.format(name.lower(), version)
    filename = newname
    for db_filename in os.listdir('D:/AI/pjt2/DB'):
        if fnmatch.fnmatch(db_filename, '*{}*'.format(name)):
            filename = db_filename
            break
    if filename != newname:
        os.rename('{}/{}'.format(dir_DB, filename), '{}/{}'.format(dir_DB, newname))
    print('saving to DB as \'{}\''.format(newname))
    df.to_excel('{}/{}'.format(dir_DB, newname), index=False)
#################################################################################
def txt_log(tic, update_count):
    """One-line log entry reporting a ticker's update count."""
    return f"in '{tic}', {update_count} data(s) updated\n"
#################################################################################
def make_log(txt, cnt):
    """Write `txt` to a timestamped log file named 'log.<ts>(<cnt>).txt'.

    Uses a with-block so the handle is closed even if the write fails
    (the original leaked the handle on error).  NOTE(review): the '%F'
    strftime code is not portable to Windows' CRT; given the D:/ paths
    used here, consider '%Y-%m-%d' instead.
    """
    dir_DB = 'D:/AI/pjt2/log'
    now = datetime.datetime.now()
    file_name = 'log{}({}).txt'.format(now.strftime('.%F.%H.%M.%S'), cnt)
    full_name = '{}/{}'.format(dir_DB, file_name)
    with open(full_name, 'w') as f:
        f.write(txt)
#################################################################################
if __name__ == '__main__':
    # Company name -> SEC CIK used to build each secform4 insider-trading URL.
    url_lib = {'TESLA': '1318605',
               'APPLE': '320193',
               'PALANTIR': '1321655',
               'COUPANG': '1834584',
               'UNITY': '1810806',
               'GOLUB_CAPITAL_BDC': '1476765',
               'AMAZON': '1018724',
               'DATADOG': '1561550'}
    url_form = 'https://www.secform4.com/insider-trading/{}.htm'
    txt_for_log = ''
    update_count = 0
    for name, form in url_lib.items():
        name = name.lower()
        # Scrape, clean, merge with the cached copy, then persist.
        soup = get_soup(url_form.format(form))
        col, data = get_df_data_on_site(soup)
        df, version = refine_df_and_version(col, data)
        df, txt, cnt = update_new_data(df, name)
        txt_for_log += txt
        update_count += cnt
        save_db(df, name, version)
    make_log(txt_for_log, update_count)
    print('\n\ncomplete\n\nend in...')
    # Countdown 3..0 so the console output stays readable before exit.
    for i in range(3, -1, -1):
        time.sleep(1)
        print(i)
| true |
809e901bd59261096a299234c7fc7e45d996f5c1 | Python | Nithi07/hello_world | /sum&avg.py | UTF-8 | 336 | 4.25 | 4 | [] | no_license | """ 8. (3)Given a string, return the sum and average of the digits that appear in the string,
ignoring all other characters
"""
# Collect every digit character of the input as an int.
message = input('Enter: ')
digits = [int(ch) for ch in message if ch.isdigit()]
tot = sum(digits)
# Guard the empty case: a digit-free string previously crashed with
# ZeroDivisionError; report an average of 0 instead.
ave = tot / len(digits) if digits else 0
print(f'sum of digit: {tot}\n average of digits: {ave}')
| true |
f4c17642679c2fd50a93687af532c6cbc9789fb2 | Python | toiaydcdyywlhzvlob/backpack | /libraries/backpack/backpack/utils/utils.py | UTF-8 | 1,156 | 2.578125 | 3 | [] | no_license | """Utility functions."""
import opt_einsum as oe
import torch
# Backend selector keys; BPEXTS_EINSUM picks which EINSUMS entry einsum() uses.
TORCH = "torch"
OPT_EINSUM = "opt_einsum"
BPEXTS_EINSUM = "torch"
def _oe_einsum(equation, *operands):
    """einsum via opt_einsum's contract, using torch as the backend."""
    # handle old interface, passing operands as one list
    # see https://pytorch.org/docs/stable/_modules/torch/functional.html#einsum
    if len(operands) == 1 and isinstance(operands[0], (list, tuple)):
        operands = operands[0]
    return oe.contract(equation, *operands, backend='torch')
# Dispatch table mapping backend name -> einsum implementation.
EINSUMS = {
    TORCH: torch.einsum,
    OPT_EINSUM: _oe_einsum,
}
def einsum(equation, *operands):
    """`einsum` implementations used by `backpack`.
    Modify by setting `backpack.utils.utils.BPEXTS_EINSUM`.
    See `backpack.utils.utils.EINSUMS` for supported implementations.
    """
    # Dispatch through the module table so the backend can be swapped at runtime.
    return EINSUMS[BPEXTS_EINSUM](equation, *operands)
def random_psd_matrix(dim, device=None):
    """Random positive semi-definite matrix on device (CPU by default)."""
    if device is None:
        device = torch.device("cpu")
    raw = torch.randn(dim, dim, device=device)
    symmetric = 0.5 * (raw + raw.t())
    # Shifting by dim * I pushes the spectrum into the non-negative range.
    return symmetric + dim * torch.eye(dim, device=device)
| true |
e6a6c25106548c1591d35d2de85bffa4a447c440 | Python | jasperan/python | /ex42_inheritance.py | UTF-8 | 1,282 | 3.71875 | 4 | [] | no_license | class Animal(object):
pass
# Simple inheritance demo (Python 2 era; classes still valid in Python 3).
class Dog(Animal):
    def __init__(self, name):
        self.name = name
class Cat(Animal):
    def __init__(self, name):
        self.name = name
        self.hairs = None  # set later via setHairs or direct assignment
    def setHairs(self, hairs):
        self.hairs = hairs
class Person(object):
    def __init__(self, name):
        self.name = name
        self.pet = None  # assigned later; may hold any Animal
class Employee(Person):
    def __init__(self, name, salary):
        # Delegate name/pet initialization to Person, then add salary.
        super(Employee, self).__init__(name)
        self.salary = salary
class Fish(object):
    pass
class Salmon(Fish):
    pass
class Halibut(Fish):
    pass
# Demo driver.  NOTE(review): the statement-form `print` below is Python 2
# syntax and will not parse under Python 3.
rover = Dog('Rover')
satan = Cat('Satan')
mary = Person('Mary')
mary.pet = satan
frank = Employee('Frank', 12000)
frank.pet = rover
flipper = Fish()
crouse = Salmon()
harry = Halibut()
print rover.name
print satan.name
print mary.name
print 'Mary\'s pet name %s and number of hairs: %s' % (mary.pet.name, mary.pet.hairs)
print 'Changing number of hairs to mary\'s pet to 500'
mary.pet.hairs = 500
print 'New number of hairs for pet %s: %s' % (mary.pet.name, mary.pet.hairs)
print 'Information about super with Employee - Person:'
print 'Frank is an employee.'
print 'Printing information about frank. Name: %s, Pet: %s, Salary: %s' % (frank.name, frank.pet.name, frank.salary)
print 'Frank\'s attribute is salary, and inherits name and pet from its superclass'
| true |