blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
133
path
stringlengths
2
333
src_encoding
stringclasses
30 values
length_bytes
int64
18
5.47M
score
float64
2.52
5.81
int_score
int64
3
5
detected_licenses
listlengths
0
67
license_type
stringclasses
2 values
text
stringlengths
12
5.47M
download_success
bool
1 class
6e9465f131258246cc44866d50dd3e24bf517224
Python
indraastra/hanji-sandbox
/app/lib_kanji.py
UTF-8
1,654
2.6875
3
[]
no_license
import os import xmltodict import flask __ROOT_DIR__ = os.path.dirname(os.path.abspath(__file__)) SVG_DIR = os.path.join(__ROOT_DIR__, '..', 'data', 'kanji') filters = flask.Blueprint('filters', __name__) def path_to_kanji(kanji): filename = "{:05x}.svg".format(ord(kanji[0])) path = os.path.join(SVG_DIR, filename) if os.path.exists(path): return path def load_svg(kanji): svg_file_name = path_to_kanji(kanji) if not svg_file_name: return with open(svg_file_name) as svg: return svg.read() def parse_svg(svg): if not svg: return svgd = xmltodict.parse(svg) return svgd @filters.app_template_filter() def unparse_svg(svgd): svg = xmltodict.unparse(svgd, full_document=False) return svg @filters.app_template_filter() def kanji_to_svg(kanji): svg = load_svg(kanji) if not svg: return svgd = parse_svg(svg) if not svgd: return svgd['svg']['@width'] = '300' svgd['svg']['@height'] = '300' return unparse_svg(svgd) def extract_strokes(svgd): svg = svgd['svg'] g = svg['g'] assert(len(g) == 2) strokes = [] def _extract_strokes(svgd): for k, v in svgd.items(): if k == 'path': if isinstance(v, list): vs = v strokes.extend({k: v} for v in vs) else: strokes.append({k: v}) elif isinstance(v, xmltodict.OrderedDict): _extract_strokes(v) elif isinstance(v, list): for s_v in v: _extract_strokes(s_v) _extract_strokes(svgd) return strokes
true
207b2e89791b5085d609228f6a53d499811b0931
Python
thecipherrr/Assignments
/Hapax legomenon/Using files with python.py
UTF-8
658
3.25
3
[]
no_license
# program for finding hapax legomenons from string import punctuation def find_hapax(): file = open(' monthy python.txt', 'r') text = file.read().lower().replace("\n"," ") filtered = "" for words in text: if words not in punctuation: filtered += words text = filtered data = text.split(" ") counter = {} for i in data: if i not in counter: counter[i] = 1 else: counter[i] += 1 hapax_counter = [] for j in counter: if counter[j] == 1: hapax_counter.append(j) return hapax_counter print(find_hapax())
true
e7468925146ad3050ae6e1eb946b4af39df4af1f
Python
nerohuang/LeetCode
/Array and string class/Diagonal Traverse.py
UTF-8
2,371
3
3
[]
no_license
class Solution: def findDiagonalOrder(self, matrix: List[List[int]]) -> List[int]: if matrix == []: return [] m_col = len(matrix) m_row = len(matrix[0]) col_c = 0 row_c = 0 m_size = m_col * m_row m_sort = [] result = [] sort_list = [] if m_col > m_row : m_min = m_row else: m_min = m_col if m_size == 1: m_sort.append(matrix[0][0]) return(m_sort) if m_size == 2: if m_col > m_row: m_sort.append(matrix[0][0]) m_sort.append(matrix[1][0]) else: m_sort.append(matrix[0][0]) m_sort.append(matrix[0][1]) return(m_sort) sort_list = [1, 1] count = 1 m_size = m_size - 2 while m_size >0: if count + 1 <= m_min: count = count + 1 if m_size - 2*count >= 0: sort_list.insert(int(len(sort_list)/2),count) sort_list.insert(int(len(sort_list)/2),count) m_size = m_size - 2*count else: sort_list.insert(int(len(sort_list)/2),m_size) m_size = 0 print(sort_list) for i in range(len(sort_list)): if ((i + 1) % 2 == 1): for l in range(sort_list[i]): print(sort_list[i], l, col_c, row_c) result.append(matrix[col_c][row_c]) if (col_c - 1 >= 0) and (row_c + 1 < m_row): col_c = col_c - 1 row_c = row_c + 1 if row_c + 1 < m_row: row_c = row_c + 1 else: col_c = col_c + 1 else: for l in range(sort_list[i]): print(sort_list[i], l, col_c, row_c) result.append(matrix[col_c][row_c]) if row_c - 1 >= 0 and (col_c + 1 < m_col): col_c = col_c + 1 row_c = row_c - 1 if col_c + 1 < m_col: print(col_c) col_c = col_c + 1 else: row_c = row_c + 1 return result
true
acd60f142c7ab34a7aa63c747f4bd202eba2b23c
Python
luca16s/INF1025
/RECURSIVIDADE/numero9.py
UTF-8
696
3.5
4
[]
no_license
def geraTermos(numero): if numero == 1: return numero if numero%2 == 0: print(str(numero)) return geraTermos(numero/2) else: print(str(numero)) return geraTermos(numero*3+1) def geraTermosEComprCiclo(numero, count=0): if numero == 1: print(str(numero)) count +=1 return count if numero%2 == 0: print(str(numero)) count +=1 return geraTermosEComprCiclo(numero/2, count) else: print(str(numero)) count +=1 return geraTermosEComprCiclo(numero*3+1, count) #print(str(geraTermos(22))) print('A quantidade de iterações foi de: %s'%str(geraTermosEComprCiclo(22)))
true
cad23e2b0dcbd03648ff9fb547e2522f32a2711a
Python
AdamZhouSE/pythonHomework
/Code/CodeRecords/2385/60714/245386.py
UTF-8
213
3.140625
3
[]
no_license
n = int(input()) a = 2 b = 3 ans = [a, b] for i in range(0, 98): temp = b b = a + b a = temp ans.append(b) for i in range(0, n): temp = int(input()) print(ans[temp - 1] % (pow(10, 9) + 7))
true
b7c14351b8e7f6251bd46969aa0c0e1100633b81
Python
Wojtascythe/roboty-ratownicze
/app/src/consts.py
UTF-8
623
2.859375
3
[]
no_license
from enum import Enum class RobotStatus(Enum): """ Flaga wysyłana od sterownika nadrzędnego do sterownika robota """ STOP = 1 RUN = 2 class RobotNotification(Enum): """ Informacja od robota przesyłana do sterownika nadrzędnego """ NONE = 0 ARRIVED = 1 FOUND_HUMAN = 2 FOUND_OBSTACLE = 3 WANT_RUN = 4 class MapObject(Enum): """ Objekty przechowywane w mapie Lokalizacja robotów oznaczana poprzez indeks robota (wartość dodatnia) """ EMPTY = -1 WALL = -2 OBSTACLE = -3 HUMAN = -4 VISITED = -5 STEPS = -6
true
5c76df8132f48510fe9898611e70e00f7538047a
Python
katadh/leiaS14
/semantics/linking.py
UTF-8
2,974
3
3
[]
no_license
from itertools import chain ### access => generate permu of index to generate element of the access def permu(sizeSlot,sizeFiller,distList=[],access=[]): if distList==[]: distList=[sizeFiller]*sizeSlot return helper(distList,used=[0]*(sizeFiller+1),access=access) ## Generate permu that allows repeat element def productRule(distList=[]): return helper(distList,repeat=True) def helper(permuList,repeat=False,size=0,used=[0]*100,current=[],access=[]): temp = [] if size == len(permuList): return [current] for i in range(permuList[size]): if repeat: temp = temp + helper(permuList,repeat,size+1,used,current+[i]) elif used[i]==0: used[i]=1 if access==[]: temp = temp +helper(permuList,repeat,size+1,used,current+[i]) else: temp = temp + helper(permuList,repeat,size+1,used,current+[access[i]],access=access) used[i]=0 return temp ### Generate a list of concept except itself def getRestList(indexOut,size): temp = [] for n in range(size+1): if n!=indexOut: temp.append(n) return temp #Input = list of the concept #Output = list of all possible instance def findAllLinking(listConcept): listInstance = [] listPermu = [] ### generate all the combination of the assignment sizeFiller = len(listConcept)-1 index = 0 for Con in listConcept: if type(Con) is tuple: Con = Con[0] currentList = getRestList(index,sizeFiller) sizeSlots = len(Con.class_slots()) p = permu(sizeSlots,sizeFiller,access=currentList) listPermu += [p] index = index+1 sizeList =[] for l in listPermu: sizeList.append(len(l)) ### generate all combination of orders of assignment OrderList = productRule(sizeList) for i in OrderList: #Generate new set of Instance InstanceList =[] for j in listConcept: if type(j) == tuple: if j[1] == []: temp = j[0]() else: temp = j[0](j[1]) else: temp = j() InstanceList.append(temp) status = True for j in range(len(InstanceList)): ind = 0 for slot in InstanceList[j].slots(): ### Example ### [ [[]] , [[0,2]],[2,0]], [[0,1], [1,0]] ] ### listPermu[j] -> get the list of permu of j instance ### 
i[j] -> get the number of the possible permu ### ind -> get the slot to be filled indexIn = listPermu[j][i[j]][ind] status = status and InstanceList[j].slots()[slot].fill(InstanceList[indexIn]) ind = ind+1 if status: listInstance.append(InstanceList) return listInstance
true
8d9fdbf694ffa39fd6a5922e7923902c200180f1
Python
viktorradnai/flightgear-calibrationsphere
/sphere.py
UTF-8
4,591
2.609375
3
[]
no_license
#!/usr/bin/python import sys import logging import argparse logger = logging.getLogger(__name__) materials = '''MATERIAL "White" rgb 1.0000 1.0000 1.0000 amb 0.2000 0.2000 0.2000 emis 1.0000 1.0000 1.0000 spec 0.5000 0.5000 0.5000 shi 10 trans 0.0000 MATERIAL "Black" rgb 0.0000 0.0000 0.0000 amb 0.2000 0.2000 0.2000 emis 0.0000 0.0000 0.0000 spec 0.5000 0.5000 0.5000 shi 10 trans 0.0000 ''' def parse_cmdline(): parser = argparse.ArgumentParser(description=''' This script will replace the materials in an AC3D file to contain a black and white chessboard pattern.''' ) parser.add_argument('-v', '--verbose', action='store_true', help="Enable verbose output") parser.add_argument('-q', '--quiet', action='store_true', help="Output errors only") parser.add_argument('infile', help="Input AC3D file") parser.add_argument('outfile', help="Output AC3D file") args = parser.parse_args() if args.verbose: loglevel = logging.DEBUG elif args.quiet: loglevel = logging.ERROR else: loglevel = logging.INFO logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s') return args def main(): global materials args = parse_cmdline() with open(args.infile) as f: with open(args.outfile, 'w') as of: faces = [] state = 0 # 1 means inside SURFace node, 0 means everything else poly = { 'mat': '', 'refs': 0, 'vertices': [] } for l in f: if 'MATERIAL' in l: of.write(materials) materials = '' # really dodgy way of making sure this is only printed once continue if 'OBJECT' in l: state = 0 faces = [] if 'SURF ' in l or 'kids' in l: state = 1 if poly['refs']: # we've processed a SURFace previously for f in faces: # Find number of common vertices between surfaces intersect = len(set(poly['vertices']) & set(f['vertices'])) if intersect > 2: # A chessboard-compatible object should not have two faces with more than two common vertices logger.error("3D Object is incompatible because it has faces that share more than one line. 
Common vertices: %s", intersect) exit(1) elif intersect == 2: # two common vertices, these are neighbours and should have different colours logger.debug('neighbour found') if f['mat'] == poly['mat']: logger.debug('material swapped') poly['mat'] = int(not f['mat']) logger.debug(f) logger.debug(poly) elif intersect == 1: # one common vertex, these are joined at one corner and should have the same colour logger.debug('opposite found') if f['mat'] != poly['mat']: logger.debug('material swapped') poly['mat'] == f['mat'] logger.debug(f) logger.debug(poly) # Write out SURFace node of.write("SURF 0X10\n") of.write("mat {0}\n".format(poly['mat'])) of.write("refs {0}\n".format(poly['refs'])) for v in poly['vertices']: of.write("{0} 0 0\n".format(v)) faces.append(poly) # save for comparison later poly = { 'mat': '', 'refs': 0, 'vertices': [] } # wipe out SURFace data that was written if 'kids' in l: # this seems to indicate the end of an OBJECT node state = 0 if state == 0: # non-SURF mode of.write(l) elif state == 1: # SURF mode t = l.split() # Tokenise line if t[0] == 'mat': poly['mat'] = int(t[1]) elif t[0] == 'refs': poly['refs'] = int(t[1]) elif t[0] == 'SURF': continue else: poly['vertices'].append(int(t[0])) sys.exit(0) # call main() if __name__ == '__main__': main()
true
e6c4de3b813b2d560b0bed8918004ab210e06a75
Python
marcusljx/algorithms
/datastructures/graph/graph.py
UTF-8
630
3.125
3
[ "MIT" ]
permissive
import random from collections import namedtuple from datastructures.graph import node linking_iterations = 20 def random_graph(size): nodes = [node.Node(data=i) for i in range(size)] random_group = lambda g,p : [random.choice(g) for _ in range(int(size * p))] edges = [] for _ in range(linking_iterations): r1 = random_group(nodes, 0.1) r2 = random_group(nodes, 0.15) for n in r1: edges += [n.link(target) for target in random_group(r2, 0.4) if n != target and not n.connects_to(target)] g = namedtuple("Graph", "V E") g.E = edges g.V = nodes return g
true
e81c2638188e373ee9a4886c8e4e1534feca6b95
Python
yasirroni/myNafiun
/lib/no_corner_square.py
UTF-8
347
3.609375
4
[ "MIT" ]
permissive
def no_corner_square(number_of_x): if number_of_x > 0: print(f' {"x"*number_of_x} ') for _ in range(number_of_x): print(f'x{" "*number_of_x}x') print(f' {"x"*number_of_x} ') else: print('Please insert a valid number') if __name__=='__main__': number_of_x=5 no_corner_square(number_of_x)
true
6b2e50eb6a9708ce4aab97e9fbaef2db8f15b426
Python
miyajan/atcoder
/ABC-257/b/main.py
UTF-8
490
2.75
3
[]
no_license
import sys def main(): readline = sys.stdin.readline n, k, q = map(int, readline().split()) a = list(map(int, readline().split())) l = list(map(int, readline().split())) for i in range(q): index = l[i] - 1 if index == len(a) - 1 and a[index] == n: continue if index < len(a) - 1 and a[index] + 1 == a[index + 1]: continue a[index] += 1 print(" ".join(map(str, a))) if __name__ == '__main__': main()
true
eb79740c8f30003cf385d81dc72c3e297940d908
Python
zfanli/marucat
/tests/try_test.py
UTF-8
524
2.546875
3
[ "MIT" ]
permissive
from pymongo import MongoClient from bson import ObjectId def concurrency_update(): """Test concurrency update Test $inc :return: """ mc = MongoClient() col = mc['blog']['articles'] _id = '5b432a42f04705565525529d' col.update_one( {'_id': ObjectId(_id)}, {'$set': {'views': 0}} ) for x in range(10000): col.update_one( {'_id': ObjectId(_id)}, {'$inc': {'views': 1}} ) if __name__ == '__main__': concurrency_update()
true
888398d726d660457b5af553c152ef582692fc1d
Python
lenarhus/opengift.io
/PManager/classes/git/diff_parser.py
UTF-8
3,619
2.71875
3
[ "Apache-2.0" ]
permissive
# -*- coding:utf-8 -*- __author__ = 'Tonakai' import re from PManager.classes.git.file_diff import FileDiff class DiffParser(object): DELETED_LINE_START = "-" CREATED_LINE_START = "+" _raw = None _raw_length = 0 message_end_offset = 0 _hash = None _author = None _date = None _message = None _files = None def __init__(self, raw): if raw is None or len(raw) <= 0: raise IOError("Cannot parse empty diff") if not raw.startswith("commit"): raise IOError("Cannot parse unparsable diff") self.parse(raw) def raw(self, line): if self._raw_length <= line: return '' return self._raw[line] def parse(self, raw): self._raw = raw.split('\n') self._raw_length = len(self._raw) self.__load_message() self.__load_author() self.__load_date() self.__load_hash() self.__load_files() @staticmethod def __parse_binary(file_obj, m): mode = False (old_path, new_path) = m.groups() if old_path == '/dev/null': mode = "C" file_obj['path'] = new_path elif new_path == '/dev/null': mode = "D" file_obj['path'] = old_path else: file_obj['path'] = new_path file_obj['action'] = mode file_obj['diff'] = False file_obj['lines'] = [] file_obj['summary'] = { "deleted": 1 if mode == "D" else 0, "created": 1 if mode == "C" else 0, "binary": True } return file_obj @property def hash(self): return self._hash @property def author(self): return self._author @property def commit_message(self): return self._message @property def date(self): return self._date @property def message(self): return self._message @property def files(self): return self._files def __load_hash(self): self._hash = self.raw(0).replace("commit ", "") def __load_author(self): data = self.raw(1).replace("Author: ", "") ts = data.split("<") self._author = {"name": ts[0].strip(" "), "email": ts[1].strip(" ><")} def __load_date(self): data = self.raw(2).replace("Date: ", "").strip(" ") self._date = data def __load_message(self): message_ends = False data = "" i = 3 while not message_ends: data += self.raw(i).strip(" \t") + "\n" i += 1 if 
re.search('diff --git', self.raw(i)): message_ends = True self.message_end_offset = i self._message = data.strip(" \n\r") def __load_files(self): self._files = self.__parse_files(self.message_end_offset) def __get_files_diff(self, start): files = [] _file = [] for i in range(start, self._raw_length): if self.raw(i).startswith('diff --git') and len(_file) > 0: files.append(_file) _file = [self.raw(i)] else: _file.append(self.raw(i)) if len(_file) > 0: files.append(_file) return files def __parse_files(self, start): files = self.__get_files_diff(start) files_parsed = [] for f in files: files_parsed.append(FileDiff(f)) return files_parsed
true
eeb7b2130f315dd59e2daa445c30f08acb4b02e1
Python
NazmulHayat/Competitive-Programming
/CodeForces And Random Practice Problems/removeduplicates.py
UTF-8
140
3.046875
3
[]
no_license
for T in range(1,int(input())+1): s=input() S="" for i in s: if i not in S:S+=i print("Case #%d:"%T) for i in S:print(i, s.count(i))
true
1baaadb5cb2e5651e130b53c9813c6504d788709
Python
jamesjakeies/python-api-tesing
/python3_libraries/asyncio/asyncio_call_later.py
UTF-8
670
2.96875
3
[]
no_license
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Author: xurongzhong#126.com wechat:pythontesting qq:37391319 # CreateDate: 2018-1-26 # From https://pymotw.com/3/asyncio/coroutines.html # asyncio_call_later.py import asyncio def callback(n): print('callback {} invoked'.format(n)) async def main(loop): print('registering callbacks') loop.call_later(0.2, callback, 1) loop.call_later(0.1, callback, 2) loop.call_soon(callback, 3) await asyncio.sleep(0.4) event_loop = asyncio.get_event_loop() try: print('entering event loop') event_loop.run_until_complete(main(event_loop)) finally: print('closing event loop') event_loop.close()
true
9ffa95ca3da2bd0e9c24087d54309c62d09577cb
Python
lj1064201288/dell_python
/网上练习题/lx000.py
UTF-8
999
3.265625
3
[]
no_license
import string, random, pymysql forselect = string.ascii_letters + '1234567890' def generate(count, lenght): ''' :param count: 激活码的个数 :param lenght: 激活码的长度 :return: 激活码 ''' for i in range(count): RE = '' for x in range(lenght): RE += random.choice(forselect) yield RE def MySQL(code): db = pymysql.connect(host='localhost', user='root', password='123456', port=3306, db='codes') cursor = db.cursor() cursor.execute('CREATE TABLE IF NOT EXISTS code (id VARCHAR(255) NOT NULL, content VARCHAR(255) not null, PRIMARY KEY (id))') try: sql = 'insert into code(content)VALUES ({})'.format(code) cursor.execute(sql) db.commit() print('插入成功') except: db.rollback() print('插入失败') else: cursor.close() db.close() if __name__ == '__main__': code = generate(200,20) print(list(code)) MySQL(code)
true
9102c3edd28ad146c35c192f74b6a09b48538b9a
Python
baranee-18/Data-Structures-and-Algorithms
/maximum-units-on-a-truck/maximum-units-on-a-truck.py
UTF-8
442
2.984375
3
[]
no_license
class Solution: def maximumUnits(self, boxTypes: List[List[int]], truckSize: int) -> int: boxTypes.sort(key = lambda x: x[1], reverse = True) res = 0 for i in boxTypes: if i[0] > truckSize: res += min(truckSize, i[0]) * i[1] truckSize = 0 break else: res += (i[0] * i[1]) truckSize -= i[0] return res
true
d67ce222f65db0108d159c2f08fea261b3d6d1bb
Python
AustinKuture/AKbase2233
/akbase2233/AKbase2233.py
UTF-8
16,769
3.046875
3
[]
no_license
# coding=utf-8 """ @header encrpyt_decrpyt_sheet.py @abstract 加密设计思路:1, 需要自己做一个加密表,表的长度为29位(26个英文字母加3个特殊符号 +,/,=)main_sheet, 宽度是一个22位的辅助查询表sub_sheet,另加一个索引坐标定位表 number_sheet 2, 加密: * 1> 使用base32先对要加密的字符串(大写)进行编码 * 2> 加密时是逐个字符进行加密的 * 3> 获取字符在number_sheet中的坐标索引x * 4> 计算结果坐标索引y, mn = 28 - x * 5> 分解mn,取n, 则结果坐标索引 y = 21 - n 注,28, 21分别为main_sheet, sub_sheet的索引最大值(索引从0开始)根据实际情况进行调整 * 6> 根据计算出的结果坐标索引 (x,y),从main_sheet去查询 A = main_sheet[y][x] * 7> 对y值作为索引查询 sub_sheet, B = sub_sheet[y] * 8> 则加密结果为 AB * 9> 例如 加密字符 S -> x=18 mn=28-18=10 n=0 y=21-n=21 A=main_sheet[21][18]=J B=[sub_sheet][21]=W => JW * 10> 对于数字字符a,直接计算B=21-int(a) 从sub_sheet中查询 A=sub_sheet[B],获取A后再在前面补"-", a => -A 3, 解密: * 1> 解密时先将字符串两两一组进行分割 * 2> 根据两个字符再进行解密,例如 AB 先用于B从 sub_sheet获取索引值y * 3> 根据索引值y,从main_sheet中找到相应的行 * 4> 根据行号y,找到的那行再获取A在该行的索引值x ,x=main_sheet[sub_sheet[B]].index(A) * 5> 使用索引值x从number_sheet中获取解密的字符 * 6> 对于数字型字符, -A, 直接a = 21 - sub_sheet[A]即可 * 7> 最后将所有解密后的字符拼接,再进行base32解码即可 4, 随机辅助查询表, 新的功能中加入了随机更换辅助查询表的功能 @MyBlog: http://www.kuture.com.cn @author Created by Kuture on 2020/9/21 @version 0.1.5 2020/9/21 Creation() @Copyright © 2020年 Mr.Li All rights reserved """ import pickle from random import randint from base64 import b32encode, b32decode class SecrecSheet(object): def __init__(self): self._main_sheet = [ ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', '+', '/', 'a'], ['Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', '+', '/', 'a'], ['W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'n', 'o', 'p', 
'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', '+', '/', 'a'], ['U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', '+', '/', 'a'], ['S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', '+', '/', 'a'], ['Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', '+', '/', 'a'], ['O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', '+', '/', 'a'], ['M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', '+', '/', 'a'], ['K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', '+', '/', 'a'], ['I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 
'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', '+', '/', 'a'], ['G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', '+', '/', 'a'], ['E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', '+', '/', 'a'], ['C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', '+', '/', 'a'], ['B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', '+', '/', 'a'], ['D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', '+', '/', 'a'], ['F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', '+', '/', 'a'], ['J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', '+', 
'/', 'a'], ['H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', '+', '/', 'a'], ['L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'u', 'v', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', '+', '/', 'a'], ['N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', '+', '/', 'a'], ['P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', '+', '/', 'a'], ['R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', '=', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '+', '/', 'a'], ['=', 'b', 'c', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '+', '/', 'a'], ['h', 'i', 'j', 'k', 'l', 'N', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'w', 'x', 'y', 'z', '=', 'b', 'c', 'd', 'e', 'f', 'O', 'P', 'Q', 'g', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', '+', '/', 'a'], ] self._sub_encrypt_sheet_list = [ ['a', 'Y', 
'8', '7', '9', 'K', 'u', 'T', 'U', 'R', 'E', 'e', 'I', 'N', 'S', 't', '1', 'i', 'n', 'y', 'J', 'j', '0', 'k'], ['A', 'y', '8', '7', '9', 'k', 'U', 't', 'u', 'r', 'e', 'E', 'i', 'n', 's', 'T', '1', 'I', 'N', 'Y', 'j', 'J', '0', 'K'], ['a', 'b', '0', '1', 'c', '2', '3', 'd', '5', 'e', '6', 'f', '7', 'g', '8', 'h', 'i', 'j', 'o', 'l', '9', 'm', 'n', 'k'], ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'Y', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'K'], ['0', '1', '2', '3', '5', '6', '7', '8', '9', 'Z', 'Y', 'X', 'W', 'V', 'U', 'T', 'S', 'R', 'Q', 'P', 'O', 'N', 'M', 'L'], ['z', '9', 'y', '8', 'x', '7', 'w', '6', 'v', '5', 'u', '3', 't', '2', 's', '1', 'r', 'q', 'p', 'o', 'n', 'm', 'k', 'l'] ] self._number_sheet = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '+', '/', '='] self._rand_encrypt_dict = {'0': 'k', '1': 'u', '2': 't', '3': 'U', '4': 'R', '5': 'S'} self._rand_decrypt_dict = {'k': '0', 'u': '1', 't': '2', 'U': '3', 'R': '4', 'S': '5'} class base2233(SecrecSheet): ''' 加密与解密是分别是1对2,2对1的关系,同时在加密与解密过程中需要3个密码表参与查询与处理,故称为2233''' def encode(self, input_str, ak_rand=False): ''' :param input_str: input encrypt string :param ak_rand: is ak_rand is True, will be use random secret sheet, encrypt input string :return: return encrypt string ''' # check ak key, if ak rand is exist and use random sub sheet rand_str = '' if ak_rand: rand_num = randint(0, len(self._sub_encrypt_sheet_list) - 1) sub_encrypt_sheet = self._sub_encrypt_sheet_list[rand_num] rand_str = self._rand_encrypt_dict[str(rand_num)] else: sub_encrypt_sheet = self._sub_encrypt_sheet_list[0] # use base64 encode input str if isinstance(input_str, str): input_str = input_str.encode() base64_en_str = b32encode(input_str) base64_en_str = base64_en_str.decode() encrypt_list = [] for 
encrypt_char in base64_en_str: # obtain char index number if encrypt_char.isdigit(): encrypt_result = '-' + sub_encrypt_sheet[len(sub_encrypt_sheet) - 1 - int(encrypt_char)] else: encrypt_x = self._number_sheet.index(encrypt_char.upper()) char_number_y = str(len(self._number_sheet) - 1 - encrypt_x) # calculator encrypt y index number if len(char_number_y) == 2: encrypt_y = len(sub_encrypt_sheet) - 1 - int(char_number_y[1]) else: encrypt_y = len(sub_encrypt_sheet) - 1 - int(char_number_y) # search encrypt char main_encrypt_char = self._main_sheet[encrypt_y][encrypt_x] sub_encrypt_char = sub_encrypt_sheet[encrypt_y] # join main encrypt char and sub encrypt char encrypt_result = main_encrypt_char + sub_encrypt_char encrypt_list.append(encrypt_result) result_encrypt_str = ''.join(encrypt_list) if ak_rand: result_encrypt_str = result_encrypt_str + rand_str return result_encrypt_str def decode(self, input_str, res_decode=False, ak_rand=False): ''' :param input_str: input string :param res_decode: decode output or not, default is False :param ak_rand: if ak_rand is true, will be use random secrec sheet decrypt input string :return: ''' # check ak key, if ak key is exist, use customer key if ak_rand: rand_str = input_str[-1] rand_num = self._rand_decrypt_dict[rand_str] sub_encrypt_sheet = self._sub_encrypt_sheet_list[int(rand_num)] input_str = input_str[:-1] else: sub_encrypt_sheet = self._sub_encrypt_sheet_list[0] input_str_length = int(len(input_str) / 2) input_str_list = [input_str[a * 2:(a + 1) * 2] for a in range(input_str_length)] decrypt_list = [] for en_str in input_str_list: # decrypt number char if '-' in en_str: result_char = str(len(sub_encrypt_sheet) - 1 - sub_encrypt_sheet.index(en_str[1])) # decrypt string char else: tail_char = en_str[1] tail_char_index = sub_encrypt_sheet.index(tail_char) start_char_index = self._main_sheet[tail_char_index].index(en_str[0]) result_char = self._number_sheet[start_char_index] decrypt_list.append(result_char) result_str = 
''.join(decrypt_list) decrypt_result = b32decode(result_str.encode()) if res_decode: decrypt_result = decrypt_result.decode() return decrypt_result if __name__ == '__main__': ak2233 = base2233() input_str = 'www.kuture.com.cn' print('{} Input: {}'.format(len(input_str), input_str)) encrypt_result = ak2233.encode(input_str, ak_rand=True) print('{} Encrypt Result: {}'.format(len(encrypt_result), encrypt_result)) decrypt_result = ak2233.decode(encrypt_result, res_decode=True, ak_rand=True) print('Decrypt Result: ', decrypt_result)
true
a2693da706be8a3d0ff12d1d0b7f347baa94f5a6
Python
Mendi23/decision_tree
/q1.py
UTF-8
1,192
2.765625
3
[]
no_license
from numpy import reshape
from sklearn import tree
import pandas as pd
from sklearn.metrics import confusion_matrix, make_scorer
from sklearn.model_selection import cross_validate


def get_cross_val_score(classifier, datapath, labelCol, kFold):
    """Cross-validate *classifier* and accumulate a confusion matrix.

    Reads the CSV at *datapath*, using column *labelCol* (which holds the
    strings "True"/"False") as a 1/0 target vector, and runs *kFold*-fold
    cross-validation.

    Returns a tuple ``(accuracy, [tp, fp, fn, tn])`` where the four counts
    are summed over all folds.
    """
    # The label column is mapped to 1/0 while loading and becomes the index,
    # so the remaining columns are the feature matrix.
    frame = pd.read_csv(datapath, sep=',', index_col=labelCol,
                        converters={labelCol: lambda x: 1 if x == "True" else 0})
    features = frame.values[:, :]
    labels = frame.index.values

    def cm(i, j):
        # Build a scorer that extracts a single confusion-matrix cell;
        # rows are true labels, columns are predictions.
        def cm_aux(y_true, y_pred):
            return confusion_matrix(y_true, y_pred)[i, j]
        return cm_aux

    scoring = {
        'tp': make_scorer(cm(1, 1)),
        'tn': make_scorer(cm(0, 0)),
        'fp': make_scorer(cm(0, 1)),
        'fn': make_scorer(cm(1, 0)),
    }
    fold_scores = cross_validate(classifier, features, labels,
                                 scoring=scoring, cv=kFold)
    # Sum each cell over the folds, in tp/fp/fn/tn order.
    counts = [sum(fold_scores["test_" + name]) for name in ('tp', 'fp', 'fn', 'tn')]
    accuracy = (counts[0] + counts[3]) / sum(counts)
    return accuracy, counts


id3dt = tree.DecisionTreeClassifier(criterion="entropy")
t = get_cross_val_score(id3dt, "flare.csv", 32, 4)
print(t[0])
print(reshape(t[1], (2, 2)))
true
445a6caad2c403c78b4914eb2097543612802f3f
Python
oscar-sanjim/flickrNeuronal
/neuroNet.py
UTF-8
5,467
3.046875
3
[]
no_license
from __future__ import division
from __future__ import print_function
from PIL import Image
from sklearn import cross_validation
from sklearn import grid_search
from sklearn import svm
from sklearn import metrics
from StringIO import StringIO
from urlparse import urlparse
import urllib2
import sys
import os

# NOTE: this module targets Python 2 (StringIO, urllib2, raw_input) and the
# pre-0.18 scikit-learn API (cross_validation / grid_search modules).


def process_directory(directory):
    """Walk *directory* recursively and return a list of colour-histogram
    feature vectors, one per decodable RGB image found."""
    training = []
    for root, _, files in os.walk(directory):
        for file_name in files:
            file_path = os.path.join(root, file_name)
            img_feature = process_image_file(file_path)
            if img_feature:
                training.append(img_feature)
    return training


def process_image_file(image_path):
    """Return the feature vector for the image at *image_path*, or None if
    the file cannot be decoded as an image."""
    # Fix: close the file handle -- the original leaked it via open().read().
    with open(image_path, 'rb') as image_file:
        image_fp = StringIO(image_file.read())
    try:
        image = Image.open(image_fp)
        return process_image(image)
    except IOError:  # not an image / truncated data
        return None


def process_image_url(image_url):
    """Download *image_url* and return its feature vector.

    Browser-like headers are sent because some hosts reject the default
    urllib2 user agent.
    """
    parsed_url = urlparse(image_url)
    request = urllib2.Request(image_url)
    request.add_header('User-Agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0')
    request.add_header('Referrer', parsed_url.netloc)
    net_data = StringIO(urllib2.build_opener().open(request).read())
    image = Image.open(net_data)
    return process_image(image)


# Get property vector
def process_image(image, blocks=4):
    """Return a normalised colour histogram of *image*.

    Each RGB channel is quantised into *blocks* bins, giving a vector of
    blocks**3 relative frequencies (larger *blocks* = slower but more
    discriminative). Returns None for non-RGB images.
    """
    if not image.mode == 'RGB':
        return None
    feature = [0] * blocks * blocks * blocks
    pixel_count = 0
    for pixel in image.getdata():
        # Quantise each channel into one of *blocks* bins.
        r_idx = int(pixel[0] / (256 / blocks))
        g_idx = int(pixel[1] / (256 / blocks))
        b_idx = int(pixel[2] / (256 / blocks))
        idx = r_idx + g_idx * blocks + b_idx * blocks * blocks
        feature[idx] += 1
        pixel_count += 1
    # Normalise by pixel count so images of different sizes are comparable.
    return [x / pixel_count for x in feature]


def train(training_path_a, training_path_b, print_metrics=True):
    """Train an SVM separating images in *training_path_a* (label 1) from
    images in *training_path_b* (label 0).

    A grid search over kernel/C/gamma is run; 20% of the samples are held
    out to print a classification report. Returns the best estimator.

    Raises IOError if either path is not a directory.
    """
    if not os.path.isdir(training_path_a):
        raise IOError('%s is not a directory' % training_path_a)
    if not os.path.isdir(training_path_b):
        raise IOError('%s is not a directory' % training_path_b)
    training_a = process_directory(training_path_a)
    training_b = process_directory(training_path_b)
    # Join both lists and build the matching target vector.
    data = training_a + training_b
    target = [1] * len(training_a) + [0] * len(training_b)
    # Split training data in a train set and a test set (20% held out).
    x_train, x_test, y_train, y_test = cross_validation.train_test_split(data, target, test_size=0.20)
    # Parameter search space for the SVM.
    parameters = {'kernel': ['linear', 'rbf'], 'C': [1, 10, 100, 1000], 'gamma': [0.01, 0.001, 0.0001]}
    clf = grid_search.GridSearchCV(svm.SVC(), parameters).fit(x_train, y_train)
    classifier = clf.best_estimator_
    if print_metrics:
        print()
        print('Parameters:', clf.best_params_)
        print()
        print('Best classifier score')
        print(metrics.classification_report(y_test, classifier.predict(x_test)))
    return classifier


def main():
    # Hard-coded training directories -- TODO: make these CLI arguments.
    path_day = "/home/oscar/Documents/ISC/8vo/Sistemas_Inteligentes/Flickr/day"
    path_night = "/home/oscar/Documents/ISC/8vo/Sistemas_Inteligentes/Flickr/night"
    print('Training classifier...')
    classifier = train(path_day, path_night, print_metrics=False)
    while True:
        try:
            print("Input an image url (enter to exit): "),
            image_url = raw_input()
            if not image_url:
                break
            features = process_image_url(image_url)
            res = classifier.predict(features)
            print(res)
            if res == 0:
                print("Night")
            else:
                print("Day")
        except (KeyboardInterrupt, EOFError):
            break
        # Fix: the original used a bare ``except:``, which also swallows
        # SystemExit; catch Exception so only ordinary errors are reported.
        except Exception:
            exception = sys.exc_info()[0]
            print(exception)


if __name__ == '__main__':
    main()
true
6bb3c5d8a955450e068189054a9fd3d6c47edb0c
Python
RManish76/python2_hactoberfest
/sum_of_n.py
UTF-8
102
4.0625
4
[]
no_license
def sum_to_n(n):
    """Return 0 + 1 + ... + n.

    Uses the closed-form formula instead of the original O(n) loop that
    also shadowed the builtin ``sum``. For negative *n* the original loop
    body never ran, so 0 is returned to preserve that behavior.
    """
    if n < 0:
        return 0
    return n * (n + 1) // 2


if __name__ == "__main__":
    n = int(input("Enter a number: "))
    total = sum_to_n(n)
    # Same output format as the original script.
    print(f"sum = {total}")
true
ca8c1b410d64df44ca441eb600482cf9208ae923
Python
chrisdembia/BicycleParameters
/bicycleparameters/inertia.py
UTF-8
4,859
2.96875
3
[]
no_license
#!/usr/bin/env python
from math import pi
import numpy as np
from uncertainties import unumpy, umath


def rotate_inertia_tensor(I, angle):
    '''Returns the inertia tensor rotated through angle.

    The rotation matrix is about the y axis (a planar rotation of the x-z
    components), so this is only meaningful for the "2D" planar case.
    umath.cos/sin are used so uncertainties propagate through the rotation.
    '''
    ca = umath.cos(angle)
    sa = umath.sin(angle)
    C = np.array([[ca, 0., -sa],
                  [0., 1., 0.],
                  [sa, 0., ca]])
    Irot = np.dot(C, np.dot(I, C.T))
    return Irot


def principal_axes(I):
    '''Returns the principal moments of inertia and the orientation.

    Parameters
    ----------
    I : ndarray, shape(3,3)
        An inertia tensor.

    Returns
    -------
    Ip : ndarray, shape(3,)
        The principal moments of inertia, sorted smallest to largest.
    C : ndarray, shape(3,3)
        The rotation matrix; its rows are the eigenvectors of I, ordered
        to match the sorted moments in Ip.

    '''
    Ip, C = np.linalg.eig(I)
    indices = np.argsort(Ip)
    Ip = Ip[indices]
    C = C.T[indices]
    return Ip, C


def parallel_axis(Ic, m, d):
    '''Returns the moment of inertia of a body about a different point.

    Parameters
    ----------
    Ic : ndarray, shape(3,3)
        The moment of inertia about the center of mass of the body with
        respect to an orthogonal coordinate system.
    m : float
        The mass of the body.
    d : ndarray, shape(3,)
        The distances along the three ordinates that located the new point
        relative to the center of mass of the body.

    Returns
    -------
    I : ndarray, shape(3,3)
        The moment of inertia of the body about a point located by d.

    '''
    a = d[0]
    b = d[1]
    c = d[2]
    # object dtype so the entries may carry uncertainties (ufloats) --
    # presumably why a plain float array is not used; confirm before changing.
    dMat = np.zeros((3, 3), dtype=object)
    dMat[0] = np.array([b**2 + c**2, -a * b, -a * c])
    dMat[1] = np.array([-a * b, c**2 + a**2, -b * c])
    dMat[2] = np.array([-a * c, -b * c, a**2 + b**2])
    return Ic + m * dMat


def inertia_components(jay, beta):
    '''Returns the 2D orthogonal inertia tensor.

    When at least three moments of inertia and their axes orientations are
    known relative to a common inertial frame of a planar object, the
    orthogonal moments of inertia relative to the frame are computed.

    Parameters
    ----------
    jay : ndarray, shape(n,)
        An array of at least three moments of inertia. (n >= 3)
    beta : ndarray, shape(n,)
        An array of orientation angles corresponding to the moments of
        inertia in jay.

    Returns
    -------
    eye : ndarray, shape(3,)
        Ixx, Ixz, Izz

    '''
    sb = unumpy.sin(beta)
    cb = unumpy.cos(beta)
    # Rotation identities give jay = betaMat . [Ixx, Ixz, Izz]; solve via the
    # matrix (pseudo)inverse, which also handles n > 3 measurements.
    betaMat = unumpy.matrix(np.vstack((cb**2, -2 * sb * cb, sb**2)).T)
    eye = np.squeeze(np.asarray(np.dot(betaMat.I, jay)))
    return eye


def tor_inertia(k, T):
    '''Calculate the moment of inertia for an ideal torsional pendulum.

    Parameters
    ----------
    k : float
        Torsional stiffness.
    T : float
        Period of oscillation.

    Returns
    -------
    I : float
        Moment of inertia.

    '''
    I = k * T**2 / 4. / pi**2
    return I


def compound_pendulum_inertia(m, g, l, T):
    '''Returns the moment of inertia for an object hung as a compound
    pendulum.

    Parameters
    ----------
    m : float
        Mass of the pendulum.
    g : float
        Acceleration due to gravity.
    l : float
        Length of the pendulum.
    T : float
        The period of oscillation.

    Returns
    -------
    I : float
        Moment of inertia of the pendulum about the pivot point.

    '''
    I = (T / 2. / pi)**2. * m * g * l - m * l**2.
    return I


def tube_inertia(l, m, ro, ri):
    '''Calculate the moment of inertia for a tube (or rod) where the x axis
    is aligned with the tube's axis.

    Parameters
    ----------
    l : float
        The length of the tube.
    m : float
        The mass of the tube.
    ro : float
        The outer radius of the tube.
    ri : float
        The inner radius of the tube. Set this to zero if it is a rod
        instead of a tube.

    Returns
    -------
    Ix : float
        Moment of inertia about tube axis.
    Iy, Iz : float
        Moment of inertia about normal axis.

    '''
    Ix = m / 2. * (ro**2 + ri**2)
    Iy = m / 12. * (3 * ro**2 + 3 * ri**2 + l**2)
    Iz = Iy
    return Ix, Iy, Iz


def total_com(coordinates, masses):
    '''Returns the center of mass of a group of objects if the individual
    centers of mass and masses are provided.

    Parameters
    ----------
    coordinates : ndarray, shape(3,n)
        The rows are the x, y and z coordinates, respectively, and the
        columns are for each object.
    masses : ndarray, shape(n,)
        An array of the masses of multiple objects; the order should
        correspond to the columns of coordinates.

    Returns
    -------
    mT : float
        Total mass of the objects.
    cT : ndarray, shape(3,)
        The x, y, and z coordinates of the total center of mass.

    '''
    products = masses * coordinates
    mT = np.sum(masses)
    cT = np.sum(products, axis=1) / mT
    return mT, cT
true
e86dbf1f8af9a83102aab966661d9c8dc9237047
Python
vidyasw/HRM-OrangeHRM
/OrangeHRM_Common/OrangeHRM_InputManagement/Text_utility.py
UTF-8
1,682
2.609375
3
[]
no_license
import linecache
import os
from OrangeHRM_Common.OrangeHRM_ReportUtilities.LogGenerator import logger_obj


class Text_utilities():
    """File-based helpers for extracting role specification sections."""

    def __init__(self):
        pass

    @staticmethod
    def create_specifications(src_file_name, des_file_name, spec_for_role):
        """Copy the specification section for *spec_for_role* from the
        source file into the destination file.

        The section starts at the first line containing *spec_for_role*
        and ends just before the first blank line that follows it (the
        header line itself is included). The destination file is always
        (re)created when the role is found.
        """
        # Fix: only the source file must pre-exist -- the destination is
        # created below. The original required BOTH to be missing before
        # logging ('and') and then carried on regardless.
        if not os.path.exists(src_file_name):
            logger_obj.debug("Some parameters are missing plz check!!")
            return

        # Locate the section: the role header line and the terminating
        # blank line (if any). Line numbers are 1-based to match linecache.
        start_line = None
        end_line = None
        with open(src_file_name, "r") as src_file:
            for line_no, line in enumerate(src_file, 1):
                if start_line is None:
                    if spec_for_role in line:
                        start_line = line_no
                elif line == '\n':
                    end_line = line_no
                    break

        # Fix: the original crashed with IndexError when the role was never
        # found; log and bail out instead.
        if start_line is None:
            logger_obj.debug("Specification for role not found: " + spec_for_role)
            return

        # No blank separator before EOF: nothing to copy, but still create
        # an (empty) destination file, matching the original behavior.
        stop_line = end_line if end_line is not None else start_line

        # Collect the header line through the line before the blank
        # separator (linecache keeps the file cached between calls).
        collected_lines = [linecache.getline(src_file_name, l)
                           for l in range(start_line, stop_line)]

        with open(des_file_name, "w") as dest_file:
            dest_file.writelines(collected_lines)
true
daa85342f4598131f34591eee17dd6ff76c381d3
Python
kilohsakul/Noodler
/noodler/core.py
UTF-8
5,808
3.5
4
[]
no_license
""" Define basic data structures and types. A **segment automaton** is a NFA with ε-transitions such that no ε-transition is on a cycle. Segment automata are concatenation of _segments_. Classes ------- String_equation SingleSEQuery Abstract class for String-equation queries Types ----- Aut : awalipy.Automaton General type of automaton. AutConstraints : Dict[str, Aut] Automata as constraints for SE queries. SegAut : awalipy.Automaton Segment automaton. TransID : int ID of transition in automaton """ from typing import Dict, Type, Union, Sequence import awalipy Aut = awalipy.Automaton SegAut: Type[awalipy.Automaton] = awalipy.Automaton RE = awalipy.RatExp TransID = int AutConstraints = Dict[str, Aut] REConstraints = Dict[str, RE] StrConstraints = Dict[str, str] Constraints = Union[AutConstraints, REConstraints, StrConstraints] class StringEquation: # noinspection PyUnresolvedReferences """ The basic class of string equations. If ``self.left[i] == v`` then `v` _occurs_ on left at `i`. Attributes ---------- variables : str or list of str Variables that appear in the equation. left, right : str over vars or iterable of vars Left and right side of the equation. indices_l, indices_r : dict (vars → list of ints) Sequence of occurrences of `v` on each side for each var `v`. switched : StringEquation Pointer to equation with a switched sides. Public functions ---------------- get_side : {"left","right"} → str or iterable of vars Returns corresponding side of the equation Notes ----- By default ``self.switched.switched == self``. """ def __init__(self, left_side: str, right_side: str, switched=None, variables=None): """ Create a String_equation. If ``vars`` is string, each character of it is treated as a variable. If switched is not specified, create it. Parameters ---------- left_side, right_side : str over vars or list of vars switched : String_equation, default None Equation with switched sides. variables : str or list of str The variables that appear in the equation. 
""" if variables is None: variables = set(left_side).union(set(right_side)) self.vars = variables self.left = left_side self.right = right_side # Set indices dicts ind_l = {} ind_r = {} for var in self.vars: ind_l[var] = [i for i, v in enumerate(self.left) if v == var] ind_r[var] = [i for i, v in enumerate(self.right) if v == var] self.indices_l = ind_l self.indices_r = ind_r # Setup switched equation if switched is None: switched = StringEquation(self.right, self.left, switched=self, variables=self.vars) self.switched = switched def get_side(self, side: str) -> str: """ Return the left or right side of equation. Parameters ---------- side : {"left", "right"} Returns ------- self.left or self.right """ if side not in ["left", "right"]: raise ValueError("side must be 'left' or 'right'." f"Given {side}.") if side == "left": return self.left return self.right def __str__(self): """Print equation in the form of left=right.""" return f"{self.left} = {self.right}" def __repr__(self): return f"{self.__class__.__name__}: {self.left} = {self.right}" def is_straightline(equations: Sequence[StringEquation]) -> bool: """ Check if SE system is in Single Static Assignment (SSA) form. A system given as a sequence of equations belongs to the straight-line (a.k.a. Single Static Assignment) fragment if and only if: 1. Left sides of the equations consist of 1 variable only. This is, the system can be written as: x₀ = uvw x₁ = ... ... x_n = ... 2. x_i does not appear in any eq_j for j<i holds for all i. Parameters ---------- equations Returns ------- True is given SE system is in SSA form. """ definitions = set() for eq in equations[::-1]: if len(eq.left) != 1 or eq.left[0] in definitions: return False definitions.add(eq.left[0]) for var in eq.right: if var in definitions: return False return True def create_automata_constraints(constraints: Constraints) -> AutConstraints: """ Parse any constraints-like dictionary into automata constraints. 
Each entry in `constraints` is process independently and thus can be of a different type (even an automaton). Parameters ---------- constraints: Constraints Dict[str,RE] or Dict[str,Aut] Dictionary var → Union[RE,Aut] (where RE is either string or awalipy.RatExp) Returns ------- AutConstraint The automata representation of constraints """ res = {} for var, const in constraints.items(): if isinstance(const, Aut): res[var] = const continue if isinstance(const, str): const = awalipy.RatExp(const) if not isinstance(const, RE): raise TypeError("constraints must be a regular expression") # We use the Thompson's algorithm as the derivative-term-based one # often explodes with growing alphabet. res[var] = const.thompson().proper().minimal_automaton().trim() return res
true
c577129c81df836108eacf162fe7235ad3ee022d
Python
joerobmunoz/Inter-App-Messaging
/Server.py
UTF-8
3,112
2.890625
3
[]
no_license
__author__ = 'Joe Munoz' #!/usr/bin/env python """ Server implementation of a client/server inter-application messaging system. This is comprised of the main server, and alternate thread workers that push updates to existing socket connections. """ import Database import socket import sys import getopt import thread import time import select # Constants size = 1024 backlog = 10 host = 'localhost' port = 54307 # Main server class. Just call #start() and it deploys the DB and socket listener class Server: def __init__(self): self.address = (host, port) self.clients = dict() def start(self): db = Database.Deployment() db.deploy() self.soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.soc.bind(self.address) self.soc.listen(backlog) self.acceptor = thread.start_new_thread(self.accept, ()) # Kick off separate listener thread # TODO BUG # This thread needs to take a back seat, but this is not the right way. time.sleep(500000) def stop(self): self.soc = None def accept(self): try: input = [self.soc, sys.stdin] while self.soc: inputready, outputready, exceptready = select.select(input, [], []) for s in inputready: if s == self.soc: # handle the server socket connection, name = self.soc.accept() print str(name[0]) + ":" + str(name[1]) + "@ has connected" cli = Client(conn=connection, cl=name) self.clients[name] = cli cli.spoke = self.tell except socket.error as e: print "Error accepting socket connection\n:" + e.message self.soc.close() def tell(self, ip, msg): print str(ip[0]) + ":" + str(ip[1]) + "@ " + msg for id in self.clients: self.clients[id].hear(msg) # Server implementation of the Client Object. 
class Client: def __init__(self, conn, cl): self.soc = conn self.ip = cl self.cliname = cl self.spoke = None self.speaker = thread.start_new_thread(self.speak, ()) def speak(self): while self.soc: msg = self.soc.recv(size) if msg: self.spoke(self.cliname, msg) def hear(self, msg): self.soc.send(msg) # Command line args to set the host and port def main(argv): try: opts, args = getopt.getopt(argv, "hi:o:", ["host", "port"]) except getopt.GetoptError: print "Client.py -h <host> -p <post>\nDefaults to 'localhost' and '54322'" sys.exit(2) for opt, arg in opts: if opt == '-l': print "Client.py -h <host> -p <post>\nDefaults to " + str(host[0]) + " and " + str(port) sys.exit() elif opt in ("-h", "--host"): host = arg elif opt in ("-p", "--post"): port = arg Server().start() if __name__ == "__main__": main(sys.argv[1:])
true
f8a5e5cd93731fe20202fb14d82a86704207b874
Python
git-drashti/test1
/pythonexam23-2020/question4.py
UTF-8
59
2.828125
3
[]
no_license
a = input ("enter any sentence :") b = a.count() print(b)
true
e5dd52306809da30e7e92e89539762d8fd79284c
Python
Alexygui/python_test
/class_test/test_survey.py
UTF-8
629
3
3
[]
no_license
import unittest
from class_test.survey import AnonymousSurvey


class TestAnonymousSurvey(unittest.TestCase):
    """Tests for the AnonymousSurvey class."""

    def setUp(self):
        # Fresh survey (empty question text) and a canned answer set for
        # every test. NOTE(review): 'Spanlish' looks like a typo for
        # 'Spanish', but it is harmless here -- the tests only need the
        # stored and retrieved values to match.
        self.my_survey = AnonymousSurvey('')
        self.responses = ['English', 'Spanlish', 'Mandarin']

    def test_store_single_response(self):
        """A single stored answer ends up in survey.responses."""
        # 'store_respose' (sic) presumably mirrors the (misspelled) method
        # name declared in class_test/survey.py -- confirm before renaming.
        self.my_survey.store_respose(self.responses[0])
        self.assertIn("English", self.my_survey.responses)

    def test_stor_three_response(self):
        """All three stored answers end up in survey.responses."""
        for response in self.responses:
            self.my_survey.store_respose(response)
        for response in self.responses:
            self.assertIn(response, self.my_survey.responses)
true
68e651db3e4ff14955942dd142976d07602e58bf
Python
miltonjdaz/learning_py
/sorry/testing_dicts.py
UTF-8
1,670
3.5625
4
[]
no_license
import create_paths # this script checks out how to loop dictionaries and how we can use the string keys and values in movement.py four_paths=create_paths.main() path_to_move_in=four_paths[0] first_spot=str(next(iter(path_to_move_in.keys()))) path_to_move_in[first_spot]="coin0" path_to_move_in["spot_49"]="coin1" rolled=6 coin_count=0 coins_on_board=[] for k, v in path_to_move_in.items(): if v!=None: coin_count+=1 coins_on_board.append(k) # keeping a list of dictionary keys with coins in them for single player # print(coint_count) furthest_coin_key=coins_on_board[-1] coin_to_move="" movement_counter=0 for k, v in path_to_move_in.items(): # once we finally come across the furthest coin # we store in coin_to_move var and empty out the dict value (v) if k==furthest_coin_key: print(k, v) coin_to_move=v path_to_move_in[k]=None else: v=v # trivial line which explains that nothing happens # if we already moved the coin out to be moved forward if path_to_move_in[furthest_coin_key]==None and movement_counter<rolled: movement_counter+=1 print(movement_counter) # if we have looped around enough times to catch the rolled amount elif movement_counter==rolled: path_to_move_in[k]=coin_to_move rolled=0 # breaks out of these conditionals because movement counter will never equal 0 else: path_to_move_in[k]=path_to_move_in[k] print(path_to_move_in) # this function should be working correctly path_to_move_in=four_paths[1] def path_is_empty(path_to_move_in): if(all(value is None for value in path_to_move_in.values())): return True else: return False print(path_is_empty(path_to_move_in))
true
1abbf3bc50e4ede452b22c0c38041756154dd9e9
Python
AdamZhouSE/pythonHomework
/Code/CodeRecords/2346/60737/268878.py
UTF-8
1,055
2.765625
3
[]
no_license
def spiral_order(mat):
    """Return the elements of the 2-D list *mat* in clockwise spiral order.

    Walks right along the top row, down the right column, left along the
    bottom row, then up the left column, shrinking the boundaries after
    each pass. Replaces the original's O(m*n) visited-tag matrix with an
    O(1)-extra-memory boundary walk, and handles an empty matrix (which
    crashed the original with IndexError).
    """
    if not mat or not mat[0]:
        return []
    top, bottom = 0, len(mat) - 1
    left, right = 0, len(mat[0]) - 1
    order = []
    while top <= bottom and left <= right:
        for j in range(left, right + 1):        # top row, left -> right
            order.append(mat[top][j])
        top += 1
        for i in range(top, bottom + 1):        # right column, top -> bottom
            order.append(mat[i][right])
        right -= 1
        if top <= bottom:
            for j in range(right, left - 1, -1):  # bottom row, right -> left
                order.append(mat[bottom][j])
            bottom -= 1
        if left <= right:
            for i in range(bottom, top - 1, -1):  # left column, bottom -> top
                order.append(mat[i][left])
            left += 1
    return order


if __name__ == "__main__":
    # Input format: t test cases; each is a line "m n" followed by a line
    # of m*n space-separated values. Output per case: the values in spiral
    # order, space-separated on one line (same format as the original).
    t = int(input())
    while t:
        cmd = [int(x) for x in input().split()]
        m, n = cmd[0], cmd[1]
        nums = [int(x) for x in input().split()]
        mat = [nums[i * n:(i + 1) * n] for i in range(m)]
        for k in spiral_order(mat):
            print(k, end=" ")
        print()
        t -= 1
true
094fa591fbad0b72b7d17f1a34bdb4e9764111b0
Python
seumasmorrison/pywafo
/pywafo/src/wafo/integrate.py
UTF-8
45,310
2.671875
3
[]
no_license
from __future__ import division import warnings import numpy as np from numpy import pi, sqrt, ones, zeros # @UnresolvedImport from scipy import integrate as intg import scipy.special.orthogonal as ort from scipy import special as sp from wafo.plotbackend import plotbackend as plt from scipy.integrate import simps, trapz from wafo.demos import humps _EPS = np.finfo(float).eps _POINTS_AND_WEIGHTS = {} __all__ = ['dea3', 'clencurt', 'romberg', 'h_roots', 'j_roots', 'la_roots', 'p_roots', 'qrule', 'gaussq', 'richardson', 'quadgr', 'qdemo'] def dea3(v0, v1, v2): ''' Extrapolate a slowly convergent sequence Parameters ---------- v0,v1,v2 : array-like 3 values of a convergent sequence to extrapolate Returns ------- result : array-like extrapolated value abserr : array-like absolute error estimate Description ----------- DEA3 attempts to extrapolate nonlinearly to a better estimate of the sequence's limiting value, thus improving the rate of convergence. The routine is based on the epsilon algorithm of P. Wynn, see [1]_. Example ------- # integrate sin(x) from 0 to pi/2 >>> import numpy as np >>> Ei= np.zeros(3) >>> linfun = lambda k : np.linspace(0, np.pi/2., 2.**(k+5)+1) >>> for k in np.arange(3): ... x = linfun(k) ... Ei[k] = np.trapz(np.sin(x),x) >>> En, err = dea3(Ei[0],Ei[1],Ei[2]) >>> En, err (array([ 1.]), array([ 0.0002008])) >>> TrueErr = Ei-1. >>> TrueErr array([ -2.0080568e-04, -5.0199908e-05, -1.2549882e-05]) See also -------- dea Reference --------- .. [1] C. Brezinski (1977) "Acceleration de la convergence en analyse numerique", "Lecture Notes in Math.", vol. 584, Springer-Verlag, New York, 1977. 
''' E0, E1, E2 = np.atleast_1d(v0, v1, v2) abs = np.abs # @ReservedAssignment max = np.maximum # @ReservedAssignment delta2, delta1 = E2 - E1, E1 - E0 err2, err1 = abs(delta2), abs(delta1) tol2, tol1 = max(abs(E2), abs(E1)) * _EPS, max(abs(E1), abs(E0)) * _EPS with warnings.catch_warnings(): warnings.simplefilter("ignore") # ignore division by zero and overflow ss = 1.0 / delta2 - 1.0 / delta1 smalle2 = (abs(ss * E1) <= 1.0e-3).ravel() result = 1.0 * E2 abserr = err1 + err2 + E2 * _EPS * 10.0 converged = (err1 <= tol1) & (err2 <= tol2).ravel() | smalle2 k4, = (1 - converged).nonzero() if k4.size > 0: result[k4] = E1[k4] + 1.0 / ss[k4] abserr[k4] = err1[k4] + err2[k4] + abs(result[k4] - E2[k4]) return result, abserr def clencurt(fun, a, b, n0=5, trace=False, args=()): ''' Numerical evaluation of an integral, Clenshaw-Curtis method. Parameters ---------- fun : callable a, b : array-like Lower and upper integration limit, respectively. n : integer defines number of evaluation points (default 5) Returns ------- Q = evaluated integral tol = Estimate of the approximation error Notes ----- CLENCURT approximates the integral of f(x) from a to b using an 2*n+1 points Clenshaw-Curtis formula. The error estimate is usually a conservative estimate of the approximation error. The integral is exact for polynomials of degree 2*n or less. Example ------- >>> import numpy as np >>> val,err = clencurt(np.exp,0,2) >>> abs(val-np.expm1(2))< err, err<1e-10 (array([ True], dtype=bool), array([ True], dtype=bool)) See also -------- simpson, gaussq References ---------- [1] Goodwin, E.T. (1961), "Modern Computing Methods", 2nd edition, New yourk: Philosophical Library, pp. 78--79 [2] Clenshaw, C.W. and Curtis, A.R. (1960), Numerische Matematik, Vol. 2, pp. 
197--205 ''' # make sure n is even n = 2 * n0 a, b = np.atleast_1d(a, b) a_shape = a.shape af = a.ravel() bf = b.ravel() Na = np.prod(a_shape) s = np.r_[0:n + 1] s2 = np.r_[0:n + 1:2] s2.shape = (-1, 1) x1 = np.cos(np.pi * s / n) x1.shape = (-1, 1) x = x1 * (bf - af) / 2. + (bf + af) / 2 if hasattr(fun, '__call__'): f = fun(x) else: x0 = np.flipud(fun[:, 0]) n = len(x0) - 1 if abs(x - x0) > 1e-8: raise ValueError( 'Input vector x must equal cos(pi*s/n)*(b-a)/2+(b+a)/2') f = np.flipud(fun[:, 1::]) if trace: plt.plot(x, f, '+') # using a Gauss-Lobatto variant, i.e., first and last # term f(a) and f(b) is multiplied with 0.5 f[0, :] = f[0, :] / 2 f[n, :] = f[n, :] / 2 # % x = cos(pi*0:n/n) # % f = f(x) # % # % N+1 # % c(k) = (2/N) sum f''(n)*cos(pi*(2*k-2)*(n-1)/N), 1 <= k <= N/2+1. # % n=1 fft = np.fft.fft tmp = np.real(fft(f[:n, :], axis=0)) c = 2 / n * (tmp[0:n / 2 + 1, :] + np.cos(np.pi * s2) * f[n, :]) # % old call # % c = 2/n * cos(s2*s'*pi/n) * f c[0, :] = c[0, :] / 2 c[n / 2, :] = c[n / 2, :] / 2 # % alternative call # % c = dct(f) c = c[0:n / 2 + 1, :] / ((s2 - 1) * (s2 + 1)) Q = (af - bf) * np.sum(c, axis=0) # Q = (a-b).*sum( c(1:n/2+1,:)./repmat((s2-1).*(s2+1),1,Na)) abserr = (bf - af) * np.abs(c[n / 2, :]) if Na > 1: abserr = np.reshape(abserr, a_shape) Q = np.reshape(Q, a_shape) return Q, abserr def romberg(fun, a, b, releps=1e-3, abseps=1e-3): ''' Numerical integration with the Romberg method Parameters ---------- fun : callable function to integrate a, b : real scalars lower and upper integration limits, respectively. releps, abseps : scalar, optional requested relative and absolute error, respectively. Returns ------- Q : scalar value of integral abserr : scalar estimated absolute error of integral ROMBERG approximates the integral of F(X) from A to B using Romberg's method of integration. The function F must return a vector of output values if a vector of input values is given. 
Example ------- >>> import numpy as np >>> [q,err] = romberg(np.sqrt,0,10,0,1e-4) >>> q,err (array([ 21.0818511]), array([ 6.6163547e-05])) ''' h = b - a hMin = 1.0e-9 # Max size of extrapolation table tableLimit = max(min(np.round(np.log2(h / hMin)), 30), 3) rom = zeros((2, tableLimit)) rom[0, 0] = h * (fun(a) + fun(b)) / 2 ipower = 1 fp = ones(tableLimit) * 4 # Ih1 = 0 Ih2 = 0. Ih4 = rom[0, 0] abserr = Ih4 # epstab = zeros(1,decdigs+7) # newflg = 1 # [res,abserr,epstab,newflg] = dea(newflg,Ih4,abserr,epstab) two = 1 one = 0 for i in xrange(1, tableLimit): h *= 0.5 Un5 = np.sum(fun(a + np.arange(1, 2 * ipower, 2) * h)) * h # trapezoidal approximations # T2n = 0.5 * (Tn + Un) = 0.5*Tn + Un5 rom[two, 0] = 0.5 * rom[one, 0] + Un5 fp[i] = 4 * fp[i - 1] # Richardson extrapolation for k in xrange(i): rom[two, k + 1] = rom[two, k] + \ (rom[two, k] - rom[one, k]) / (fp[k] - 1) Ih1 = Ih2 Ih2 = Ih4 Ih4 = rom[two, i] if (2 <= i): res, abserr = dea3(Ih1, Ih2, Ih4) # Ih4 = res if (abserr <= max(abseps, releps * abs(res))): break # rom(1,1:i) = rom(2,1:i) two = one one = (one + 1) % 2 ipower *= 2 return res, abserr def h_roots(n, method='newton'): ''' Returns the roots (x) of the nth order Hermite polynomial, H_n(x), and weights (w) to use in Gaussian Quadrature over [-inf,inf] with weighting function exp(-x**2). Parameters ---------- n : integer number of roots method : 'newton' or 'eigenvalue' uses Newton Raphson to find zeros of the Hermite polynomial (Fast) or eigenvalue of the jacobi matrix (Slow) to obtain the nodes and weights, respectively. Returns ------- x : ndarray roots w : ndarray weights Example ------- >>> import numpy as np >>> [x,w] = h_roots(10) >>> np.sum(x*w) -5.2516042729766621e-19 See also -------- qrule, gaussq References ---------- [1] Golub, G. H. and Welsch, J. H. (1969) 'Calculation of Gaussian Quadrature Rules' Mathematics of Computation, vol 23,page 221-230, [2]. 
Stroud and Secrest (1966), 'gaussian quadrature formulas', prentice-hall, Englewood cliffs, n.j. ''' if not method.startswith('n'): return ort.h_roots(n) else: sqrt = np.sqrt max_iter = 10 releps = 3e-14 C = [9.084064e-01, 5.214976e-02, 2.579930e-03, 3.986126e-03] # PIM4=0.7511255444649425 PIM4 = np.pi ** (-1. / 4) # The roots are symmetric about the origin, so we have to # find only half of them. m = int(np.fix((n + 1) / 2)) # Initial approximations to the roots go into z. anu = 2.0 * n + 1 rhs = np.arange(3, 4 * m, 4) * np.pi / anu r3 = rhs ** (1. / 3) r2 = r3 ** 2 theta = r3 * (C[0] + r2 * (C[1] + r2 * (C[2] + r2 * C[3]))) z = sqrt(anu) * np.cos(theta) L = zeros((3, len(z))) k0 = 0 kp1 = 1 for _its in xrange(max_iter): # Newtons method carried out simultaneously on the roots. L[k0, :] = 0 L[kp1, :] = PIM4 for j in xrange(1, n + 1): # Loop up the recurrence relation to get the Hermite # polynomials evaluated at z. km1 = k0 k0 = kp1 kp1 = np.mod(kp1 + 1, 3) L[kp1, :] = (z * sqrt(2 / j) * L[k0, :] - np.sqrt((j - 1) / j) * L[km1, :]) # L now contains the desired Hermite polynomials. # We next compute pp, the derivatives, # by the relation (4.5.21) using p2, the polynomials # of one lower order. pp = sqrt(2 * n) * L[k0, :] dz = L[kp1, :] / pp z = z - dz # Newtons formula. if not np.any(abs(dz) > releps): break else: warnings.warn('too many iterations!') x = np.empty(n) w = np.empty(n) x[0:m] = z # Store the root x[n - 1:n - m - 1:-1] = -z # and its symmetric counterpart. w[0:m] = 2. / pp ** 2 # Compute the weight w[n - 1:n - m - 1:-1] = w[0:m] # and its symmetric counterpart. return x, w def j_roots(n, alpha, beta, method='newton'): ''' Returns the roots of the nth order Jacobi polynomial, P^(alpha,beta)_n(x) and weights (w) to use in Gaussian Quadrature over [-1,1] with weighting function (1-x)**alpha (1+x)**beta with alpha,beta > -1. 
Parameters ---------- n : integer number of roots alpha,beta : scalars defining shape of Jacobi polynomial method : 'newton' or 'eigenvalue' uses Newton Raphson to find zeros of the Hermite polynomial (Fast) or eigenvalue of the jacobi matrix (Slow) to obtain the nodes and weights, respectively. Returns ------- x : ndarray roots w : ndarray weights Example -------- >>> [x,w]= j_roots(10,0,0) >>> sum(x*w) 2.7755575615628914e-16 See also -------- qrule, gaussq Reference --------- [1] Golub, G. H. and Welsch, J. H. (1969) 'Calculation of Gaussian Quadrature Rules' Mathematics of Computation, vol 23,page 221-230, [2]. Stroud and Secrest (1966), 'gaussian quadrature formulas', prentice-hall, Englewood cliffs, n.j. ''' if not method.startswith('n'): [x, w] = ort.j_roots(n, alpha, beta) else: max_iter = 10 releps = 3e-14 # Initial approximations to the roots go into z. alfbet = alpha + beta z = np.cos(np.pi * (np.arange(1, n + 1) - 0.25 + 0.5 * alpha) / (n + 0.5 * (alfbet + 1))) L = zeros((3, len(z))) k0 = 0 kp1 = 1 for _its in xrange(max_iter): # Newton's method carried out simultaneously on the roots. tmp = 2 + alfbet L[k0, :] = 1 L[kp1, :] = (alpha - beta + tmp * z) / 2 for j in xrange(2, n + 1): # Loop up the recurrence relation to get the Jacobi # polynomials evaluated at z. km1 = k0 k0 = kp1 kp1 = np.mod(kp1 + 1, 3) a = 2. * j * (j + alfbet) * tmp tmp = tmp + 2 c = 2 * (j - 1 + alpha) * (j - 1 + beta) * tmp b = (tmp - 1) * (alpha ** 2 - beta ** 2 + tmp * (tmp - 2) * z) L[kp1, :] = (b * L[k0, :] - c * L[km1, :]) / a # L now contains the desired Jacobi polynomials. # We next compute pp, the derivatives with a standard # relation involving the polynomials of one lower order. pp = ((n * (alpha - beta - tmp * z) * L[kp1, :] + 2 * (n + alpha) * (n + beta) * L[k0, :]) / (tmp * (1 - z ** 2))) dz = L[kp1, :] / pp z = z - dz # Newton's formula. 
if not any(abs(dz) > releps * abs(z)): break else: warnings.warn('too many iterations in jrule') x = z # %Store the root and the weight. f = (sp.gammaln(alpha + n) + sp.gammaln(beta + n) - sp.gammaln(n + 1) - sp.gammaln(alpha + beta + n + 1)) w = (np.exp(f) * tmp * 2 ** alfbet / (pp * L[k0, :])) return x, w def la_roots(n, alpha=0, method='newton'): ''' Returns the roots (x) of the nth order generalized (associated) Laguerre polynomial, L^(alpha)_n(x), and weights (w) to use in Gaussian quadrature over [0,inf] with weighting function exp(-x) x**alpha with alpha > -1. Parameters ---------- n : integer number of roots method : 'newton' or 'eigenvalue' uses Newton Raphson to find zeros of the Laguerre polynomial (Fast) or eigenvalue of the jacobi matrix (Slow) to obtain the nodes and weights, respectively. Returns ------- x : ndarray roots w : ndarray weights Example ------- >>> import numpy as np >>> [x,w] = h_roots(10) >>> np.sum(x*w) -5.2516042729766621e-19 See also -------- qrule, gaussq References ---------- [1] Golub, G. H. and Welsch, J. H. (1969) 'Calculation of Gaussian Quadrature Rules' Mathematics of Computation, vol 23,page 221-230, [2]. Stroud and Secrest (1966), 'gaussian quadrature formulas', prentice-hall, Englewood cliffs, n.j. ''' if alpha <= -1: raise ValueError('alpha must be greater than -1') if not method.startswith('n'): return ort.la_roots(n, alpha) else: max_iter = 10 releps = 3e-14 C = [9.084064e-01, 5.214976e-02, 2.579930e-03, 3.986126e-03] # Initial approximations to the roots go into z. anu = 4.0 * n + 2.0 * alpha + 2.0 rhs = np.arange(4 * n - 1, 2, -4) * np.pi / anu r3 = rhs ** (1. / 3) r2 = r3 ** 2 theta = r3 * (C[0] + r2 * (C[1] + r2 * (C[2] + r2 * C[3]))) z = anu * np.cos(theta) ** 2 dz = zeros(len(z)) L = zeros((3, len(z))) Lp = zeros((1, len(z))) pp = zeros((1, len(z))) k0 = 0 kp1 = 1 k = slice(len(z)) for _its in xrange(max_iter): # Newton's method carried out simultaneously on the roots. L[k0, k] = 0. L[kp1, k] = 1. 
for jj in xrange(1, n + 1): # Loop up the recurrence relation to get the Laguerre # polynomials evaluated at z. km1 = k0 k0 = kp1 kp1 = np.mod(kp1 + 1, 3) L[kp1, k] = ((2 * jj - 1 + alpha - z[k]) * L[ k0, k] - (jj - 1 + alpha) * L[km1, k]) / jj # end # L now contains the desired Laguerre polynomials. # We next compute pp, the derivatives with a standard # relation involving the polynomials of one lower order. Lp[k] = L[k0, k] pp[k] = (n * L[kp1, k] - (n + alpha) * Lp[k]) / z[k] dz[k] = L[kp1, k] / pp[k] z[k] = z[k] - dz[k] # % Newton?s formula. # k = find((abs(dz) > releps.*z)) if not np.any(abs(dz) > releps): break else: warnings.warn('too many iterations!') x = z w = -np.exp(sp.gammaln(alpha + n) - sp.gammaln(n)) / (pp * n * Lp) return x, w def p_roots(n, method='newton', a=-1, b=1): ''' Returns the roots (x) of the nth order Legendre polynomial, P_n(x), and weights (w) to use in Gaussian Quadrature over [-1,1] with weighting function 1. Parameters ---------- n : integer number of roots method : 'newton' or 'eigenvalue' uses Newton Raphson to find zeros of the Hermite polynomial (Fast) or eigenvalue of the jacobi matrix (Slow) to obtain the nodes and weights, respectively. Returns ------- x : ndarray roots w : ndarray weights Example ------- Integral of exp(x) from a = 0 to b = 3 is: exp(3)-exp(0)= >>> import numpy as np >>> [x,w] = p_roots(11,a=0,b=3) >>> np.sum(np.exp(x)*w) 19.085536923187668 See also -------- quadg. References ---------- [1] Davis and Rabinowitz (1975) 'Methods of Numerical Integration', page 365, Academic Press. [2] Golub, G. H. and Welsch, J. H. (1969) 'Calculation of Gaussian Quadrature Rules' Mathematics of Computation, vol 23,page 221-230, [3] Stroud and Secrest (1966), 'gaussian quadrature formulas', prentice-hall, Englewood cliffs, n.j. 
''' if not method.startswith('n'): x, w = ort.p_roots(n) else: m = int(np.fix((n + 1) / 2)) mm = 4 * m - 1 t = (np.pi / (4 * n + 2)) * np.arange(3, mm + 1, 4) nn = (1 - (1 - 1 / n) / (8 * n * n)) xo = nn * np.cos(t) if method.endswith('1'): # Compute the zeros of the N+1 Legendre Polynomial # using the recursion relation and the Newton-Raphson method # Legendre-Gauss Polynomials L = zeros((3, m)) # Derivative of LGP Lp = zeros((m,)) dx = zeros((m,)) releps = 1e-15 max_iter = 100 # Compute the zeros of the N+1 Legendre Polynomial # using the recursion relation and the Newton-Raphson method # Iterate until new points are uniformly within epsilon of old # points k = slice(m) k0 = 0 kp1 = 1 for _ix in xrange(max_iter): L[k0, k] = 1 L[kp1, k] = xo[k] for jj in xrange(2, n + 1): km1 = k0 k0 = kp1 kp1 = np.mod(k0 + 1, 3) L[kp1, k] = ((2 * jj - 1) * xo[k] * L[ k0, k] - (jj - 1) * L[km1, k]) / jj Lp[k] = n * (L[k0, k] - xo[k] * L[kp1, k]) / (1 - xo[k] ** 2) dx[k] = L[kp1, k] / Lp[k] xo[k] = xo[k] - dx[k] k, = np.nonzero((abs(dx) > releps * np.abs(xo))) if len(k) == 0: break else: warnings.warn('Too many iterations!') x = -xo w = 2. / ((1 - x ** 2) * (Lp ** 2)) else: # Algorithm given by Davis and Rabinowitz in 'Methods # of Numerical Integration', page 365, Academic Press, 1975. e1 = n * (n + 1) for _j in xrange(2): pkm1 = 1 pk = xo for k in xrange(2, n + 1): t1 = xo * pk pkp1 = t1 - pkm1 - (t1 - pkm1) / k + t1 pkm1 = pk pk = pkp1 den = 1. - xo * xo d1 = n * (pkm1 - xo * pk) dpn = d1 / den d2pn = (2. * xo * dpn - e1 * pk) / den d3pn = (4. * xo * d2pn + (2 - e1) * dpn) / den d4pn = (6. 
* xo * d3pn + (6 - e1) * d2pn) / den u = pk / dpn v = d2pn / dpn h = (-u * (1 + (.5 * u) * (v + u * (v * v - u * d3pn / (3 * dpn))))) p = (pk + h * (dpn + (.5 * h) * (d2pn + (h / 3) * (d3pn + .25 * h * d4pn)))) dp = dpn + h * (d2pn + (.5 * h) * (d3pn + h * d4pn / 3)) h = h - p / dp xo = xo + h x = -xo - h fx = (d1 - h * e1 * (pk + (h / 2) * (dpn + (h / 3) * (d2pn + (h / 4) * (d3pn + (.2 * h) * d4pn))))) w = 2 * (1 - x ** 2) / (fx ** 2) if (m + m) > n: x[m - 1] = 0.0 if not ((m + m) == n): m = m - 1 x = np.hstack((x, -x[m - 1::-1])) w = np.hstack((w, w[m - 1::-1])) if (a != -1) | (b != 1): # Linear map from[-1,1] to [a,b] dh = (b - a) / 2 x = dh * (x + 1) + a w = w * dh return x, w def qrule(n, wfun=1, alpha=0, beta=0): ''' Return nodes and weights for Gaussian quadratures. Parameters ---------- n : integer number of base points wfun : integer defining the weight function, p(x). (default wfun = 1) 1,11,21: p(x) = 1 a =-1, b = 1 Gauss-Legendre 2,12 : p(x) = exp(-x^2) a =-inf, b = inf Hermite 3,13 : p(x) = x^alpha*exp(-x) a = 0, b = inf Laguerre 4,14 : p(x) = (x-a)^alpha*(b-x)^beta a =-1, b = 1 Jacobi 5 : p(x) = 1/sqrt((x-a)*(b-x)), a =-1, b = 1 Chebyshev 1'st kind 6 : p(x) = sqrt((x-a)*(b-x)), a =-1, b = 1 Chebyshev 2'nd kind 7 : p(x) = sqrt((x-a)/(b-x)), a = 0, b = 1 8 : p(x) = 1/sqrt(b-x), a = 0, b = 1 9 : p(x) = sqrt(b-x), a = 0, b = 1 Returns ------- bp = base points (abscissas) wf = weight factors The Gaussian Quadrature integrates a (2n-1)th order polynomial exactly and the integral is of the form b n Int ( p(x)* F(x) ) dx = Sum ( wf_j* F( bp_j ) ) a j=1 where p(x) is the weight function. 
For Jacobi and Laguerre: alpha, beta >-1 (default alpha=beta=0) Examples: --------- >>> [bp,wf] = qrule(10) >>> sum(bp**2*wf) # integral of x^2 from a = -1 to b = 1 0.66666666666666641 >>> [bp,wf] = qrule(10,2) >>> sum(bp**2*wf) # integral of exp(-x.^2)*x.^2 from a = -inf to b = inf 0.88622692545275772 >>> [bp,wf] = qrule(10,4,1,2) >>> (bp*wf).sum() # integral of (x+1)*(1-x)^2 from a = -1 to b = 1 0.26666666666666755 See also -------- gaussq Reference --------- Abromowitz and Stegun (1954) (for method 5 to 9) ''' if (alpha <= -1) | (beta <= -1): raise ValueError('alpha and beta must be greater than -1') if wfun == 1: # Gauss-Legendre [bp, wf] = p_roots(n) elif wfun == 2: # Hermite [bp, wf] = h_roots(n) elif wfun == 3: # Generalized Laguerre [bp, wf] = la_roots(n, alpha) elif wfun == 4: # Gauss-Jacobi [bp, wf] = j_roots(n, alpha, beta) elif wfun == 5: # p(x)=1/sqrt((x-a)*(b-x)), a=-1 and b=1 (default) jj = np.arange(1, n + 1) wf = ones(n) * np.pi / n bp = np.cos((2 * jj - 1) * np.pi / (2 * n)) elif wfun == 6: # p(x)=sqrt((x-a)*(b-x)), a=-1 and b=1 jj = np.arange(1, n + 1) xj = jj * np.pi / (n + 1) wf = np.pi / (n + 1) * np.sin(xj) ** 2 bp = np.cos(xj) elif wfun == 7: # p(x)=sqrt((x-a)/(b-x)), a=0 and b=1 jj = np.arange(1, n + 1) xj = (jj - 0.5) * pi / (2 * n + 1) bp = np.cos(xj) ** 2 wf = 2 * np.pi * bp / (2 * n + 1) elif wfun == 8: # p(x)=1/sqrt(b-x), a=0 and b=1 [bp1, wf1] = p_roots(2 * n) k, = np.where(0 <= bp1) wf = 2 * wf1[k] bp = 1 - bp1[k] ** 2 elif wfun == 9: # p(x)=np.sqrt(b-x), a=0 and b=1 [bp1, wf1] = p_roots(2 * n + 1) k, = np.where(0 < bp1) wf = 2 * bp1[k] ** 2 * wf1[k] bp = 1 - bp1[k] ** 2 else: raise ValueError('unknown weight function') return bp, wf class _Gaussq(object): ''' Numerically evaluate integral, Gauss quadrature. Parameters ---------- fun : callable a,b : array-like lower and upper integration limits, respectively. releps, abseps : real scalars, optional relative and absolute tolerance, respectively. (default releps=abseps=1e-3). 
wfun : scalar integer, optional defining the weight function, p(x). (default wfun = 1) 1 : p(x) = 1 a =-1, b = 1 Gauss-Legendre 2 : p(x) = exp(-x^2) a =-inf, b = inf Hermite 3 : p(x) = x^alpha*exp(-x) a = 0, b = inf Laguerre 4 : p(x) = (x-a)^alpha*(b-x)^beta a =-1, b = 1 Jacobi 5 : p(x) = 1/sqrt((x-a)*(b-x)), a =-1, b = 1 Chebyshev 1'st kind 6 : p(x) = sqrt((x-a)*(b-x)), a =-1, b = 1 Chebyshev 2'nd kind 7 : p(x) = sqrt((x-a)/(b-x)), a = 0, b = 1 8 : p(x) = 1/sqrt(b-x), a = 0, b = 1 9 : p(x) = sqrt(b-x), a = 0, b = 1 trace : bool, optional If non-zero a point plot of the integrand (default False). gn : scalar integer number of base points to start the integration with (default 2). alpha, beta : real scalars, optional Shape parameters of Laguerre or Jacobi weight function (alpha,beta>-1) (default alpha=beta=0) Returns ------- val : ndarray evaluated integral err : ndarray error estimate, absolute tolerance abs(int-intold) Notes ----- GAUSSQ numerically evaluate integral using a Gauss quadrature. The Quadrature integrates a (2m-1)th order polynomial exactly and the integral is of the form b Int (p(x)* Fun(x)) dx a GAUSSQ is vectorized to accept integration limits A, B and coefficients P1,P2,...Pn, as matrices or scalars and the result is the common size of A, B and P1,P2,...,Pn. Examples --------- integration of x**2 from 0 to 2 and from 1 to 4 >>> from scitools import numpyutils as npt >>> A = [0, 1]; B = [2,4] >>> fun = npt.wrap2callable('x**2') >>> [val1,err1] = gaussq(fun,A,B) >>> val1 array([ 2.6666667, 21. 
]) >>> err1 array([ 1.7763568e-15, 1.0658141e-14]) Integration of x^2*exp(-x) from zero to infinity: >>> fun2 = npt.wrap2callable('1') >>> val2, err2 = gaussq(fun2, 0, npt.inf, wfun=3, alpha=2) >>> val3, err3 = gaussq(lambda x: x**2,0, npt.inf, wfun=3, alpha=0) >>> val2, err2 (array([ 2.]), array([ 6.6613381e-15])) >>> val3, err3 (array([ 2.]), array([ 1.7763568e-15])) Integrate humps from 0 to 2 and from 1 to 4 >>> val4, err4 = gaussq(humps,A,B) See also -------- qrule gaussq2d ''' def _get_dx(self, wfun, jacob, alpha, beta): if wfun in [1, 2, 3, 7]: dx = jacob elif wfun == 4: dx = jacob ** (alpha + beta + 1) elif wfun == 5: dx = ones((np.size(jacob), 1)) elif wfun == 6: dx = jacob ** 2 elif wfun == 8: dx = sqrt(jacob) elif wfun == 9: dx = sqrt(jacob) ** 3 else: raise ValueError('unknown option') return dx.ravel() def _points_and_weights(self, gn, wfun, alpha, beta): global _POINTS_AND_WEIGHTS name = 'wfun%d_%d_%g_%g' % (wfun, gn, alpha, beta) x_and_w = _POINTS_AND_WEIGHTS.setdefault(name, []) if len(x_and_w) == 0: x_and_w.extend(qrule(gn, wfun, alpha, beta)) xn, w = x_and_w return xn, w def _initialize_trace(self, max_iter): if self.trace: self.x_trace = [0] * max_iter self.y_trace = [0] * max_iter def _plot_trace(self, x, y): if self.trace: self.x_trace.append(x.ravel()) self.y_trace.append(y.ravel()) hfig = plt.plot(x, y, 'r.') plt.setp(hfig, 'color', 'b') def _plot_final_trace(self): if self.trace > 0: plt.clf() plt.plot(np.hstack(self.x_trace), np.hstack(self.y_trace), '+') def _get_jacob(self, wfun, A, B): if wfun in [2, 3]: nk = np.size(A) jacob = ones((nk, 1)) else: jacob = (B - A) * 0.5 if wfun in [7, 8, 9]: jacob = jacob * 2 return jacob def _warn(self, k, a_shape): nk = len(k) if nk > 1: if (nk == np.prod(a_shape)): tmptxt = 'All integrals did not converge' else: tmptxt = '%d integrals did not converge' % (nk, ) tmptxt = tmptxt + '--singularities likely!' else: tmptxt = 'Integral did not converge--singularity likely!' 
warnings.warn(tmptxt) def _initialize(self, wfun, a, b, args): args = np.broadcast_arrays(*np.atleast_1d(a, b, *args)) a_shape = args[0].shape args = map(lambda x: np.reshape(x, (-1, 1)), args) A, B = args[:2] args = args[2:] if wfun in [2, 3]: A = zeros((A.size, 1)) return A, B, args, a_shape def __call__(self, fun, a, b, releps=1e-3, abseps=1e-3, alpha=0, beta=0, wfun=1, trace=False, args=(), max_iter=11): self.trace = trace gn = 2 A, B, args, a_shape = self._initialize(wfun, a, b, args) jacob = self._get_jacob(wfun, A, B) shift = int(wfun in [1, 4, 5, 6]) dx = self._get_dx(wfun, jacob, alpha, beta) self._initialize_trace(max_iter) # Break out of the iteration loop for three reasons: # 1) the last update is very small (compared to int and to releps) # 2) There are more than 11 iterations. This should NEVER happen. dtype = np.result_type(fun((A+B)*0.5, *args)) nk = np.prod(a_shape) # # of integrals we have to compute k = np.arange(nk) opts = (nk, dtype) val, val_old, abserr = zeros(*opts), ones(*opts), zeros(*opts) for i in xrange(max_iter): xn, w = self._points_and_weights(gn, wfun, alpha, beta) x = (xn + shift) * jacob[k, :] + A[k, :] pi = [xi[k, :] for xi in args] y = fun(x, *pi) self._plot_trace(x, y) val[k] = np.sum(w * y, axis=1) * dx[k] # do the integration if any(np.isnan(val)): val[np.isnan(val)] = val_old[np.isnan(val)] if 1 < i: abserr[k] = abs(val_old[k] - val[k]) # absolute tolerance k, = np.where(abserr > np.maximum(abs(releps * val), abseps)) nk = len(k) # of integrals we have to compute again if nk == 0: break val_old[k] = val[k] gn *= 2 # double the # of basepoints and weights else: self._warn(k, a_shape) # make sure int is the same size as the integration limits val.shape = a_shape abserr.shape = a_shape self._plot_final_trace() return val, abserr gaussq = _Gaussq() def richardson(Q, k): # license BSD # Richardson extrapolation with parameter estimation c = np.real((Q[k - 1] - Q[k - 2]) / (Q[k] - Q[k - 1])) - 1. 
# The lower bound 0.07 admits the singularity x.^-0.9 c = max(c, 0.07) R = Q[k] + (Q[k] - Q[k - 1]) / c return R def quadgr(fun, a, b, abseps=1e-5, max_iter=17): ''' Gauss-Legendre quadrature with Richardson extrapolation. [Q,ERR] = QUADGR(FUN,A,B,TOL) approximates the integral of a function FUN from A to B with an absolute error tolerance TOL. FUN is a function handle and must accept vector arguments. TOL is 1e-6 by default. Q is the integral approximation and ERR is an estimate of the absolute error. QUADGR uses a 12-point Gauss-Legendre quadrature. The error estimate is based on successive interval bisection. Richardson extrapolation accelerates the convergence for some integrals, especially integrals with endpoint singularities. Examples -------- >>> import numpy as np >>> Q, err = quadgr(np.log,0,1) >>> quadgr(np.exp,0,9999*1j*np.pi) (-2.0000000000122662, 2.1933237448479304e-09) >>> quadgr(lambda x: np.sqrt(4-x**2),0,2,1e-12) (3.1415926535897811, 1.5809575870662229e-13) >>> quadgr(lambda x: x**-0.75,0,1) (4.0000000000000266, 5.6843418860808015e-14) >>> quadgr(lambda x: 1./np.sqrt(1-x**2),-1,1) (3.141596056985029, 6.2146261559092864e-06) >>> quadgr(lambda x: np.exp(-x**2),-np.inf,np.inf,1e-9) #% sqrt(pi) (1.7724538509055152, 1.9722334876348668e-11) >>> quadgr(lambda x: np.cos(x)*np.exp(-x),0,np.inf,1e-9) (0.50000000000000044, 7.3296813063450372e-11) See also -------- QUAD, QUADGK ''' # Author: jonas.lundgren@saabgroup.com, 2009. 
license BSD # Order limits (required if infinite limits) if a == b: Q = b - a err = b - a return Q, err elif np.real(a) > np.real(b): reverse = True a, b = b, a else: reverse = False # Infinite limits if np.isinf(a) | np.isinf(b): # Check real limits if ~ np.isreal(a) | ~np.isreal(b) | np.isnan(a) | np.isnan(b): raise ValueError('Infinite intervals must be real.') # Change of variable if np.isfinite(a) & np.isinf(b): # a to inf fun1 = lambda t: fun(a + t / (1 - t)) / (1 - t) ** 2 [Q, err] = quadgr(fun1, 0, 1, abseps) elif np.isinf(a) & np.isfinite(b): # -inf to b fun2 = lambda t: fun(b + t / (1 + t)) / (1 + t) ** 2 [Q, err] = quadgr(fun2, -1, 0, abseps) else: # -inf to inf fun1 = lambda t: fun(t / (1 - t)) / (1 - t) ** 2 fun2 = lambda t: fun(t / (1 + t)) / (1 + t) ** 2 [Q1, err1] = quadgr(fun1, 0, 1, abseps / 2) [Q2, err2] = quadgr(fun2, -1, 0, abseps / 2) Q = Q1 + Q2 err = err1 + err2 # Reverse direction if reverse: Q = -Q return Q, err # Gauss-Legendre quadrature (12-point) xq = np.asarray( [0.12523340851146894, 0.36783149899818018, 0.58731795428661748, 0.76990267419430469, 0.9041172563704748, 0.98156063424671924]) wq = np.asarray( [0.24914704581340288, 0.23349253653835478, 0.20316742672306584, 0.16007832854334636, 0.10693932599531818, 0.047175336386511842]) xq = np.hstack((xq, -xq)) wq = np.hstack((wq, wq)) nq = len(xq) dtype = np.result_type(fun(a), fun(b)) # Initiate vectors Q0 = zeros(max_iter, dtype=dtype) # Quadrature Q1 = zeros(max_iter, dtype=dtype) # First Richardson extrapolation Q2 = zeros(max_iter, dtype=dtype) # Second Richardson extrapolation # One interval hh = (b - a) / 2 # Half interval length x = (a + b) / 2 + hh * xq # Nodes # Quadrature Q0[0] = hh * np.sum(wq * fun(x), axis=0) # Successive bisection of intervals for k in xrange(1, max_iter): # Interval bisection hh = hh / 2 x = np.hstack([x + a, x + b]) / 2 # Quadrature Q0[k] = hh * \ np.sum(wq * np.sum(np.reshape(fun(x), (-1, nq)), axis=0), axis=0) # Richardson extrapolation if k >= 5: Q1[k] 
= richardson(Q0, k) Q2[k] = richardson(Q1, k) elif k >= 3: Q1[k] = richardson(Q0, k) # Estimate absolute error if k >= 6: Qv = np.hstack((Q0[k], Q1[k], Q2[k])) Qw = np.hstack((Q0[k - 1], Q1[k - 1], Q2[k - 1])) elif k >= 4: Qv = np.hstack((Q0[k], Q1[k])) Qw = np.hstack((Q0[k - 1], Q1[k - 1])) else: Qv = np.atleast_1d(Q0[k]) Qw = Q0[k - 1] errors = np.atleast_1d(abs(Qv - Qw)) j = errors.argmin() err = errors[j] Q = Qv[j] if k >= 2: # and not iscomplex: _val, err1 = dea3(Q0[k - 2], Q0[k - 1], Q0[k]) # Convergence if (err < abseps) | ~np.isfinite(Q): break else: warnings.warn('Max number of iterations reached without convergence.') if ~ np.isfinite(Q): warnings.warn('Integral approximation is Infinite or NaN.') # The error estimate should not be zero err = err + 2 * np.finfo(Q).eps # Reverse direction if reverse: Q = -Q return Q, err def boole(y, x): a, b = x[0], x[-1] n = len(x) h = (b - a) / (n - 1) return (2 * h / 45) * (7 * (y[0] + y[-1]) + 12 * np.sum(y[2:n - 1:4]) + 32 * np.sum(y[1:n - 1:2]) + 14 * np.sum(y[4:n - 3:4])) def qdemo(f, a, b, kmax=9, plot_error=False): ''' Compares different quadrature rules. Parameters ---------- f : callable function a,b : scalars lower and upper integration limits Details ------- qdemo(f,a,b) computes and compares various approximations to the integral of f from a to b. Three approximations are used, the composite trapezoid, Simpson's, and Boole's rules, all with equal length subintervals. In a case like qdemo(exp,0,3) one can see the expected convergence rates for each of the three methods. In a case like qdemo(sqrt,0,3), the convergence rate is limited not by the method, but by the singularity of the integrand. 
Example ------- >>> import numpy as np >>> qdemo(np.exp,0,3) true value = 19.08553692 ftn, Boole, Chebychev evals approx error approx error 3, 19.4008539142, 0.3153169910, 19.5061466023, 0.4206096791 5, 19.0910191534, 0.0054822302, 19.0910191534, 0.0054822302 9, 19.0856414320, 0.0001045088, 19.0855374134, 0.0000004902 17, 19.0855386464, 0.0000017232, 19.0855369232, 0.0000000000 33, 19.0855369505, 0.0000000273, 19.0855369232, 0.0000000000 65, 19.0855369236, 0.0000000004, 19.0855369232, 0.0000000000 129, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 257, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 513, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 ftn, Clenshaw-Curtis, Gauss-Legendre evals approx error approx error 3, 19.5061466023, 0.4206096791, 19.0803304585, 0.0052064647 5, 19.0834145766, 0.0021223465, 19.0855365951, 0.0000003281 9, 19.0855369150, 0.0000000082, 19.0855369232, 0.0000000000 17, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 33, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 65, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 129, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 257, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 513, 19.0855369232, 0.0000000000, 19.0855369232, 0.0000000000 ftn, Simps, Trapz evals approx error approx error 3, 19.5061466023, 0.4206096791, 22.5366862979, 3.4511493747 5, 19.1169646189, 0.0314276957, 19.9718950387, 0.8863581155 9, 19.0875991312, 0.0020622080, 19.3086731081, 0.2231361849 17, 19.0856674267, 0.0001305035, 19.1414188470, 0.0558819239 33, 19.0855451052, 0.0000081821, 19.0995135407, 0.0139766175 65, 19.0855374350, 0.0000005118, 19.0890314614, 0.0034945382 129, 19.0855369552, 0.0000000320, 19.0864105817, 0.0008736585 257, 19.0855369252, 0.0000000020, 19.0857553393, 0.0002184161 513, 19.0855369233, 0.0000000001, 19.0855915273, 0.0000546041 ''' true_val, _tol = intg.quad(f, a, b) print('true value = %12.8f' % (true_val,)) neval = 
zeros(kmax, dtype=int) vals_dic = {} err_dic = {} # try various approximations methods = [trapz, simps, boole, ] for k in xrange(kmax): n = 2 ** (k + 1) + 1 neval[k] = n x = np.linspace(a, b, n) y = f(x) for method in methods: name = method.__name__.title() q = method(y, x) vals_dic.setdefault(name, []).append(q) err_dic.setdefault(name, []).append(abs(q - true_val)) name = 'Clenshaw-Curtis' q, _ec3 = clencurt(f, a, b, (n - 1) / 2) vals_dic.setdefault(name, []).append(q[0]) err_dic.setdefault(name, []).append(abs(q[0] - true_val)) name = 'Chebychev' ck = np.polynomial.chebyshev.chebfit(x, y, deg=min(n-1, 36)) cki = np.polynomial.chebyshev.chebint(ck) q = np.polynomial.chebyshev.chebval(x[-1], cki) vals_dic.setdefault(name, []).append(q) err_dic.setdefault(name, []).append(abs(q - true_val)) # ck = chebfit(f,n,a,b) # q = chebval(b,chebint(ck,a,b),a,b) # qc2[k] = q; ec2[k] = abs(q - true) name = 'Gauss-Legendre' # quadrature q = intg.fixed_quad(f, a, b, n=n)[0] # [x, w]=qrule(n,1) # x = (b-a)/2*x + (a+b)/2 % Transform base points X. # w = (b-a)/2*w % Adjust weigths. 
# q = sum(feval(f,x)*w) vals_dic.setdefault(name, []).append(q) err_dic.setdefault(name, []).append(abs(q - true_val)) # display results names = sorted(vals_dic.keys()) num_cols = 2 formats = ['%4.0f, ', ] + ['%10.10f, ', ] * num_cols * 2 formats[-1] = formats[-1].split(',')[0] formats_h = ['%4s, ', ] + ['%20s, ', ] * num_cols formats_h[-1] = formats_h[-1].split(',')[0] headers = ['evals'] + ['%12s %12s' % ('approx', 'error')] * num_cols while len(names) > 0: print(''.join(fi % t for fi, t in zip(formats_h, ['ftn'] + names[:num_cols]))) print(' '.join(headers)) data = [neval] for name in names[:num_cols]: data.append(vals_dic[name]) data.append(err_dic[name]) data = np.vstack(tuple(data)).T for k in xrange(kmax): tmp = data[k].tolist() print(''.join(fi % t for fi, t in zip(formats, tmp))) if plot_error: plt.figure(0) for name in names[:num_cols]: plt.loglog(neval, err_dic[name], label=name) names = names[num_cols:] if plot_error: plt.xlabel('number of function evaluations') plt.ylabel('error') plt.legend() plt.show('hold') def main(): # val, err = clencurt(np.exp, 0, 2) # valt = np.exp(2) - np.exp(0) # [Q, err] = quadgr(lambda x: x ** 2, 1, 4, 1e-9) # [Q, err] = quadgr(humps, 1, 4, 1e-9) # # [x, w] = h_roots(11, 'newton') # sum(w) # [x2, w2] = la_roots(11, 1, 't') # # from scitools import numpyutils as npu #@UnresolvedImport # fun = npu.wrap2callable('x**2') # p0 = fun(0) # A = [0, 1, 1]; B = [2, 4, 3] # area, err = gaussq(fun, A, B) # # fun = npu.wrap2callable('x**2') # [val1, err1] = gaussq(fun, A, B) # # # Integration of x^2*exp(-x) from zero to infinity: # fun2 = npu.wrap2callable('1') # [val2, err2] = gaussq(fun2, 0, np.inf, wfun=3, alpha=2) # [val2, err2] = gaussq(lambda x: x ** 2, 0, np.inf, wfun=3, alpha=0) # # Integrate humps from 0 to 2 and from 1 to 4 # [val3, err3] = gaussq(humps, A, B) # # [x, w] = p_roots(11, 'newton', 1, 3) # y = np.sum(x ** 2 * w) x = np.linspace(0, np.pi / 2) _q0 = np.trapz(humps(x), x) [q, err] = romberg(humps, 0, np.pi / 2, 1e-4) 
print q, err def test_docstrings(): np.set_printoptions(precision=7) import doctest doctest.testmod() if __name__ == '__main__': test_docstrings() # qdemo(np.exp, 0, 3, plot_error=True) # plt.show('hold') # main()
true
8a4a0520dca6833f05184b35b4a187b6d52a142b
Python
kalpitha/ml
/finalMLLab/6. ngram/pr22.py
UTF-8
467
3.234375
3
[]
no_license
from nltk import word_tokenize

# Read the corpus and tokenize it into words.
# fix: use a context manager so the file handle is closed deterministically.
with open("in1.txt") as source:
    file_content = source.read()
wordlist = word_tokenize(file_content)
print('\nTokens List:\n')
print(wordlist)


def getNGrams(input_list, n):
    """Return every contiguous n-gram of input_list as a list of lists."""
    print('\n', n, '_Grams:\n')
    return [input_list[i:i + n] for i in range(len(input_list) - (n - 1))]


# fix: the original bound the result to `list`, shadowing the builtin.
grams = getNGrams(wordlist, 1)

# Count how often each (space-joined) n-gram occurs; insertion order is
# preserved so the printed dict matches the original output.
frequencies = dict()
for gram in grams:
    joined = ""
    for token in gram:
        joined = joined + token + " "  # NOTE: trailing space kept to preserve output
    print(joined)
    if joined in frequencies:
        frequencies[joined] = frequencies[joined] + 1
    else:
        frequencies[joined] = 1
print(frequencies)
true
409c3380c366db68b1b27786b434a84cf23a8c60
Python
Lmyxxn/JZoffer
/code/21.调整数组顺序使奇数位于偶数前面.py
UTF-8
737
3.359375
3
[]
no_license
class Solution:
    def exchange(self, nums: 'List[int]') -> 'List[int]':
        """Reorder nums in place so every odd number precedes every even one.

        Two-pointer partition: advance `i` past odds, retreat `j` past evens,
        swap when both pointers are stuck. Relative order is NOT preserved.
        The predicate is factored into isOdd so the partition criterion can
        be swapped without touching the pointer logic.

        Note: the annotations are quoted (forward references) because
        `typing.List` is never imported in this file; the unquoted form
        raised NameError at class-definition time.
        """
        i, j = 0, len(nums) - 1
        while i < j:
            if self.isOdd(nums[i]):
                i += 1                      # already on the odd side
            elif not self.isOdd(nums[j]):
                j -= 1                      # already on the even side
            else:
                # nums[i] is even and nums[j] is odd: swap them into place.
                nums[i], nums[j] = nums[j], nums[i]
                i += 1
                j -= 1
        return nums

    def isOdd(self, n):
        """Return True when n's lowest bit is set (n is odd)."""
        return n & 1 == 1
true
e02908c25fd6e7c14c677ea1abeeaa7acb86262b
Python
pianomanzero/python_scripts
/Python3/isitools_dev/cmdhlpr.py
UTF-8
1,021
2.71875
3
[]
no_license
#!/usr/bin/env python from __future__ import print_function from builtins import input from sys import argv import subprocess def splitThisFile(file): workfile = file nodes = open(workfile, "r") listOfLines = nodes.readlines() nodes = [] for L in range(len(listOfLines)): thisline = listOfLines[L] #nodes.append(thisline.split()) nodes.append(thisline) # print(nodes) return nodes def main(): myfile = "mycommands" cmdList = splitThisFile(myfile) if len(argv) < 2: print() for c in range(0,len(cmdList)): print(cmdList[c]) print() else: script, var1 = argv #index = int(var1) if isinstance(int(var1), int): index = int(var1) print(cmdList[index]) else: print("I need an integer") quit() # closing lines for script execution if __name__ == "__main__": import sys # if main is defined, uncomment next line main()
true
315b99150c55c4a953714bf99f1fb385044b2469
Python
vickymzheng/PracticeInterviewQuestions
/maximal_land.py
UTF-8
1,137
3.703125
4
[]
no_license
# Jeff Flake
# I want to buy land, and all of the land is at different prices.
# I want the maximum contiguous amount of land I can get within a certain budget.
# Each plot has its own price but all plots are the same size; a contiguous
# block of land corresponds to a contiguous segment of the price array.
def get_maximal_land_size(prices, budget):
    """Return the length of the longest contiguous run of plots whose total
    price is at most `budget`.

    Standard O(n) sliding window. This fixes two defects in the original:
    it returned 0 whenever the first affordable plot was the LAST element
    (the `starting == n-1` shortcut), and it raised IndexError on an empty
    price list.
    """
    best = 0
    window_cost = 0
    start = 0
    for end, price in enumerate(prices):
        window_cost += price
        # Shrink from the left until the window is affordable again
        # (may shrink to empty if a single plot exceeds the budget).
        while window_cost > budget and start <= end:
            window_cost -= prices[start]
            start += 1
        best = max(best, end - start + 1)
    return best


print(get_maximal_land_size([6], 5))
true
91a48fbac2e38b9f2914b8da86c67f47672fb257
Python
cmdelcarmen/AirBnB_clone_v3
/api/v1/views/users.py
UTF-8
2,119
2.625
3
[ "LicenseRef-scancode-public-domain" ]
permissive
#!/usr/bin/python3
"""users api rest"""
from flask import jsonify, abort, request, make_response
import models
from api.v1.views import app_views
from models import storage
from models.user import User


@app_views.route("/users", methods=["GET"], strict_slashes=False)
def all_users():
    """GET /users — every User object as a JSON list."""
    users = [user.to_dict() for user in storage.all(User).values()]
    return jsonify(users), 200


@app_views.route("/users/<string:user_id>",
                 methods=["GET"], strict_slashes=False)
def user_by_id(user_id):
    """GET /users/<id> — a single User, or 404 if unknown."""
    user = storage.get(User, user_id)
    if not user:
        abort(404)
    return jsonify(user.to_dict()), 200


@app_views.route("/users/<string:user_id>",
                 methods=["DELETE"], strict_slashes=False)
def delete_user_by_id(user_id):
    """DELETE /users/<id> — remove a User, or 404 if unknown."""
    user = storage.get(User, user_id)
    if not user:
        abort(404)
    storage.delete(user)
    storage.save()
    return jsonify({}), 200


@app_views.route("/users/", methods=["POST"], strict_slashes=False)
def create_user():
    """POST /users/ — create a User from a JSON body with email/password."""
    payload = request.get_json()
    if not payload:
        abort(400, "Not a JSON")
    email = payload.get("email")
    if not email:
        abort(400, "Missing email")
    password = payload.get("password")
    if not password:
        abort(400, "Missing password")
    new_user = User()
    new_user.email = email
    new_user.password = password
    new_user.save()
    return jsonify(new_user.to_dict()), 201


@app_views.route("/users/<string:user_id>",
                 methods=["PUT"], strict_slashes=False)
def update_user(user_id):
    """PUT /users/<id> — update mutable fields of an existing User."""
    user_obj = models.storage.get(User, user_id)
    if not user_obj:
        return abort(404)
    payload = request.get_json()
    if not payload:
        abort(400, "Not a JSON")
    # id, email and timestamps are immutable via this endpoint.
    for key, value in payload.items():
        if key not in ('id', 'email', 'created_at', 'updated_at'):
            setattr(user_obj, key, value)
    models.storage.save()
    return jsonify(user_obj.to_dict())
true
e00bafd7633b5f51e778d92753e55ed45f41b4bd
Python
anqicheng/termprojectspring16
/controller.py
UTF-8
3,448
2.78125
3
[]
no_license
import time
import random
import pdb
from threading import Thread


class Arrow(object):
    """One note arrow scrolling up the play field."""

    # State Constants
    WAITING = -1
    APPROACHING = 0
    READY = 1
    HIT = 2
    EXPIRED = 3

    # Animation Constants
    timeInterval = 15  # In terms of ms
    baseXs = [22, 160, 270, 365]
    baseY = 520
    verticalMoveStep = -4
    attackableTop = 45
    attackableBottom = 192

    def isAttackable(self):
        """True while the arrow is inside the attackable band."""
        return self.state == self.READY

    def isTerminated(self):
        """True once the arrow has been hit or has scrolled off the top."""
        return self.state == self.HIT or self.state == self.EXPIRED

    def updateStatus(self):
        # Update the status based on its location; not valid for expired ones.
        # fix: the original used `pass` here, falling through and letting a
        # terminated arrow's state be overwritten — return early instead.
        if self.isTerminated():
            return
        if self.currentY < self.attackableTop:
            self.state = self.EXPIRED
        elif self.currentY < self.attackableBottom:
            self.state = self.READY

    def animate(self):
        """Advance one frame and reschedule until the arrow terminates."""
        if not self.isTerminated():
            self.currentY += self.verticalMoveStep
            self.updateStatus()
            self.view.moveArrow(self.viewId, 0, self.verticalMoveStep)
            self.view.callback(self.timeInterval, self.animate)
        if self.isTerminated():
            self.view.deleteArrow(self.viewId)

    def startAnimation(self):
        """Insert the arrow into the view and start its animation loop."""
        self.viewId = self.view.insertArrow(self.direction, self.currentX, self.currentY)
        self.view.callback(self.timeInterval, self.animate)
        self.state = self.APPROACHING

    def prepare(self):
        """Schedule startAnimation after this arrow's per-beat delay."""
        self.view.callback(self.delay, self.startAnimation)

    def __init__(self, view, direction, delay):
        self.view = view
        self.direction = direction
        self.currentX = self.baseXs[direction]
        self.currentY = self.baseY
        self.state = self.WAITING
        self.delay = delay


class Controller(object):
    """MVC controller mediating between the view and the model."""

    class Runnable(Thread):
        def __init__(self, exe):
            # fix: a Thread subclass must call Thread.__init__ before use.
            Thread.__init__(self)
            self.exe = exe

        def run(self):
            self.exe()

    # Data fields
    view = None   # Link to view
    model = None  # Link to model

    # Message Handlers
    def onMouseClick(self, destination):
        # TO DO: parse the mouse click destination
        if destination == 'game':
            self.goToGame()
        elif destination == 'how':
            pass

    def onKeyStroke(self, key):
        self.keyStroke.add(key)

    # Methods
    def __init__(self):
        self.view = None
        self.model = None
        self.keyStroke = set()

    def start(self):
        self.view.drawWelcome()
        self.view.waitUser()

    def linkView(self, view):
        self.view = view

    def linkModel(self, model):
        self.model = model

    def goToGame(self):
        """Detect beats, spawn arrows on (most) beats, and start playback."""
        beats = self.model.beatDetect('res/music/cmm.wav')
        self.view.drawGame()
        arrs = []
        last = 2
        for i in beats:
            if random.randint(0, 9) <= 2 + last:
                last = 0
                arrs.append(Arrow(self.view, random.randint(0, 3), int(i * 1000)))
            else:
                last = 2
        for now in arrs:
            self.Runnable(now.prepare).run()
        self.soundDemo()

    def respondKeyStroke(self):
        # Returns True if the game should end
        print(self.keyStroke)  # fix: py3 print call (was a py2 statement)
        # fix: `exiting` was only assigned inside the Escape branch, so the
        # final `return exiting` raised UnboundLocalError otherwise.
        exiting = False
        for key in self.keyStroke:
            if key == 'Escape':
                exiting = True
        return exiting

    def soundDemo(self):
        self.model.playSound()
true
28bf2675ca12bfc8487131a4af1a5a6bfca91046
Python
Taycode/tayflutterwave
/tayflutterwave/tayflutterwave.py
UTF-8
3,646
2.71875
3
[ "MIT" ]
permissive
import hashlib
import requests
import json
import base64
from Crypto.Cipher import DES3


class Flutterwave(object):
    """Thin client for the Flutterwave (Rave) payments API.

    Holds the merchant's public/secret key pair and targets the live
    endpoint unless constructed with live=False (sandbox).
    """

    def __init__(self, public_key, secret_key, live=True):
        self.public_key = public_key
        self.secret_key = secret_key
        self.base_url = 'https://api.ravepay.co'
        # if not live set base_url to RavePay sandbox url
        if not live:
            self.base_url = 'https://ravesandboxapi.flutterwave.com'

    @staticmethod
    def __get_key(secret_key):
        """Derive the 3DES encryption key from the merchant secret key:
        first 12 chars of the de-prefixed secret + last 12 hex chars of
        its MD5 digest."""
        hashed_secret_key = hashlib.md5(secret_key.encode("utf-8")).hexdigest()
        hashed_secret_key_last_12 = hashed_secret_key[-12:]
        secret_key_adjusted = secret_key.replace('FLWSECK-', '')
        secret_key_adjusted_first_12 = secret_key_adjusted[:12]
        return secret_key_adjusted_first_12 + hashed_secret_key_last_12

    @staticmethod
    def __encrypt_data(key, plain_text):
        """3DES/ECB-encrypt plain_text (PKCS#5-style padding) and return it
        base64 encoded, as required by the charge endpoint."""
        block_size = 8
        pad_diff = block_size - (len(plain_text) % block_size)
        cipher = DES3.new(key, DES3.MODE_ECB)
        plain_text = "{}{}".format(plain_text, "".join(chr(pad_diff) * pad_diff))
        # cipher.encrypt requires bytes, not str, hence the encode below
        raw = plain_text.encode('utf-8')
        encrypted = base64.b64encode(cipher.encrypt(raw)).decode("utf-8")
        return encrypted

    def pay_via_card(self, data):
        """Initiate a card charge. `data` is the card/charge payload."""
        # fix: work on a copy so the caller's dict is not mutated
        data = dict(data)
        data.update({'PBFPubKey': self.public_key})
        hashed_sec_key = self.__get_key(self.secret_key)
        encrypt_key = self.__encrypt_data(hashed_sec_key, json.dumps(data))
        payload = {
            "PBFPubKey": self.public_key,
            "client": encrypt_key,
            "alg": "3DES-24"
        }
        endpoint = self.base_url + "/flwv3-pug/getpaidx/api/charge"
        response = requests.post(endpoint, json=payload)
        return response.json()

    def validate_payment_with_card(self, transaction_reference, otp):
        """Validate a pending card charge with the OTP sent to the customer."""
        data = {
            "PBFPubKey": self.public_key,
            "transaction_reference": transaction_reference,
            "otp": otp
        }
        endpoint = self.base_url + "/flwv3-pug/getpaidx/api/validatecharge"
        response = requests.post(endpoint, json=data)
        return response.json()

    def verify_payment_with_card(self, txref):
        """Verify the final status of a card transaction by its reference."""
        endpoint = self.base_url + "/flwv3-pug/getpaidx/api/v2/verify"
        data = {
            "txref": txref,
            "SECKEY": self.secret_key
        }
        response = requests.post(endpoint, json=data)
        return response.json()

    def transfer_to_bank(self, data):
        """Initiate a bank transfer; the caller's `data` is left unmodified."""
        # fix: work on a copy so the caller's dict is not mutated
        data = dict(data)
        data.update({'seckey': self.secret_key})
        endpoint = self.base_url + "/v2/gpx/transfers/create"
        response = requests.post(endpoint, json=data)
        return response.json()

    def check_transfer_to_bank(self, reference):
        """Look up the status of a previously initiated transfer."""
        endpoint = self.base_url + "/v2/gpx/transfers"
        querystring = {
            "seckey": self.secret_key,
            "reference": reference
        }
        # fix: GET query parameters belong in `params`; `data=` put them in
        # the request body, which the API ignores for GET requests.
        response = requests.get(endpoint, params=querystring)
        return response.json()
true
4abbbf6e070f54e86b8e13a9cf081008f3c62ea1
Python
globelabs/globe-connect-python
/globe/connect/oauth.py
UTF-8
1,987
2.625
3
[]
no_license
# Globe Oauth API

import pycurl, json, os
from StringIO import StringIO as BytesIO

# GLOBAL SECTION
# request endpoints
SUBS_URL = "https://developer.globelabs.com.ph/dialog/oauth?app_id=%s"
TOKEN_URL = "https://developer.globelabs.com.ph/oauth/access_token?app_id=%s&app_secret=%s&code=%s"


class Oauth:
    """OAuth helper for the Globe Labs developer API."""

    def __init__(self, key, secret):
        # remember the application credentials
        #
        # @param string key    app key
        # @param string secret app secret
        self.key = key
        self.secret = secret

    def getRedirectUrl(self):
        """Return the URL the end user must visit to authorize the app."""
        return SUBS_URL % (self.key)

    def getAccessToken(self, code):
        """Exchange an authentication code for an access token.

        @param string code authentication code
        @return string raw response body (access token payload)
        """
        # request url with key, secret and the one-time code filled in
        token_url = TOKEN_URL % (self.key, self.secret, code)

        # absolute path of the bundled CA certificate
        cert_path = os.getcwd() + "/globe/globe.crt"

        # buffer that will receive the raw response body
        body = BytesIO()

        handle = pycurl.Curl()
        handle.setopt(handle.URL, token_url)
        # this is a POST request
        handle.setopt(pycurl.POST, 1)
        # verify the peer against our bundled certificate
        handle.setopt(pycurl.SSL_VERIFYPEER, 1)
        handle.setopt(pycurl.SSL_VERIFYHOST, 2)
        handle.setopt(pycurl.CAINFO, cert_path)
        # collect the response into the buffer
        handle.setopt(handle.WRITEDATA, body)
        handle.perform()

        return body.getvalue()
true
214b03c929e203c33bd4adde57c05b38594e5dcf
Python
AutoSolverProject/solver
/cnf_syntax.py
UTF-8
19,141
2.78125
3
[]
no_license
import copy
import math
from collections import defaultdict
# NOTE(review): `copy` and `defaultdict` appear unused in this module
from typing import List, Set

from propositional_logic.syntax import Formula as PropositionalFormula, is_variable
from propositional_logic.semantics import Model

# Tri-state satisfiability status used throughout the solver.
UNSAT = "UNSAT"
SAT = "SAT"
SAT_UNKNOWN = "SAT_UNKNOWN"


class CNFClause:
    """A single CNF clause (disjunction of literals) with two-watched-literal
    bookkeeping.

    Literals are stored as two sets of variable names (positive/negative) and
    mirrored in `all_literals`, a dict mapping variable -> polarity (True for
    positive). The clause caches its own status in `is_sat`, the currently
    watched variables in `watched_literals`, and — when only one unassigned
    literal remains — the forced (variable, value) pair in
    `inferred_assignment` (unit propagation).
    """

    def __init__(self, positive_literals: Set[str] = None, negative_literals: Set[str] = None):
        self.positive_literals = positive_literals if positive_literals is not None else set()
        self.negative_literals = negative_literals if negative_literals is not None else set()
        # all_literals: variable -> polarity (True = positive occurrence)
        self.all_literals = dict.fromkeys(self.positive_literals, True)
        self.all_literals.update(dict.fromkeys(self.negative_literals, False))
        for pos_var in self.positive_literals:
            assert is_variable(pos_var)
        for neg_var in self.negative_literals:
            assert is_variable(neg_var)
        # The empty clause is unsatisfiable by definition.
        self.is_sat = UNSAT if len(self) == 0 else SAT_UNKNOWN
        self.inferred_assignment = None
        self.watched_literals = set()
        self.update_watched_literals_and_maybe_propagate(dict())

    def __repr__(self) -> str:
        # Renders the clause as a fully-parenthesized '|'-chain, e.g.
        # ((x|y)|~z) — the shape PropositionalFormula.parse expects.
        if len(self) == 0:
            return ""
        my_repr = "(" * (len(self) - 1)
        first_pos = 0
        first_neg = 0
        pos_literals_list = list(self.positive_literals)
        neg_literals_list = list(self.negative_literals)
        if len(pos_literals_list) > 0:
            my_repr += str(pos_literals_list[0])
            first_pos = 1
        elif len(neg_literals_list) > 0:
            my_repr += "~" + str(neg_literals_list[0])
            first_neg = 1
        for pos_index in range(first_pos, len(pos_literals_list)):
            my_repr += "|" + str(pos_literals_list[pos_index]) + ")"
        for neg_index in range(first_neg, len(neg_literals_list)):
            my_repr += "|" + "~" + str(neg_literals_list[neg_index]) + ")"
        return my_repr

    def __eq__(self, other: object) -> bool:
        return isinstance(other, CNFClause) \
               and self.positive_literals == other.positive_literals \
               and self.negative_literals == other.negative_literals

    def __ne__(self, other: object) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash(str(self))

    def __len__(self):
        return len(self.all_literals)

    def to_PropositionalFormula(self) -> PropositionalFormula:
        """Parse this clause's repr back into a PropositionalFormula."""
        return PropositionalFormula.parse(str(self))

    def is_contain_negation_of_literal(self, variable: str, assignment: bool) -> bool:
        """True iff the clause contains `variable` with the opposite polarity
        of `assignment` (default makes absent variables return False)."""
        return self.all_literals.get(variable, assignment) != assignment

    def get_all_variables(self) -> Set[str]:
        return set(self.all_literals.keys())

    def get_all_literals(self) -> Set[str]:
        """All literals as strings, negatives prefixed with '~'."""
        return {pos for pos in self.positive_literals} | {'~' + neg for neg in self.negative_literals}

    def on_backjump(self, model: Model):
        """Re-evaluate this clause against the (shrunk) model after a
        backjump; returns an inferred (var, value) pair or a status."""
        self.update_with_new_model(model)
        self.update_watched_literals_and_maybe_propagate(model)
        return self.inferred_assignment if self.inferred_assignment is not None else self.is_sat

    def update_with_new_model(self, model: Model):
        """Recompute is_sat from scratch against `model`."""
        for pos in self.positive_literals:  # Assuming we have small clauses, but big models
            if model.get(pos, False):
                self.watched_literals = set()
                self.inferred_assignment = None
                self.is_sat = SAT
                return
        for neg in self.negative_literals:
            if not model.get(neg, True):
                self.watched_literals = set()
                self.inferred_assignment = None
                self.is_sat = SAT
                return
        # No literal was satisfied, so SAT_UNKNOWN unless all of them are in the model, and then there's no chance for SAT
        if self.get_all_variables().issubset(model.keys()):
            self.is_sat = UNSAT
        else:
            self.is_sat = SAT_UNKNOWN

    def sat_value_under_assignment(self, variable: str, assignment: bool):
        """Predict this clause's status if `variable` were assigned
        `assignment` — without mutating any state."""
        if self.is_sat in (SAT, UNSAT) or variable not in self.all_literals:
            return self.inferred_assignment if self.inferred_assignment is not None else self.is_sat
        elif self.inferred_assignment is not None:  # We have only one shot to get SAT
            return SAT if self.inferred_assignment == (variable, assignment) else UNSAT
        elif self.all_literals.get(variable, not assignment) == assignment:
            return SAT
        return SAT_UNKNOWN

    def is_satisfied_under_assignment(self, variable: str, assignment: bool) -> bool:
        """True iff assigning `variable` := `assignment` satisfies a literal
        of this clause (False when the variable is absent)."""
        return self.all_literals.get(variable, not assignment) == assignment

    def update_with_new_assignment(self, variable: str, assignment: bool, model: Model):
        """Incrementally apply one new assignment; returns an inferred
        (var, value) pair or the clause's new status."""
        if self.is_sat in (SAT, UNSAT):
            return self.is_sat  # No new assignment will change this state, so spare the check
        if self.all_literals.get(variable, not assignment) == assignment:
            self.is_sat = SAT
            self.inferred_assignment = None
            self.watched_literals = set()
            return SAT
        # NOTE: If we're here, the assigned variable is either not in our clause, OR the assignment is not satisfying us
        if self.inferred_assignment is not None and self.inferred_assignment[0] == variable:
            self.is_sat = UNSAT  # When we have an inferred variable, the only chance we'll be SAT is if it's assigned correctly
            self.inferred_assignment = None
            self.watched_literals = set()
            return UNSAT
        if variable in self.watched_literals:  # We got an un-satisfying assignment to one of out watch literals
            self.update_watched_literals_and_maybe_propagate(model)
        assert self.is_sat == SAT_UNKNOWN  # If we got here, we MUST be SAT_UNKNOWN
        return self.inferred_assignment if self.inferred_assignment is not None else self.is_sat

    def update_watched_literals_and_maybe_propagate(self, model: Model):
        """Pick up to two unassigned variables to watch; with exactly one
        candidate left, record the forced assignment (unit propagation)."""
        self.watched_literals = set()  # Finding 1 watch literals is as difficult as finding 2, so don't keep the old watched_literals
        self.inferred_assignment = None
        if self.is_sat in (SAT, UNSAT):
            return
        candidates = self.get_all_variables() - model.keys()
        num_to_take = min(2, len(candidates))
        if num_to_take >= 1:  # Update watched_literals
            the_chosen_ones = list(candidates)[:num_to_take]
            self.watched_literals = set(the_chosen_ones)
            if num_to_take == 1:  # Also update inferred_assignment (i.e. propagate)
                inferred_variable = the_chosen_ones[0]
                self.inferred_assignment = inferred_variable, self.all_literals[inferred_variable]


class CNFFormula:
    """A conjunction of CNFClauses plus a variable -> clauses index.

    `last_result` caches the outcome of the most recent update: SAT,
    SAT_UNKNOWN, an (UNSAT, clause) pair identifying the conflict clause,
    or an inferred (variable, value, clause) triple for unit propagation.
    """

    def __init__(self, clauses: List[CNFClause]):
        self.clauses = clauses
        self.variable_to_containing_clause = dict()
        self.last_result = SAT_UNKNOWN
        for clause in self.clauses:
            for var in clause.get_all_variables():
                current_clauses = self.variable_to_containing_clause.get(var, set())
                current_clauses.add(clause)
                self.variable_to_containing_clause[var] = current_clauses

    def __repr__(self) -> str:
        # Fully-parenthesized '&'-chain of the clause reprs.
        if len(self.clauses) == 0:
            return ""
        my_repr = "(" * (len(self.clauses) - 1)
        my_repr += str(self.clauses[0])
        for clause_index in range(1, len(self.clauses)):
            my_repr += "&" + str(self.clauses[clause_index]) + ")"
        return my_repr

    def __eq__(self, other: object) -> bool:
        return isinstance(other, CNFFormula) and self.clauses == other.clauses

    def __ne__(self, other: object) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash(str(self))

    def __len__(self):
        return len(self.clauses)

    def to_PropositionalFormula(self) -> PropositionalFormula:
        return PropositionalFormula.parse(str(self))

    def get_all_variables(self) -> Set[str]:
        return set(self.variable_to_containing_clause.keys())

    def count_clauses_satisfied_by_assignment(self, variable: str, assignment: bool):
        """How many clauses containing `variable` this assignment satisfies
        (used by decision heuristics)."""
        assert is_variable(variable)
        sat_counter = 0
        for clause in self.variable_to_containing_clause[variable]:
            if clause.is_satisfied_under_assignment(variable, assignment):
                sat_counter += 1
        return sat_counter

    def add_clause(self, new_clause: CNFClause):
        """Append a (learned) clause and index its variables."""
        self.clauses.append(new_clause)
        for var in new_clause.get_all_variables():
            current_clauses = self.variable_to_containing_clause.get(var, set())
            current_clauses.add(new_clause)
            self.variable_to_containing_clause[var] = current_clauses

    def on_backjump(self, model: Model):
        """Re-evaluate every clause after a backjump and refresh last_result."""
        sat_counter = 0
        found_unsat = None
        inferred_assignment = SAT_UNKNOWN  # If we got one inferred assignment, we'll return it. Otherwise, we'll return SAT_UNKNOWN
        for clause in self.clauses:
            result = clause.on_backjump(model)
            if result == UNSAT:
                found_unsat = clause  # Just a precaution, if it happens entire formula UNSAT, and we'll catch that in other places
            elif result == SAT:
                sat_counter += 1
            elif result == SAT_UNKNOWN:
                continue
            else:  # Just a precaution, as backjumping preserves propagated assignments
                inferred_assignment = result + (clause,)
        if found_unsat is not None:
            self.last_result = UNSAT, found_unsat
        elif sat_counter == len(self.clauses):
            self.last_result = SAT
        else:
            self.last_result = inferred_assignment

    def update_with_new_assignment(self, variable: str, assignment: bool, model: Model):
        """Push one new assignment to every clause containing `variable` and
        refresh last_result accordingly."""
        assert is_variable(variable)
        are_all_sat = True
        found_unsat = None
        inferred_assignment = SAT_UNKNOWN  # If we got one inferred assignment, we'll return it. Otherwise, we'll return SAT_UNKNOWN
        for clause in self.variable_to_containing_clause[variable]:
            result = clause.update_with_new_assignment(variable, assignment, model)
            if result == UNSAT:
                found_unsat = clause  # Maybe can return here, but won't make big difference
                are_all_sat = False
            elif result == SAT:
                continue
            elif result == SAT_UNKNOWN:
                are_all_sat = False
            else:  # Result is a inferred assignment. Continue looping to make sure not UNSAT. Note that means inferred_assignment might change
                inferred_assignment = result + (clause,)
                are_all_sat = False
        if found_unsat is not None:
            self.last_result = UNSAT, found_unsat
        elif are_all_sat:
            # Only if all clauses containing the last assigned var are SAT, bother checking all the rest are SAT, and if not put SAT_UNKOWN
            for clause in self.clauses:
                if clause.is_sat != SAT:
                    are_all_sat = False
            self.last_result = SAT if are_all_sat else SAT_UNKNOWN
        else:
            self.last_result = inferred_assignment


class ImplicationGraph:
    """CDCL implication graph: tracks decisions and propagated (inferred)
    assignments per decision level, supports conflict-clause learning via
    first-UIP resolution and non-chronological backjumping.
    """

    def __init__(self, decided_variables: Model = None):
        decided_variables = dict(decided_variables) if decided_variables is not None else dict()
        self.curr_decision_level = 0
        self.conflict_clause = None
        # Level 0 may hold many pre-decided variables; from level 1 on each
        # entry holds exactly one decision variable.
        self.decision_variables = [decided_variables]
        self.inferred_variables = [dict()]
        self.total_model = dict()
        self.total_model.update(decided_variables)
        # Map each inferred variable to the clause that caused it, and at which level that was
        self.causing_clauses = {variable: (None, self.curr_decision_level) for variable in decided_variables.keys()}

    def __repr__(self) -> str:
        my_repr = ""
        for i in range(self.curr_decision_level):
            my_repr += "LEVEL " + str(i) + ": " + "\n" \
                       + "Decided: " + str(self.decision_variables[i]) + "\n" \
                       + "Inferred: " + str(self.inferred_variables[i]) + "\n"
        return my_repr

    def __eq__(self, other: object) -> bool:
        return isinstance(other, ImplicationGraph) \
               and self.decision_variables == other.decision_variables \
               and self.inferred_variables == other.inferred_variables \
               and self.curr_decision_level == other.curr_decision_level \
               and self.causing_clauses == other.causing_clauses

    def __ne__(self, other: object) -> bool:
        return not self == other

    def __hash__(self) -> int:
        return hash(str(self))

    def __len__(self):
        return self.curr_decision_level

    def add_decision(self, variable, assignment):
        """Open a new decision level with `variable` := `assignment`."""
        assert is_variable(variable)
        assert variable not in self.total_model.keys()
        self.curr_decision_level += 1
        self.decision_variables.append({variable: assignment})
        self.inferred_variables.append(dict())
        self.total_model[variable] = assignment
        self.causing_clauses[variable] = (None, self.curr_decision_level)

    def add_inference(self, variable: str, assignment: bool, causing_clause: CNFClause):
        """Record a propagated assignment at the current level, remembering
        the clause that forced it."""
        assert is_variable(variable)
        assert variable not in self.total_model.keys()
        self.inferred_variables[-1].update({variable: assignment})
        self.total_model[variable] = assignment
        self.causing_clauses[variable] = (causing_clause, self.curr_decision_level)

    def get_causing_clause_of_variable(self, variable: str) -> CNFClause:
        # None for decision variables (they have no antecedent clause).
        assert is_variable(variable)
        return self.causing_clauses[variable][0]

    def get_decision_level_of_variable(self, variable: str) -> int:
        assert is_variable(variable)
        return self.causing_clauses[variable][1]

    def get_causing_variables(self, variable: str) -> Set[str]:
        """Variables of the antecedent clause of `variable` (its parents in
        the implication graph); empty for decisions."""
        assert is_variable(variable)
        causing_clause = self.get_causing_clause_of_variable(variable)
        return causing_clause.get_all_variables() if causing_clause is not None else set()

    def learn_conflict_clause(self) -> CNFClause:
        """Resolve the conflict clause backwards until it contains the
        negation of the first UIP; the result is the clause to learn."""
        uip = self.find_uip()
        uip_assignment = self.total_model[uip]
        conflict_clause = self.conflict_clause
        while not conflict_clause.is_contain_negation_of_literal(uip, uip_assignment):
            conflict_clause = self.resolve(conflict_clause)
        return conflict_clause

    def find_uip(self) -> str:
        """Find the first UIP: the variable closest to the conflict that lies
        on every path from the conflict to the current decision variable."""
        assert self.conflict_clause is not None
        assert self.curr_decision_level >= 1
        last_decision_variable = list(self.decision_variables[-1].keys())[0]  # List of dict only for level 0. From lvl. 1 always 1 decision var per level
        potential_uips = set(self.total_model.keys())
        potential_uips_distances = {potential_uip: math.inf for potential_uip in potential_uips}
        current_path = list()

        def dfs_helper(current_node):  # Finds all uips we must go through and their min distances from the conflict
            nonlocal potential_uips, potential_uips_distances
            current_path.append(current_node)
            if current_node == last_decision_variable:
                # A complete conflict->decision path: only variables on it can
                # remain UIP candidates; record each one's distance.
                potential_uips.intersection_update(set(current_path))
                for node_index in range(len(current_path)):
                    curr_node = current_path[node_index]
                    curr_node_dist = node_index + 1
                    if curr_node_dist < potential_uips_distances[curr_node]:
                        potential_uips_distances[curr_node] = curr_node_dist
            else:
                for parent in self.get_causing_variables(current_node):
                    if parent not in current_path:  # Avoid loop, even though we shouldn't have any - just in case
                        dfs_helper(parent)
            current_path.pop()

        for var in self.conflict_clause.get_all_variables():  # todo: not sure of this line
            dfs_helper(var)

        # After the dfs we have all possible uips and their distances
        assert len(potential_uips) >= 1  # The decision variable is a UIP, so there's at least one
        closest_uip = None
        closest_uip_dist = math.inf
        for potential_uip in potential_uips:
            if 0 < potential_uips_distances[potential_uip] < closest_uip_dist:
                closest_uip = potential_uip
                closest_uip_dist = potential_uips_distances[closest_uip]
        assert closest_uip is not None
        return closest_uip

    def resolve(self, clause_to_resolve: CNFClause) -> CNFClause:
        """Binary resolution of `clause_to_resolve` with the antecedent clause
        of its most recently assigned variable."""
        last_assigned_var = self.get_last_assigned_var(clause_to_resolve)
        last_assigned_var_causing_clause = self.get_causing_clause_of_variable(last_assigned_var)
        # Variables appearing with opposite polarity in the two clauses are
        # eliminated by the resolution step.
        vars_to_resolve = (clause_to_resolve.positive_literals & last_assigned_var_causing_clause.negative_literals) | \
                          (clause_to_resolve.negative_literals & last_assigned_var_causing_clause.positive_literals)
        assert len(vars_to_resolve) > 0
        new_pos_vars = (clause_to_resolve.positive_literals | last_assigned_var_causing_clause.positive_literals) - vars_to_resolve
        new_neg_vars = (clause_to_resolve.negative_literals | last_assigned_var_causing_clause.negative_literals) - vars_to_resolve
        return CNFClause(new_pos_vars, new_neg_vars)

    def get_last_assigned_var(self, clause_to_resolve: CNFClause) -> str:
        """The clause variable with the highest decision level (ties broken
        by smaller name for determinism)."""
        last_assigned_var = ""
        last_assigned_var_decision_level = -1
        for cur_var in clause_to_resolve.get_all_variables():
            cur_decision_level = self.get_decision_level_of_variable(cur_var)
            if (cur_decision_level > last_assigned_var_decision_level) \
                    or (cur_decision_level == last_assigned_var_decision_level and cur_var < last_assigned_var):
                last_assigned_var = cur_var
                last_assigned_var_decision_level = cur_decision_level
        assert last_assigned_var != ""
        return last_assigned_var

    def backjump_to_level(self, new_level):
        """Drop all decisions/inferences above `new_level` and rebuild the
        total model and causing-clause map accordingly."""
        assert 0 <= new_level
        assert new_level < self.curr_decision_level
        self.curr_decision_level = new_level
        self.conflict_clause = None
        self.decision_variables = self.decision_variables[:self.curr_decision_level + 1]
        self.inferred_variables = self.inferred_variables[:self.curr_decision_level + 1]
        all_vars_before_backjump = set(self.total_model.keys())
        self.total_model = dict()
        for i in range(self.curr_decision_level + 1):
            self.total_model.update(self.decision_variables[i])
            self.total_model.update(self.inferred_variables[i])
        all_vars_after_backjump = set(self.total_model.keys())
        lost_vars = all_vars_before_backjump - all_vars_after_backjump
        for var in lost_vars:
            del self.causing_clauses[var]
true
060e14c92d1a49e51b97bb91106f4d99ea8a9c05
Python
liyangdal/ezcf
/tests2/subdir/subdir/test_parent.py
UTF-8
3,281
2.609375
3
[ "MIT" ]
permissive
# coding: utf-8
"""Tests that ezcf's import hook resolves JSON/YAML config modules through
parent-relative (`from ..`) imports from a doubly-nested test package."""

import datetime
import sys
import unittest

import ezcf  # imported for its side effect: installing the config import hook


class TestProto(unittest.TestCase):
    """Exercises `import`, `from ... import`, and `from ... import ... as`
    against sample_json / sample_yaml two package levels up."""

    def test_import(self):
        # Plain module import of a JSON config file.
        from .. import sample_json
        self.assertEqual(sample_json.hello, "world")
        self.assertEqual(sample_json.a_list, [1, 2, 3])
        self.assertEqual(sample_json.a_dict, {
            "key1": 1000,
            "key2": [u"你好", 100]
        })
        # Plain module import of a YAML config file (rich value types).
        from .. import sample_yaml
        self.assertEqual(sample_yaml.Date, datetime.datetime(2001, 11, 23, 20, 3, 17))
        self.assertEqual(sample_yaml.Fatal, 'Unknown variable "bar"')
        self.assertEqual(
            sample_yaml.Stack,
            [{'code': 'x = MoreObject("345\\n")\n', 'file': 'TopClass.py', 'line': 23},
             {'code': 'foo = bar', 'file': 'MoreClass.py', 'line': 58}])
        self.assertEqual(sample_yaml.Time, datetime.datetime(2001, 11, 23, 20, 2, 31))
        self.assertEqual(sample_yaml.User, 'ed')
        self.assertEqual(sample_yaml.warning, u'一个 slightly different error message.')

    def test_from_import(self):
        # `from module import name` must bind only the requested names.
        from ..sample_json import a_list, a_dict
        self.assertEqual(a_list, [1, 2, 3])
        self.assertEqual(a_dict, {
            "key1": 1000,
            "key2": [u"你好", 100]
        })
        from ..sample_yaml import Date, Fatal, Stack, Time, User
        self.assertEqual(Date, datetime.datetime(2001, 11, 23, 20, 3, 17))
        self.assertEqual(Fatal, 'Unknown variable "bar"')
        self.assertEqual(
            Stack,
            [{'code': 'x = MoreObject("345\\n")\n', 'file': 'TopClass.py', 'line': 23},
             {'code': 'foo = bar', 'file': 'MoreClass.py', 'line': 58}])
        self.assertEqual(Time, datetime.datetime(2001, 11, 23, 20, 2, 31))
        self.assertEqual(User, 'ed')
        if sys.version_info[:2] > (2, 6):
            # Names NOT pulled in by the from-imports above must stay
            # undefined (assertRaises-as-context needs Python > 2.6).
            with self.assertRaises(NameError):
                print(hello)
            with self.assertRaises(NameError):
                print(warning)

    def test_from_import_as(self):
        # `from module import name as alias` binds only the alias.
        from ..sample_json import hello as h
        from ..sample_json import a_list as al
        from ..sample_json import a_dict as ad
        self.assertEqual(h, "world")
        self.assertEqual(al, [1, 2, 3])
        self.assertEqual(ad, {
            "key1": 1000,
            "key2": [u"你好", 100]
        })
        from ..sample_yaml import Date as d
        from ..sample_yaml import Fatal as f
        from ..sample_yaml import Stack as s
        from ..sample_yaml import Time as t
        from ..sample_yaml import User as u
        from ..sample_yaml import warning as w
        self.assertEqual(d, datetime.datetime(2001, 11, 23, 20, 3, 17))
        self.assertEqual(f, 'Unknown variable "bar"')
        self.assertEqual(
            s,
            [{'code': 'x = MoreObject("345\\n")\n', 'file': 'TopClass.py', 'line': 23},
             {'code': 'foo = bar', 'file': 'MoreClass.py', 'line': 58}])
        self.assertEqual(t, datetime.datetime(2001, 11, 23, 20, 2, 31))
        self.assertEqual(u, 'ed')
        self.assertEqual(w, u'一个 slightly different error message.')
5bf6f4674ac39514f03e050d1d799fd2db568f61
Python
biabulinxi/Python-ML-DL
/Spider/Day01/01_urlopen.py
UTF-8
829
2.859375
3
[]
no_license
# -*- coding: utf-8 -*- # @Project:AID1810 # @Author:biabu # @Date:2019/2/26 11:35 # @File_name:01_urlopen.py # @IDE:PyCharm from urllib.request import urlopen, Request ###################################### # 无法设置请求头 url = 'http://www.baidu.com/' # 向网站发起请求并获取响应对象 response = urlopen(url) # print(response) # 获取响应对象的内容 html = response.read().decode('utf-8') # print(html) ######################################### # 可以设置请求代理 headers = {'User-Agent':"Mozilla/5.0"} # 1. 创建请求 request = Request(url, headers=headers) # 2. 获取响应 response = urlopen(request) # 3. 获取内容 html = response.read().decode('utf-8') # print(html) # 获取 http 响应码 print(response.getcode()) # 返回实际数据的url地址 print(response.geturl())
true
8504cdcea18698cfebd3b8fa386a64aa1adb0d07
Python
Dragnes/Doing-Math-With-Python
/Doing Math with Python CH 1 pg 1-26.py
UTF-8
15,183
4.5
4
[]
no_license
# Doing Math With Python Text
# Chapter 1: pg 1-22
# NOTE: this file is a worked transcript of the book's REPL examples and
# exercises; it deliberately redefines names section by section and prompts
# for input() in several places.

# Working with Fractions
from fractions import Fraction

f = Fraction(3, 4)  # Fraction(numerator, denominator)
Fraction(3, 4) + 1 + 1.5
Fraction(3, 4) + 1 + Fraction(1, 4)

# Complex Numbers
a = 2 + 3j
type(a)
a = complex(2, 3)
b = 3 + 3j
a + b
a - b
a * b
a ** b
a / b
z = 2 + 3j
z.real
z.imag
z.conjugate()
# To find the magnitude of a complex number:
(z.real ** 2 + z.imag ** 2) ** 0.5
abs(z)

a = '1'
int(a) + 1
int('1') + 1
# int('1.0') + 1  # raises ValueError: int() cannot parse a float string.
# (Commented out so the transcript can run top to bottom.)

# Handling Exceptions and Invalid Input
a = 3/4
a = input()
try:
    a = float(input('Enter a number: '))
except ValueError:
    print('You entered an invalid number')
a = input('Input an integer: ')
a = int(input())
a + 1
a = int(input())
# When inputting a float number (1.0 or 4.2) an error will be raised.
4.2.is_integer()
1.0.is_integer()

# Fractions and Complex Numbers as Inputs
a = Fraction(input('Enter a fraction: '))
# Input 3/4 as well as 3/0 -- 3/0 will raise ZeroDivisionError.
try:
    a = Fraction(input('Enter a fraction: '))
except ZeroDivisionError:
    print('Invalid fraction')
z = complex(input('Enter a complex number: '))

# Writing Programs that do the Math for you (is number 'a' a factor of 'b')
def is_factor(a, b):
    """Return True if a divides b exactly."""
    return b % a == 0

print(is_factor(4, 1024))

# Examples of range(); range() is used below to write a factor finder.
for i in range(4):
    print(i)
for i in range(1, 4):
    print(i)
for i in range(0, 10, 2):
    print(i)
for i in range(1, 10, 2):
    print(i)

''' Find the Factors of an integer. '''
def factors(b):
    """Print every factor of b, one per line."""
    for i in range(1, b + 1):
        if b % i == 0:
            print(i)

if __name__ == '__main__':
    b = input('Your Number Please: ')
    b = float(b)
    if b > 0 and b.is_integer():
        factors(int(b))
    else:
        print('Please enter a positive integer')

''' Another way to find Factors '''
from functools import reduce

n = int(input('Your Number Please: '))

def factors(n):
    """Return the set of factors of n, collecting divisor pairs up to sqrt(n)."""
    return set(reduce(list.__add__,
                      ([i, n // i] for i in range(1, int(pow(n, 0.5) + 1))
                       if n % i == 0)))

print(factors(n))

# Generating Multiplication Tables
# A quick look at the str.format() method, which is used below.
item1 = 'apples'
item2 = 'bananas'
item3 = 'grapes'
print('At the store, I picked up some {0} and {1} and {2}.'.format(item1, item2, item3))

''' Multiplication table printer '''
def multi_table(a):
    """Print the 1..10 multiplication table for a."""
    for i in range(1, 11):
        print('{0} x {1} = {2}'.format(a, i, a * i))

if __name__ == '__main__':
    a = input('Enter a number: ')
    multi_table(float(a))

''' Ever so slightly different way for multiplication table printer '''
a = int(input('Enter a number: '))  # int or float

def multi_table(a):
    for i in range(1, 11):
        print('{0} x {1} = {2}'.format(a, i, a * i))

multi_table(a)  # fixed: print(multi_table(a)) also printed a spurious 'None'

# Converting Units of Measurement
''' Unit converter: Miles and Kilometers '''
def print_menu():
    print('1. Kilometers to Miles')
    print('2. Miles to Kilometers')

def km_miles():
    km = float(input('Enter distance in kilometers: '))
    miles = km / 1.609
    print('Distance in miles: {0}'.format(miles))

def miles_km():
    miles = float(input('Enter distance in miles: '))
    km = miles * 1.609
    print('Distance in kilometers: {0}'.format(km))

if __name__ == '__main__':
    print_menu()
    choice = input('Which conversion would you like to do?: ')
    if choice == '1':
        km_miles()
    if choice == '2':
        miles_km()

# Finding the Roots of a Quadratic Equation
'''Solve the following quadratic equation: x^2 + 2x + 1 = 0'''
a = 1
b = 2
c = 1
# FIX: the discriminant is b^2 - 4*a*c.  The original computed b**2 - 4*b*c,
# which for this equation gives sqrt(-4) (complex roots) instead of the
# correct double root x = -1.
D = (b**2 - 4*a*c)**0.5
x_1 = (-b + D)/(2*a)
x_2 = (-b - D)/(2*a)

''' Quadratic equation root calculator '''
def roots(a, b, c):
    """Print both roots of a*x^2 + b*x + c = 0 (complex D handled via **0.5)."""
    D = (b*b - 4*a*c)**0.5
    x_1 = (-b + D)/(2*a)
    x_2 = (-b - D)/(2*a)
    print('x_1: {0}'.format(x_1))
    print('x_2: {0}'.format(x_2))

if __name__ == '__main__':
    a = input('Enter a: ')
    b = input('Enter b: ')
    c = input('Enter c: ')
    roots(float(a), float(b), float(c))

# Ever so slightly simpler code
a = float(input('Enter a: '))
b = float(input('Enter b: '))
c = float(input('Enter c: '))

def roots(a, b, c):
    D = (b*b - 4*a*c)**0.5
    x_1 = (-b + D)/(2*a)
    x_2 = (-b - D)/(2*a)
    print('x_1: {0}'.format(x_1))
    print('x_2: {0}'.format(x_2))

roots(a, b, c)  # fixed: print(roots(...)) also printed a spurious 'None'

# Chapter 1 Programming Challenges pages 22-26

# 1) Even-Odd Vending Machine
number = int(input('Enter an integer: '))

def even_odd_vending(num):
    """Report parity, then print num and the next nine numbers of the same parity."""
    if num % 2 == 0:
        print('Even')
    else:
        print('Odd')
    count = 1
    print(num)
    while count <= 9:
        num += 2
        print(num)
        count += 1

even_odd_vending(number)  # fixed: spurious 'None' print

# 2) Enhanced Multiplication Table Generator
''' Method 1 '''
a = int(input('Multiplication table of what number? '))  # int or float
b = int(input('Multiples up to how many times? '))

def multi_table(a, b):
    for i in range(1, b + 1):
        print('{0} x {1} = {2}'.format(a, i, a * i))

multi_table(a, b)  # fixed: spurious 'None' print

''' Method 2 = more aligned with the text '''
def multi_table(a, b):
    for i in range(1, b + 1):
        print('{0} x {1} = {2}'.format(a, i, a * i))

if __name__ == '__main__':
    a = int(input('Multiplication table of what number? '))
    b = int(input('Multiples up to how many times? '))
    multi_table(a, b)

# 3) Enhanced Unit Converters
''' Method 1 '''
print('1. Kilometers to Miles')
print('2. Miles to Kilometers')
print('3. Kilograms to Pounds')
print('4. Pounds to Kilograms')
print('5. Celsius to Fahrenheit')
print('6. Fahrenheit to Celsius')
choice = input('Which conversion would you like to do?: ')
if choice == '1':
    kilometers = float(input('Enter distance in kilometers: '))
    miles = kilometers / 1.609
    print('Distance in miles: {0}'.format(miles))
elif choice == '2':
    miles = float(input('Enter distance in miles: '))
    kilometers = miles * 1.609
    print('Distance in kilometers: {0}'.format(kilometers))
elif choice == '3':
    kilograms = float(input('Enter weight in kilograms: '))
    pounds = kilograms * 2.205
    print('Weight in pounds: {0}'.format(pounds))
elif choice == '4':
    pounds = float(input('Enter weight in pounds: '))
    kilograms = pounds / 2.205
    print('Weight in kilograms: {0}'.format(kilograms))
elif choice == '5':
    celsius = float(input('Enter the temperature in celsius: '))
    fahrenheit = (9/5)*celsius + 32
    print('Temperature in fahrenheit: {0}'.format(fahrenheit))
else:
    fahrenheit = float(input('Enter the temperature in fahrenheit: '))
    celsius = (fahrenheit - 32)*(5/9)
    print('Temperature in celsius: {0}'.format(celsius))

''' Method 2 = more aligned with the text '''
def print_menu():
    print('1. Kilometers to Miles')
    print('2. Miles to Kilometers')
    print('3. Kilograms to Pounds')
    print('4. Pounds to Kilograms')
    print('5. Celsius to Fahrenheit')
    print('6. Fahrenheit to Celsius')

def km_miles():
    km = float(input('Enter distance in kilometers: '))
    miles = km / 1.609
    print('Distance in miles: {0}'.format(miles))

def miles_km():
    miles = float(input('Enter distance in miles: '))  # fixed 'distane' typo
    km = miles * 1.609
    print('Distance in kilometers: {0}'.format(km))

def kg_pounds():
    kg = float(input('Enter weight in kilograms: '))
    pounds = kg * 2.205
    print('Weight in pounds: {0}'.format(pounds))

def pounds_kg():
    pounds = float(input('Enter weight in pounds: '))
    kg = pounds / 2.205
    print('Weight in kilograms: {0}'.format(kg))

def celsius_fahrenheit():
    celsius = float(input('Enter temperature in celsius: '))
    fahrenheit = (9/5)*celsius + 32
    print('Temperature in fahrenheit: {0}'.format(fahrenheit))

def fahrenheit_celsius():
    fahrenheit = float(input('Enter temperature in fahrenheit: '))
    celsius = (fahrenheit - 32)*(5/9)
    print('Temperature in celsius: {0}'.format(celsius))

if __name__ == '__main__':
    print_menu()
    choice = input('Which conversion would you like to do?: ')
    if choice == '1':
        km_miles()
    if choice == '2':
        miles_km()
    if choice == '3':
        kg_pounds()
    if choice == '4':
        pounds_kg()
    if choice == '5':
        celsius_fahrenheit()
    if choice == '6':
        fahrenheit_celsius()

# 4) Fraction Calculator
''' Method 1 '''
from fractions import Fraction

op = input('Operation to perform - Add, Subtract, Divide, Multiply: ')  # fixed 'preform' typo
a = Fraction(input('Enter the first fraction: '))
b = Fraction(input('Enter the second fraction: '))
if op.lower() == 'add':
    add = a + b
    print('Result of Addition of {0} and {1}: {2}'.format(a, b, add))
if op.lower() == 'subtract':
    subtract = a - b
    print('Result of Subtraction of {0} and {1}: {2}'.format(a, b, subtract))
if op.lower() == 'divide':
    divide = a / b
    print('Result of Division of {0} and {1}: {2}'.format(a, b, divide))
if op.lower() == 'multiply':
    multiply = a * b
    print('Result of Multiplication of {0} and {1}: {2}'.format(a, b, multiply))

''' Method 2: more aligned with the text '''
from fractions import Fraction

def add(a, b):
    print('Result of adding {0} and {1} is {2}'.format(a, b, a + b))

def subtract(a, b):
    print('Result of subtracting {0} and {1} is {2}'.format(a, b, a - b))

def divide(a, b):
    print('Result of dividing {0} by {1} is {2}'.format(a, b, a / b))

def multiply(a, b):
    print('Result of multiplying {0} and {1} is {2}'.format(a, b, a * b))

if __name__ == '__main__':
    try:
        op = input('Operation to perform - Add, Subtract, Divide, Multiply: ')
        a = Fraction(input('Enter first fraction: '))
        b = Fraction(input('Enter second fraction: '))
        if op.lower() == 'add':
            add(a, b)
        if op.lower() == 'subtract':
            subtract(a, b)
        if op.lower() == 'divide':
            divide(a, b)
        if op.lower() == 'multiply':
            multiply(a, b)
    except ValueError:
        print('Invalid fraction entered')

# 5) Give Exit Power to the User
''' Rewrite programs in this chapter so that they continue executing until asked by the user to exit. '''
def factors(b):
    for i in range(1, b + 1):
        if b % i == 0:
            print(i)

if __name__ == '__main__':
    while True:
        b = input('Your Number Please: ')
        b = float(b)
        if b > 0 and b.is_integer():
            factors(int(b))
        else:
            print('Please enter a positive integer')
        answer = input('Do you want to exit? (y) for yes: ')
        if answer == 'y':
            break

def multi_table(a):
    for i in range(1, 11):
        print('{0} x {1} = {2}'.format(a, i, a * i))

if __name__ == '__main__':
    while True:
        a = input('Multiplication table of what integer? ')
        multi_table(float(a))
        answer = input('Do you want to exit? (y) for yes: ')
        if answer == 'y':
            break

def print_menu():
    print('1. Kilometers to Miles')
    print('2. Miles to Kilometers')

def km_miles():
    km = float(input('Enter distance in kilometers: '))
    miles = km / 1.609
    print('Distance in miles: {0}'.format(miles))

def miles_km():
    miles = float(input('Enter distance in miles: '))
    km = miles * 1.609
    print('Distance in kilometers: {0}'.format(km))

if __name__ == '__main__':
    while True:
        print_menu()
        choice = input('Which conversion would you like to do?: ')
        if choice == '1':
            km_miles()
        if choice == '2':
            miles_km()
        answer = input('Do you want to exit? (y) for yes: ')
        if answer == 'y':
            break

def print_menu():
    print('1. Kilometers to Miles')
    print('2. Miles to Kilometers')
    print('3. Kilograms to Pounds')
    print('4. Pounds to Kilograms')
    print('5. Celsius to Fahrenheit')
    print('6. Fahrenheit to Celsius')

def km_miles():
    km = float(input('Enter distance in kilometers: '))
    miles = km / 1.609
    print('Distance in miles: {0}'.format(miles))

def miles_km():
    miles = float(input('Enter distance in miles: '))  # fixed 'distane' typo
    km = miles * 1.609
    print('Distance in kilometers: {0}'.format(km))

def kg_pounds():
    kg = float(input('Enter weight in kilograms: '))
    pounds = kg * 2.205
    print('Weight in pounds: {0}'.format(pounds))

def pounds_kg():
    pounds = float(input('Enter weight in pounds: '))
    kg = pounds / 2.205
    print('Weight in kilograms: {0}'.format(kg))

def celsius_fahrenheit():
    celsius = float(input('Enter temperature in celsius: '))
    fahrenheit = (9/5)*celsius + 32
    print('Temperature in fahrenheit: {0}'.format(fahrenheit))

def fahrenheit_celsius():
    fahrenheit = float(input('Enter temperature in fahrenheit: '))
    celsius = (fahrenheit - 32)*(5/9)
    print('Temperature in celsius: {0}'.format(celsius))

if __name__ == '__main__':
    while True:
        print_menu()
        choice = input('Which conversion would you like to do?: ')
        if choice == '1':
            km_miles()
        if choice == '2':
            miles_km()
        if choice == '3':
            kg_pounds()
        if choice == '4':
            pounds_kg()
        if choice == '5':
            celsius_fahrenheit()
        if choice == '6':
            fahrenheit_celsius()
        answer = input('Do you want to exit? (y) for yes: ')
        if answer == 'y':
            break

from fractions import Fraction

def add(a, b):
    print('Result of adding {0} and {1} is {2}'.format(a, b, a + b))

def subtract(a, b):
    print('Result of subtracting {0} and {1} is {2}'.format(a, b, a - b))

def divide(a, b):
    print('Result of dividing {0} by {1} is {2}'.format(a, b, a / b))

def multiply(a, b):
    print('Result of multiplying {0} and {1} is {2}'.format(a, b, a * b))

if __name__ == '__main__':
    while True:
        try:
            op = input('Operation to perform - Add, Subtract, Divide, Multiply: ')
            a = Fraction(input('Enter first fraction: '))
            b = Fraction(input('Enter second fraction: '))
            if op.lower() == 'add':
                add(a, b)
            if op.lower() == 'subtract':
                subtract(a, b)
            if op.lower() == 'divide':
                divide(a, b)
            if op.lower() == 'multiply':
                multiply(a, b)
        except ValueError:
            print('Invalid fraction entered')
        answer = input('Do you want to exit? (y) for yes: ')
        if answer == 'y':
            break
true
7427caf4b6cb551ec88b631a424a75e45fece643
Python
akshaykalyan/solutions
/sub_large.py
UTF-8
253
3.203125
3
[]
no_license
arr = [345, 32, 45] t=[] main=[] for x in arr: if t==[] or t[-1]<x: t.append(x) else: main.append(t) t=[x] main.append(t) t=[len(x) for x in main ] t=t.index(max(t)) print(main[t] if len(main[t])>2 else "No increasing subsequence")
true
41ae24af6ee69cfd4997614dc7a5f0d88d36ebe0
Python
Bdwrds/flask_trials
/text_demo/app/deploy_model.py
UTF-8
1,517
2.75
3
[]
no_license
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ @author: benjaminedwards """ from flask import Flask, request, render_template import flask import tensorflow as tf import numpy as np import keras import pickle import os.path app = Flask(__name__) def load_model(): global graph graph = tf.get_default_graph() global model model = keras.models.load_model('models/data/text_sentiment.h5') pkl_file = open('models/data/word_index.pkl', 'rb') global word_index word_index = pickle.load(pkl_file) pkl_file.close() def prep_data(new_text): words = keras.preprocessing.text.text_to_word_sequence(new_text, filters='!"#$%&()*+,-./:;<=>?@[\]^_`{|}~ ', lower=True, split=' ') pred = np.array([word_index[word] if word in word_index else 0 for word in words]) pred =[pred,[]] x_pred = keras.preprocessing.sequence.pad_sequences(pred, maxlen=100, padding='post') return x_pred @app.route('/app') def my_form(): return render_template('home.html') @app.route('/app', methods=["GET","POST"]) def my_form_get(): data = {"success": False} new_text = request.form['Click Enter'] x_pred = prep_data(new_text) with graph.as_default(): data["prediction"] = model.predict(x_pred)[0][0] data["success"] = True return render_template('result.html',prediction = data["prediction"] ) if __name__ == "__main__": print(("Loading model and Flask please hold on!")) load_model() app.run(debug=True)
true
c3cb55902e3b74f455c2613943362e14324b753c
Python
kolun4ik/otus_python
/homework_01/tests/test_latest_log_get.py
UTF-8
4,455
2.609375
3
[]
no_license
import os
import gzip
import unittest

from .base import FunctionalTest
from datetime import datetime

from log_analyzer import latest_log_get, config


class LatestLogGetTests(FunctionalTest):
    """Tests for the function that finds the freshest NGINX log in the log directory."""

    def test_generate_warning_if_log_dir_not_exist(self):
        """Expect an exception if the log directory does not exist; exiting is not an error."""
        dir_fails = './log_fail'
        with self.assertRaises(IOError):
            latest_log_get(dir_fails)

    def test_generate_raise_if_nginx_log_not_exist(self):
        """Expect an exception if LOG_DIR contains no nginx logs."""
        with self.assertRaises(IOError):
            latest_log_get(config['LOG_DIR'])

    def test_search_last_log_nginx_in_log_dir(self):
        """Find the freshest nginx log file among mixed nginx/apache logs."""
        # Today's log must win over randomly-dated nginx and apache logs.
        now = 'nginx-access-ui.log-%s' % datetime.strftime(datetime.now(), '%Y%m%d')
        log_files = [now]
        for i in range(5):
            nginx_log = 'nginx-access-ui.log-%s' % self.random_date_gen(
                start='01.01.2017', end='04.05.2019')
            other_log = 'apache_accees-ui.log-%s' % self.random_date_gen(
                start='01.01.2017', end='19.05.2019')
            log_files.append(nginx_log)
            log_files.append(other_log)
        for file in log_files:
            with open(os.path.join(config['LOG_DIR'], file), 'w'):
                pass
        last_log = latest_log_get(config['LOG_DIR'])
        self.assertEqual(last_log.name, now)

    def test_return_named_tuple(self):
        """Check a named tuple is returned with fields: path, name, creation_date, extention."""
        # Known bug (translated from the original comment): when the log has
        # a .gz extension, `name` comes back without the extension, so the
        # file below is created without one.
        now = datetime.strftime(datetime.now(), '%Y%m%d')
        log_now = 'nginx-access-ui.log-%s' % now
        with gzip.open(os.path.join(config['LOG_DIR'], log_now), 'w'):
            pass
        last_log = latest_log_get(config['LOG_DIR'])
        self.assertEqual(last_log.name, log_now)
        self.assertEqual(last_log.path, os.path.realpath(config['LOG_DIR']+ '/'))
        self.assertEqual(last_log.creation_date, datetime.strftime(datetime.now(), '%d.%m.%Y'))
        self.assertEqual(last_log.extention, None)

    def test_log_name_endwith_as_plain_text(self):
        """Check the latest log is reported as plain text (extention is None)."""
        log_file = 'nginx-access-ui.log-%s' % self.random_date_gen(
            start='01.01.2017', end='04.05.2019')
        with open(os.path.join(config['LOG_DIR'], log_file), 'w'):
            pass
        last_log = latest_log_get(config['LOG_DIR'])
        self.assertEqual(last_log.extention, None)

    def test_log_name_endwith_gz(self):
        """Check the latest log is reported as a .gz archive."""
        log_file = 'nginx-access-ui.log-%s.gz' % self.random_date_gen(
            start='01.01.2017', end='04.05.2019')
        with gzip.open(os.path.join(config['LOG_DIR'], log_file), 'w'):
            pass
        last_log = latest_log_get(config['LOG_DIR'])
        self.assertEqual(last_log.extention, '.gz')

    def test_log_name_not_return_bz2_extention(self):
        """Check the freshest-log search never reports a .bz2 extension."""
        log_file1 = 'nginx-access-ui.log-%s.gz' % self.random_date_gen(
            start='01.01.2017', end='04.05.2019')
        with gzip.open(os.path.join(config['LOG_DIR'], log_file1), 'w'):
            pass
        log_now = 'nginx-access-ui.log-%s.bz2' % datetime.strftime(datetime.now(), '%Y%m%d')
        with gzip.open(os.path.join(config['LOG_DIR'], log_now), 'w'):
            pass
        last_log = latest_log_get(config['LOG_DIR'])
        self.assertNotEqual(last_log.extention, '.bz2')


if __name__ == "__main__":
    unittest.main()
true
7291be21909c42ad1cedcc3c60cbd09e1f8a7130
Python
windyStreet/MYPROJECT
/loamp/func/autoUpdate/projectupdate.py
UTF-8
3,729
2.53125
3
[]
no_license
#!/usr/bin/env python
# !-*- coding:utf-8 -*-
# Dispatches project update / restart / resource-replace operations to the
# correct deployment-mode implementation (single / onehalf / double), based
# on conf/tomcat-conf.json.
import sys
import os
import FormatPrint
import JsonFileFunc
import __projectupdate_double
import __projectupdate_onehalf
import __projectupdate_single
import RungroupFunc
import ResourceFunc


class ProjectUpdate(object):
    """Plain data holder passed to the per-deployment-mode update modules."""
    def __init__(self):
        self.projectName = None
        self.updateVersion = None
        self.updateTime = None
        self.updateType = None
        self.deploymentmode = None   # 'single' | 'onehalf' | 'double'
        self.tomcatConf = None       # parsed tomcat-conf.json
        self.hostInfostr = None      # "<hostname>:<serverip>"
        self.willUpdateGroup = []    # group names to act on


def replaceResource(projectName, updateVersion, updateTime):
    """Convenience wrapper: run a 'replaceResource' operation."""
    update(projectName, updateVersion, "replaceResource", updateTime)


def restartProject(projectName, updateVersion=None, updateTime=None):
    """Convenience wrapper: run a 'restartProject' operation."""
    update(projectName, updateVersion, "restartProject", updateTime)


def updateProject(projectName, updateVersion=None, updateTime=None):
    """Convenience wrapper: run an 'update' operation."""
    update(projectName, updateVersion, "update", updateTime)


def update(projectName, updateVersion, updateType, updateTime):
    """Load config, validate the project, and dispatch by deployment mode.

    NOTE(review): in every deployment mode the 'update' branch calls the
    same restartProject() as the 'restartProject' branch -- this looks like
    a copy-paste placeholder; confirm whether a dedicated update entry
    point exists in the __projectupdate_* modules.
    """
    pu = ProjectUpdate()
    pu.projectName = projectName
    pu.updateVersion = updateVersion
    pu.updateType = updateType
    pu.updateTime = updateTime
    pu.willUpdateGroup = []
    # Config lives next to the entry script: <script dir>/conf/tomcat-conf.json
    confPath = sys.path[0] + os.sep + 'conf' + os.sep + 'tomcat-conf.json'
    pu.tomcatConf = JsonFileFunc.readFile(confPath)
    if pu.tomcatConf is None:
        FormatPrint.printFalat('can not read tomcat-conf configure')
    if pu.projectName not in pu.tomcatConf['projectname']:
        FormatPrint.printFalat(str(pu.projectName) + ' not configure in the tomcat-conf.json')
    pu.deploymentmode = pu.tomcatConf['projectname'][projectName]['deploymentmode']
    pu.hostInfostr = str(pu.tomcatConf['hostname']) + ":" + str(pu.tomcatConf['serverip'])
    if pu.deploymentmode == 'single':
        # Single-instance mode: only the master group exists.
        FormatPrint.printDebug("curent project is single deploymentmode")
        pu.willUpdateGroup.append("groupmaster")
        if updateType == 'replaceResource':
            ResourceFunc.replceResource(pu)
        elif updateType == 'restartProject':
            __projectupdate_single.restartProject(pu)
        elif updateType == 'update':
            __projectupdate_single.restartProject(pu)
        else:
            pass
    elif pu.deploymentmode == 'onehalf':
        # Half-restart mode: both groups are acted on.
        pu.willUpdateGroup.append("groupmaster")
        pu.willUpdateGroup.append("groupbackup")
        FormatPrint.printDebug("curent project is onehalf deploymentmode")
        if updateType == 'replaceResource':
            ResourceFunc.replceResource(pu)
        elif updateType == 'restartProject':
            __projectupdate_onehalf.restartProject(pu)
        elif updateType == 'update':
            __projectupdate_onehalf.restartProject(pu)
        else:
            pass
    elif pu.deploymentmode == 'double':
        # Master/backup-group mode: update the group that is NOT running.
        FormatPrint.printDebug("curent project is double deploymentmode")
        currentRunGroup = RungroupFunc.getRunGroupName(pu.projectName)[0]
        if currentRunGroup == "groupmaster":
            pu.willUpdateGroup.append("groupbackup")
        elif currentRunGroup == "groupbackup":
            pu.willUpdateGroup.append("groupmaster")
        else:
            FormatPrint.printFalat(" can not get the will update group , please check config ")
        if updateType == 'replaceResource':
            ResourceFunc.replceResource(pu)
        elif updateType == 'restartProject':
            __projectupdate_double.restartProject(pu)
        elif updateType == 'update':
            __projectupdate_double.restartProject(pu)
        else:
            pass
    else:
        FormatPrint.printFalat(str(pu.projectName) + 'project configure wrong deploymentmode ')
true
46ee8d162a609460143345533f8d800e8b2b7498
Python
ricardo-rolo/data-science
/statistics_1/amostra_estratificada.py
UTF-8
870
3.09375
3
[]
no_license
# -*- coding: utf-8 -*- """ Created on Fri Feb 14 15:07:34 2020 @author: ric10 """ import pandas as pd from sklearn.model_selection import train_test_split iris = pd.read_csv('iris.csv') #contagem de ocorrências por 'class' iris['class'].value_counts() #separando os grupos de acordo com suas respectivas representatividades x, _, y, _ = train_test_split(iris.iloc[:,0:4], iris.iloc[:, 4], test_size = 0.5, stratify = iris.iloc[:,4]) y.value_counts() infert = pd.read_csv('infert.csv') #contagem de ocorrências por 'education' infert['education'].value_counts() #separando os grupos de acordo com suas respectivas representatitivades x1, _, y1, _ = train_test_split(infert.iloc[:, 2:9], infert.iloc[:, 1], test_size = 0.6, stratify = infert.iloc[:, 1]) y1.value_counts()
true
6b618280fdc41028715a25458a676b7d4acfea7a
Python
masonng-astro/NICERsoft
/scripts/fitharms.py
UTF-8
11,515
2.71875
3
[ "LicenseRef-scancode-warranty-disclaimer", "MIT" ]
permissive
#!/usr/bin/env python from __future__ import print_function,division from astropy.io import fits import matplotlib.pyplot as plt import numpy as np import matplotlib from pint.templates import lctemplate,lcprimitives,lcfitters from pint.eventstats import z2m,sf_z2m, hm, sf_hm, sig2sigma import sys from astropy import log import scipy.stats def compute_fourier(phases,nh=10,pow_phase=False): '''Compute Fourier amplitudes from an array of pulse phases phases should be [0,1.0) nh is the number of harmonics (1 = fundamental only) Returns: cos and sin component arrays, unless pow_phase is True then returns Fourier power (Leahy normalized) and phase arrays DC bin is not computed or returned ''' phis = 2.0*np.pi*phases # Convert phases to radians n = len(phis) c = np.asarray([(np.cos(k*phis)).sum() for k in range(1,nh+1)])/n s = np.asarray([(np.sin(k*phis)).sum() for k in range(1,nh+1)])/n c *= 2.0 s *= 2.0 if pow_phase: # CHECK! There could be errors here! # These should be Leahy normalized powers fourier_pow = (n/2)*(c**2+s**2) fourier_phases = np.arctan2(s,c) return n,fourier_pow,fourier_phases else: return n,c,s def evaluate_fourier(n,c,s,nbins,k=None): # This should be updated to do a little integral over each bin. 
# Currently evaluates the model at the center of each bin model = np.zeros(nbins)+n/nbins theta = 2.0*np.pi*np.arange(nbins,dtype=np.float)/nbins theta += theta[1]/2.0 if k is not None: model += (n/nbins)*(c[k]*np.cos((k+1)*theta) + s[k]*np.sin((k+1)*theta)) else: for k in range(len(c)): model += (n/nbins)*(c[k]*np.cos((k+1)*theta) + s[k]*np.sin((k+1)*theta)) return model def evaluate_chi2(hist,model): # Question here is whether error should be sqrt(data) or sqrt(model) return ((hist-model)**2/model).sum() def compute_phist(phases,nbins=200): h, edges = np.histogram(phases,bins=np.linspace(0.0,1.0,nbins+1,endpoint=True)) return edges[:-1], h if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description = "Fit a set of pulse phases to harmonics") parser.add_argument("evname", help="Input event file (must have PULSE_PHASE column)") parser.add_argument("--white",help = "Replace phases with white random numbers, for testing", action="store_true") parser.add_argument("--txt",help = "Assume input file is .txt instead of FITS", action="store_true") parser.add_argument("--showcomps",help = "Show individual components of harmonic fit on plot", action="store_true") parser.add_argument("--noplot",help = "Don't show any plots", action="store_true") parser.add_argument("--output",help = "Save figures with basename", default=None) parser.add_argument("--numharm",help="Max harmonic to use in analysis (1=Fundamental only)",default=4,type=int) parser.add_argument("--numbins",help="Number of bins for histograms",default=200,type=int) parser.add_argument("--emin",help="Minimum energy to include (keV)",default=0.25,type=float) parser.add_argument("--emax",help="Maximum energy to include (keV)",default=12.0,type=float) args = parser.parse_args() if args.txt: exposure = None ph,en = np.loadtxt(args.evname,unpack=True,usecols=(1,2),skiprows=3) log.info("Read {0} phases from .txt file".format(len(ph))) tstart = 0.0 else: f = fits.open(args.evname) en = 
f['events'].data.field('pi') ph = f['events'].data.field('pulse_phase') log.info("Read {0} phases from FITS file".format(len(ph))) exposure = float(f['events'].header['EXPOSURE']) tstart = float(f['events'].header['TSTART']) log.info("Exposure = {0} s".format(exposure)) if args.white: # Random phases uniform over [0,1) ph = np.random.random_sample(len(en)) log.info("Replaced with {0} random phases".format(len(en))) matplotlib.rcParams['font.family'] = "serif" matplotlib.rcParams.update({'font.size': 13}) matplotlib.rc('axes', linewidth=1.5) if args.output: resultsfile = open("{0}_results.txt".format(args.output),"w") print("{0:.6f}".format(tstart),file=resultsfile) # Filter on energy idx = np.where(np.logical_and(en > int(args.emin*100), en < int(args.emax*100) ))[0] ph = ph[idx] en = en[idx] # Hack to manually split out a segment #q = 3 # Use 0, 1, 2, 3 #qn = len(ph)//4 #ph = ph[q*qn:(q+1)*qn] #en = en[q*qn:(q+1)*qn] nbins = args.numbins bins,phist = compute_phist(ph,nbins=nbins) fig,axs = plt.subplots(nrows=2,ncols=1) plt.subplots_adjust(left=0.15, bottom=0.1, right=0.97, top=0.94,hspace=0.001) ax=axs[0] ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True, labelbottom=False) # ax.text(.5,.8,'PSR J0030+0451', horizontalalignment='center', transform=ax.transAxes) # ax.text(.5,.8,'PSR J0437-4715', horizontalalignment='center', transform=ax.transAxes) # ax.text(.2,.8,'PSR J1231-1411', horizontalalignment='center', transform=ax.transAxes) # ax.text(.8,.8,'PSR J2124-3358', horizontalalignment='center', transform=ax.transAxes) ax.step(np.concatenate((bins,np.ones(1))),np.concatenate((phist,phist[-1:])),color='k',where='post') ax.set_xlim(0.0,1.0) ax.set_ylabel('Counts per bin') n,c,s = compute_fourier(ph,nh=args.numharm) model = evaluate_fourier(n,c,s,nbins) ax.plot(bins+bins[1]/2.0,model,color='r',lw=2) if args.showcomps: for k in range(len(c)): ax.plot(np.linspace(0.0,1.0,nbins),evaluate_fourier(n,c,s,nbins,k=k),ls='--') fn,fpow,fphase = 
compute_fourier(ph,nh=args.numharm,pow_phase=True) i=1 log.info("Harm LeahyPower Phase(deg)") for fp, fph in zip(fpow,fphase): log.info("{0:2d} {1:12.3f} {2:9.3f} deg".format(i,fp,np.rad2deg(fph))) if args.output: print("{0:2d} {1:12.3f} {2:9.3f}".format(i,fp,np.rad2deg(fph)),file=resultsfile) i+=1 pcounts = (model-model.min()).sum() pcounts_err = np.sqrt(model.sum() + model.min()*len(model)) if exposure: log.info("Pulsed counts = {0:.3f}, count rate = {1:.3f}+/-{2:.4f} c/s".format(pcounts, pcounts/exposure, pcounts_err/exposure)) log.info("Total rate = {0:.3f} c/s, Unpulsed rate = {1:.3f} c/s".format(n/exposure, n/exposure-pcounts/exposure)) ax = axs[1] ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) ax.errorbar(np.linspace(0.0,1.0,nbins),phist-model,yerr=np.sqrt(phist),fmt='.',ecolor='k') chisq = evaluate_chi2(phist,model) nparams = 1 + 2*args.numharm # 1 for DC + 2 for each sinusoidal component ax.set_xlim(0.0,1.0) ax.set_xlabel('Pulse Phase') ax.set_ylabel('Residuals (counts)') ax.tick_params(direction='in', length=6, width=2, colors='k',top=True) ndof = len(phist)-nparams axs[0].set_title("NumHarm = {0}, Chisq = {1:.2f}, DOF = {2}".format(args.numharm,chisq,ndof)) ax.grid(1) # ax.set_label("{0} Harmonic Fit to Profile".format(args.numharm)) plt.tight_layout() if args.output: fig.savefig("{0}_harmfit.pdf".format(args.output)) # Plot distribution of residuals to compare to a gaussian fig,ax = plt.subplots() ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) chi = (phist-model)/np.sqrt(model) #x, y = np.histogram(chi,bins=np.linspace(-2.0,2.0,0.1)) x = np.linspace(-3.0,3.0,32,endpoint=True) ax.hist(chi,bins=x,density=True) ax.set_title('Histogram of residuals') ax.plot(x,scipy.stats.norm.pdf(x)) plt.tight_layout() # Plot histogram of phase differences to see if they are Poisson fig,ax = plt.subplots() ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) ph.sort() 
pdiffs = (ph[1:]-ph[:-1])*1.0 x = np.linspace(0.0,50.0e-6,200,endpoint=True) histn, histbins, histpatches = ax.hist(pdiffs,bins=x,density=True,log=True) ax.set_title('Histogram of phase differences') ax.set_xlabel('Phase diff') ax.plot(x,np.exp(-len(pdiffs)*(x*1.0))*n) plt.tight_layout() # Compute number of significant harmonics # First by plotting Leahy powers fig,axs = plt.subplots(nrows=2,ncols=1) ax = axs[0] ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) n,pow,phases = compute_fourier(ph,nh=nbins//2,pow_phase=True) ax.semilogy(np.arange(len(pow))+1,pow,marker='o') # Leahy power of 5.99 corresponds to 2 sigma, I think ax.axhline(5.99,color='r') ax.axhline(2.0,color='b',ls='--') #ax.xaxis.set_ticks(np.arange(1,len(pow)+1)) #ax.set_xlabel('Harmonic Number') ax.set_ylabel('Leahy Power') ax.set_title("Power Spectrum") plt.tight_layout() ax = axs[1] ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) ax.plot(np.arange(len(pow))+1,pow,marker='o') ax.axhline(5.99,color='r') ax.axhline(2.0,color='b',ls='--') #ax.xaxis.set_ticks(np.arange(1,len(pow)+1)) ax.set_ylim(0.0,10.0) ax.text(1.0,7.0,'Mean power {0:.3f}'.format(pow.mean())) ax.set_xlabel('Harmonic Number') ax.set_ylabel('Leahy Power') if args.output: fig.savefig("{0}_leahy.pdf".format(args.output)) plt.tight_layout() # Then by computing chisq as a function of number of harmonics in model chisq = [] ndof = [] maxharms = np.arange(1,min(33,nbins//4+1)) n,c,s = compute_fourier(ph,nh=maxharms[-1]) for maxharm in maxharms: model = evaluate_fourier(n,c[:maxharm],s[:maxharm],nbins) chisq.append(evaluate_chi2(phist,model)) nparams = 1 + 2*maxharm # 1 for DC + 2 for each sinusoidal component ndof.append(len(phist)-nparams) chisq = np.asarray(chisq) ndof = np.asarray(ndof) fig,ax = plt.subplots() ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True) ax.plot(maxharms,chisq/ndof,'o',ls='-') ax.set_ylim(0.5,3.0) 
ax.axhline(1.0,color='r',ls='--') ax.set_xlabel('Number of Harmonics') ax.set_ylabel('Chisq') ax.set_title("Chisq/DOF vs. Number of Harmonics") #ax.xaxis.set_ticks(maxharms) #ax.semilogy(maxharms,ndof) plt.tight_layout() if args.output: fig.savefig("{0}_chisq.pdf".format(args.output)) # Then look at amplitudes and phases as a function of energy cuts # Look at color oscillations # Select photons above and below some energy cut and look at the ratio ensplit = 55 softidx = np.where(en<ensplit)[0] hardidx = np.where(en>=ensplit)[0] colorbins = 32 softbins, softn = compute_phist(ph[softidx],nbins=colorbins) hardbins, hardn = compute_phist(ph[hardidx],nbins=colorbins) softn = np.asarray(softn,dtype=np.float) hardn = np.asarray(hardn,dtype=np.float) fig,ax = plt.subplots() color = hardn/softn # Propagate Poisson errors to get error in ratio cerr = color*np.sqrt(1.0/softn + 1.0/hardn) #ax.step(np.concatenate((softbins,np.ones(1))),np.concatenate((color,color[-1:])),color='C0',where='post') ax.errorbar(softbins+0.5*softbins[1],color,yerr=cerr,color='k',fmt='.') ax.set_xlim(0.0,1.0) ax.set_xlabel('Pulse Phase') ax.set_ylabel('Spectral Color') if not args.noplot: plt.show()
true
e7bb70630e4b6ec76f76e58ce81be3b648801540
Python
amcastro-tri/drake
/tools/workspace/cmake_util.bzl
UTF-8
1,444
2.71875
3
[ "BSD-3-Clause" ]
permissive
# -*- python -*-

# Helper utilities for working with CMake imported targets.

def split_cmake_list(cmake_list_str):
    """Convert a string containing a CMake-style list into a 'proper' list."""
    if not cmake_list_str:
        return []
    return cmake_list_str.split(";")

def _is_library_extension(ext):
    """Return True if ext looks like a shared/static library extension."""
    return (ext in ["a", "so", "dylib"] or
            ext.startswith("so.") or
            ext.endswith(".dylib"))

def library_to_linkopts(path):
    """Convert an absolute path to a library into suitable linkopts."""
    if not path.startswith("/"):
        fail("{} is not an absolute path.".format(path))
    dirname, libname = path.rsplit("/", 1)
    # Pair `-L<dir>` with `-Wl,-rpath,<dir>` so the library is found at runtime.
    # See https://github.com/RobotLocomotion/drake/issues/7387#issuecomment-359952616  # noqa
    linkopts = [
        "-Wl,-rpath," + dirname,
        "-L" + dirname,
    ]
    ext = libname.split(".", 1)[1] if "." in libname else ""
    if not _is_library_extension(ext):
        fail("{} does not appear to be a path to a library.".format(path))
    if not libname.startswith("lib"):
        fail("Name of library {} must start with `lib`.".format(libname))
    # `libfoo.so.1` -> `-lfoo`.
    linkopts.append("-l" + libname[3:].split(".", 1)[0])
    return linkopts
true
8428353ad9252d3561f39d36d9122f163b1e49d1
Python
CristobalMH/CYPCristobalMH
/libro/problemas_resueltos/capitulo_3/problema3_1.py
UTF-8
343
3.796875
4
[]
no_license
# Read nine integers and report the average of the even numbers and the
# sum of the odd numbers entered (zeros are skipped, as in the exercise).
sumpar = 0  # running sum of even numbers
sumimp = 0  # running sum of odd numbers
cuepar = 0  # count of even numbers
for i in range(1, 10, 1):
    num = int(input("Ingrese un número (1 - 270)"))
    if num != 0:
        # Plain parity test; the original used the equivalent (-1) ** num > 0.
        if num % 2 == 0:
            sumpar += num
            cuepar += 1
        else:
            sumimp += num
# BUGFIX: guard against ZeroDivisionError when no even number was entered;
# the average is reported as 0 in that case.
propar = sumpar / cuepar if cuepar else 0
print("El promedio de los números pares es: %.2f y la suma de los números impares es %d" %(propar, sumimp))
true
1f54bef28b15491a0ec8aba4f7cb1dd56d261214
Python
jizongFox/DGA1033
/admm_research/loss/__init__.py
UTF-8
485
2.546875
3
[]
no_license
from .loss import CrossEntropyLoss2d, MSE_2D, PartialCrossEntropyLoss2d, negativePartialCrossEntropyLoss2d

# Registry mapping a short configuration name to its loss class.
LOSS = {'cross_entropy': CrossEntropyLoss2d,
        'mse_2d': MSE_2D,
        'partial_ce': PartialCrossEntropyLoss2d,
        'neg_partial_ce': negativePartialCrossEntropyLoss2d}


def get_loss_fn(name: str, **kwargs):
    """Instantiate and return the loss registered under ``name``.

    :param name: one of the keys of ``LOSS``.
    :param kwargs: forwarded to the loss class constructor.
    :raises ValueError: if ``name`` is unknown or construction fails.
    """
    # BUGFIX: the original called LOSS.get(name)(**kwargs); an unknown name
    # returned None and raised a confusing "'NoneType' is not callable"
    # wrapped in a generic ValueError.  Validate the name explicitly first.
    loss_cls = LOSS.get(name)
    if loss_cls is None:
        raise ValueError('unknown loss name %r, expected one of %s' % (name, sorted(LOSS)))
    try:
        return loss_cls(**kwargs)
    except Exception as e:
        # Preserve the original contract: construction failures also surface
        # as ValueError (callers may be catching it).
        raise ValueError('failed to construct loss %r, with %s' % (name, e))
true
2968621530ad6ab6bdc2f9efb6c58d74be47f4f7
Python
jonaqp/Multiple-Destinations-Route-Optimizer
/OptFun.py
UTF-8
4,150
2.9375
3
[]
no_license
#!/usr/bin/env python
# NOTE(review): Python 2 code (urllib.urlopen was removed in Python 3);
# this module does not run unmodified under Python 3.
import os
import urllib, json, pdb
import datetime
from itertools import chain, combinations, permutations
import time
from tstr2sec import fun_tstr2sec

#Initialize Time
start_time = time.time()

#Main Function
def mainfun(origins, destinations, waypoints, travel_mode, depart_time):
    """Enumerate every ordering of the user's waypoints and hand them to the
    optimizer, which queries the Google Distance Matrix API leg by leg.

    origins/destinations: place-name strings; waypoints: '-'-separated
    place names; travel_mode: Google travel mode; depart_time: "now" or
    an "HH:MM" time on the current date.
    Returns (best_route, best_time) from opttimzer().
    """
    #Setting up Google API request
    endpoint = 'https://maps.googleapis.com/maps/api/distancematrix/json?'
    api_key = 'INSERT YOUR API'
    #Converting user input to readable format by the software.
    currentdate = datetime.datetime.today().strftime('%Y-%m-%d')
    if depart_time == "now":
        timestamp = time.time()
    else:
        # Combine today's date with the user-supplied HH:MM and convert to epoch.
        depart_time = str(str(currentdate) + ' ' + depart_time)
        ts = time.strptime(depart_time, '%Y-%m-%d %H:%M')
        timestamp = time.mktime(ts)
    departure_time = (timestamp)
    departure_time = "{:.0f}".format((departure_time))
    # Google expects '+' instead of spaces in place names.
    origins = origins.replace(" ", "+")
    destinations = destinations.replace(" ", "+")
    waypoints_names = waypoints.replace(" ", "+")
    waypoints_names = waypoints_names.split('-')
    traffic_mode = 'pessimistic'
    #Creating dictionaries to host user waypoints for the permutations process
    way_num = len(waypoints_names)
    requested_possible_combs = []  # NOTE(review): never used below
    permutation = []
    #Defining initial variables
    #a= first stop, b = second stop, total_time= time from stop a to stop b, itcount = premuation calculation count, first = boolean value to check if first premuation count.
    a = 0
    b = 0
    total_time = 0
    itcount = 0
    first = True
    orig_destinations = destinations  # NOTE(review): never used below
    orig_origin = origins             # NOTE(review): never used below
    #Creating list of all possible routes between all user waypoints. The list contains as many rows as possible routes.
    permutation_raw = list(permutations(waypoints_names, way_num))
    #Because the route will always start from the origin and ends at the destiniation, the below line modifies the raw permutation list to add origin stop at the beginnning of the route, and destiniation at the end of the route
    for n in permutation_raw:
        n = list(n)
        n.append(destinations)
        n.insert(0, origins)
        permutation.append(n)
    return opttimzer(permutation, a, b, first, destinations, origins, departure_time, traffic_mode, travel_mode, api_key, endpoint)

#Optimization/Salesman problem algorithm start here. In short, the software sends API request to Google Maps requesting travel time between point A to point B. Then, it recives Json structure containing travelling time between point A to point B. After that, the software stores the received data in a list, and compares them with the next travelling time taken to travel between point A and C, if it's less than A and B; it nominates it as better route.
def opttimzer(permutation, a, b, first, destinations, origins, departure_time, traffic_mode, travel_mode, api_key, endpoint):
    """Brute-force route search: sum the per-leg durations of every permuted
    route and keep the fastest one.

    Returns (best_route, best_time) where best_time is an "H:MM:SS" string.
    NOTE(review): `calculated` (the per-leg duration cache) is re-created for
    every route, so identical legs are re-fetched across routes; hoisting it
    out of the loop would save API calls.
    """
    for route in permutation:
        num_routes = len(permutation)
        len_route = len(route)
        duration = 0
        total_time = 0
        count = 0
        itcount = 0
        calculated = {}
        prog = int(itcount) / (num_routes) * 100  # NOTE(review): computed but never used
        if not first:
            itcount += 1
        # Walk consecutive stop pairs (a, b) along this candidate route.
        while (count < len_route - 1):
            a = count
            b = a + 1
            count += 1
            # Cache key is the concatenation of the two stop names.
            # NOTE(review): str(...) around a str concatenation is a no-op.
            check = str(route[a] + route[b]) in calculated.keys()
            if check:
                duration = calculated.get(route[a] + route[b])
                total_time = int(duration) + int(total_time)
            elif not check:
                # NOTE(review): this first assignment stores the *previous*
                # leg's duration; it is overwritten after the API call below,
                # so it only matters if the request raises midway.
                calculated[str(route[a] + route[b])] = duration
                nav_request = 'origins={}&destinations={}&departure_time={}&mode={}&traffic_model={}&key={}'.format(route[a], route[b], departure_time, travel_mode, traffic_mode, api_key)
                request = endpoint + nav_request
                response = urllib.urlopen(request).read()
                directions = json.loads(response)
                # Human-readable duration string, e.g. "1 hour 5 mins".
                trip_time = (directions["rows"][0]["elements"][0]["duration"]["text"])
                duration = fun_tstr2sec(trip_time)
                calculated[str(route[a] + route[b])] = duration
                calculated.copy().items()  # NOTE(review): no-op, result discarded
                total_time = int(duration) + int(total_time)
        if first:
            # Seed the best route/time with the first permutation.
            best_time = total_time
            best_route = route
            calculated[str(route[a] + route[b])] = duration
            first = False
        if not first and total_time < best_time:
            best_time = total_time
            # Undo the '+'-for-space substitution for display.
            best_route = str(route).replace("+", " ")
    best_time = str(datetime.timedelta(seconds=best_time))
    return best_route, best_time
true
b9923f8063dfc448559058155ff301517a898bfa
Python
Asunqingwen/LeetCode
/medium/Shortest Word Distance II.py
UTF-8
1,682
4.03125
4
[]
no_license
# -*- coding: utf-8 -*-
# @Time : 2019/9/2 0002 14:46
# @Author : 没有蜡笔的小新
# @E-mail : sqw123az@sina.com
# @FileName: Shortest Word Distance II.py
# @Software: PyCharm
# @Blog :https://blog.csdn.net/Asunqingwen
# @GitHub :https://github.com/Asunqingwen
"""
Design a class which receives a list of words in the constructor, and implements a method that takes two words word1 and word2 and return the shortest distance between these two words in the list. Your method will be called repeatedly many times with different parameters. 

Example:
Assume that words = ["practice", "makes", "perfect", "coding", "makes"].

Input: word1 = “coding”, word2 = “practice”
Output: 3
Input: word1 = "makes", word2 = "coding"
Output: 1


Note:
You may assume that word1 does not equal to word2, and word1 and word2 are both in the list.
"""
import sys
from typing import List


class WordDistance:
	"""Pre-indexes each word's positions so repeated distance queries are cheap."""

	def __init__(self, words: List[str]):
		# Map each word to the (ascending) list of indices where it occurs.
		self.index_dict = {}
		for index, word in enumerate(words):
			if word in self.index_dict:
				self.index_dict[word].append(index)
			else:
				self.index_dict[word] = [index]

	def shortest(self, word1: str, word2: str) -> int:
		"""Return the minimum |i - j| over all positions i of word1, j of word2.

		Two-pointer merge over the two sorted position lists,
		O(len(l1) + len(l2)) per query.
		"""
		l1, l2 = self.index_dict[word1], self.index_dict[word2]
		min_dis = sys.maxsize
		i1, i2 = 0, 0
		while i1 < len(l1) and i2 < len(l2):
			if l1[i1] < l2[i2]:
				# BUGFIX: the original read l2[i1] here, mixing the two
				# cursors — it produced negative "distances" (e.g. for
				# words=["practice","makes","perfect","coding","makes"],
				# shortest("makes","coding") returned -2) and could raise
				# IndexError when i1 >= len(l2).
				min_dis = min(min_dis, l2[i2] - l1[i1])
				i1 += 1
			else:
				min_dis = min(min_dis, l1[i1] - l2[i2])
				i2 += 1
		return min_dis


if __name__ == '__main__':
	words = ["practice", "makes", "perfect", "coding", "makes"]
	word1 = "coding"
	word2 = "practice"
	res = WordDistance(words)
	result = res.shortest(word1, word2)
	print(result)
true
00af330172081be92f1cf979deec05bff043b309
Python
ESArch/GeolocatedMining
/MovieLens/Validator.py
UTF-8
16,904
2.71875
3
[]
no_license
"""Validation utilities for context-aware pattern mining on MovieLens.

Each ``validate*`` variant scores mined patterns against their most
contextually similar transactions, using like/seen statistics pulled from a
local PostgreSQL MovieLens database and context vectors stored under data/.

NOTE(review): the module executes ``validate5(1)`` at import time (bottom of
file), so importing it hits the database and the data/ files.
"""
import networkx as nx                 # NOTE(review): unused in this module
import numpy as np
import Graphs.GraphSimilarity as gs   # NOTE(review): unused in this module
from operator import itemgetter       # NOTE(review): unused in this module
import scipy
# NOTE(review): scipy.spatial.distance is used below but only `import scipy`
# appears; this relies on scipy.spatial being imported elsewhere — confirm.
import psycopg2
import pandas as pd


def select(query):
    """Run *query* against the local MovieLens database and return fetchall().

    Returns None if the connection or query fails (the error is printed).
    """
    con = None
    result = None
    try:
        con = psycopg2.connect(database='MovieLens', user='postgres', password='postgres', host='localhost')
        cur = con.cursor()
        cur.execute(query)
        result = cur.fetchall()
    except psycopg2.DatabaseError as e:
        print("Error {}".format(e))
    finally:
        if con:
            con.close()
    return result


def find_users_by_movies(movie1, movie2, min_rating1, min_rating2):
    """Count users who rated two movies at (or above) two rating thresholds.

    NOTE(review): the .format() argument order does not match the parameter
    names — placeholders receive (movie1, movie2, min_rating1, min_rating2)
    in the positions (movie_id_1, min_rating_1, movie_id_2, min_rating_2).
    The (commented-out) call sites pass arguments in the order
    (movie, rating, movie, rating), which happens to line up, so the
    parameter *names* are misleading rather than the behavior being wrong.
    NOTE(review): the query is built by string formatting — acceptable only
    because inputs are internal integers, not user input.
    """
    query = "SELECT r1.user_id \
             FROM rating r1, rating r2 \
             WHERE r1.movie_id = {} AND r1.rating_value >= {} \
             AND r2.movie_id = {} and r2.rating_value >= {} \
             AND r1.user_id = r2.user_id".format(movie1, movie2, min_rating1, min_rating2)
    result = select(query)
    return len(result)


def build_ratings_table(min_rating):
    """Return a Series of rating values indexed by (user_id, movie_id),
    restricted to ratings >= min_rating, sorted for fast partial indexing."""
    query = "SELECT user_id, movie_id, rating_value FROM rating WHERE rating_value >= {}".format(min_rating)
    result = select(query)
    tuples = [(x[0], x[1]) for x in result]
    values = [x[2] for x in result]
    index = pd.MultiIndex.from_tuples(tuples, names=['user_id', 'movie_id'])
    s = pd.Series(values, index=index)
    s.sort_index(inplace=True)
    return s


def validate(num_transactions):
    """Per-item confidence: for each (pattern item, transaction item) pair,
    score = |liked both| / |liked transaction item ∩ seen pattern item|.

    Writes per-pattern and global averages to validation(N).txt.
    """
    pattern_contexts = np.load("data/pattern_contexts.npy")
    transaction_contexts = np.load("data/transaction_contexts.npy")
    with open("data/decoded_itemsets.txt", "r") as f:
        transactions = f.readlines()
    with open("data/decoded_patterns.txt", "r") as f, open("validation({}).txt".format(num_transactions), "w") as g:
        count = 0
        global_confidences = list()
        # Ratings >= 0 ("seen") and >= 4 ("liked"), indexed (user, movie).
        seen_ratings = build_ratings_table(0)
        liked_ratings = build_ratings_table(4)
        for line in f:
            # Cosine distance between this pattern's context and every transaction context.
            context_vector = pattern_contexts[count, :].reshape(1, pattern_contexts.shape[1])
            distances = scipy.spatial.distance.cdist(transaction_contexts, context_vector, 'cosine')
            distances = np.reshape(distances, distances.shape[0])
            most_relevant_transaction_indexes = distances.argsort()[:num_transactions]
            # most_relevant_transaction = transactions[most_relevant_transaction_index].strip().split(" ")
            # Union of all item ids from the top-N most similar transactions.
            transaction_items = set()
            for i in most_relevant_transaction_indexes:
                transaction = transactions[i].strip().split(" ")
                for transaction_item in transaction:
                    transaction_items.add(int(transaction_item))
            pattern = line.strip().split(" ")
            local_confidences = list()
            for pattern_item in pattern:
                pattern_item_id = int(pattern_item)
                like_first = set(liked_ratings[:, pattern_item_id].to_dict().keys())
                seen_first = set(seen_ratings[:, pattern_item_id].to_dict().keys())
                for transaction_item in transaction_items:
                    transaction_item_id = int(transaction_item)
                    # like_both = find_users_by_movies(pattern_item_id, 4, transaction_item_id, 4)
                    # seen_both = find_users_by_movies(pattern_item_id, 0, transaction_item_id, 4)
                    like_second = set(liked_ratings[:, transaction_item_id].to_dict().keys())
                    like_both = len(like_second.intersection(like_first))
                    # NOTE(review): the denominator intersects *liked* (not seen)
                    # transaction users with seen pattern users — confirm intended.
                    seen_both = len(like_second.intersection(seen_first))
                    if seen_both > 0:
                        local_confidences.append(like_both/seen_both)
            local_confidence = np.average(local_confidences)
            print("Confidence for pattern {}: {}".format(count, local_confidence))
            g.write("Confidence for pattern {}: {}\n".format(count, local_confidence))
            count += 1
            global_confidences.append(local_confidence)
            # if count > 10:
            #     break
        global_confidence = np.average(global_confidences)
        print("Global confidence: {}".format(global_confidence))
        g.write("\nGlobal confidence: {}".format(global_confidence))


def validate2(num_transactions):
    """Like validate(), but the pattern side is the *intersection* of users who
    liked every pattern item (pattern-as-a-whole rather than item by item)."""
    pattern_contexts = np.load("data/pattern_contexts.npy")
    transaction_contexts = np.load("data/transaction_contexts.npy")
    with open("data/decoded_itemsets.txt", "r") as f:
        transactions = f.readlines()
    with open("data/decoded_patterns.txt", "r") as f, open("validation2({}).txt".format(num_transactions), "w") as g:
        count = 0
        global_confidences = list()
        seen_ratings = build_ratings_table(0)
        liked_ratings = build_ratings_table(4)
        for line in f:
            context_vector = pattern_contexts[count, :].reshape(1, pattern_contexts.shape[1])
            distances = scipy.spatial.distance.cdist(transaction_contexts, context_vector, 'cosine')
            distances = np.reshape(distances, distances.shape[0])
            most_relevant_transaction_indexes = distances.argsort()[:num_transactions]
            # most_relevant_transaction = transactions[most_relevant_transaction_index].strip().split(" ")
            transaction_items = set()
            for i in most_relevant_transaction_indexes:
                transaction = transactions[i].strip().split(" ")
                for transaction_item in transaction:
                    transaction_items.add(int(transaction_item))
            pattern = line.strip().split(" ")
            local_confidences = list()
            # Users who liked *every* item of the pattern.
            liked_pattern = set(liked_ratings[:, int(pattern[0])].to_dict().keys())
            for pattern_item in pattern:
                pattern_item_id = int(pattern_item)
                liked_pattern = liked_pattern.intersection(
                    set(liked_ratings[:, pattern_item_id].to_dict().keys()))
            for transaction_item in transaction_items:
                transaction_item_id = int(transaction_item)
                # like_both = find_users_by_movies(pattern_item_id, 4, transaction_item_id, 4)
                # seen_both = find_users_by_movies(pattern_item_id, 0, transaction_item_id, 4)
                like_transaction_item = set(liked_ratings[:, transaction_item_id].to_dict().keys())
                seen_transaction_item = set(seen_ratings[:, transaction_item_id].to_dict().keys())
                like_both = len(liked_pattern.intersection(like_transaction_item))
                seen_both = len(liked_pattern.intersection(seen_transaction_item))
                if seen_both > 0:
                    local_confidences.append(like_both / seen_both)
            local_confidence = np.average(local_confidences)
            print("Confidence for pattern {}: {}".format(count, local_confidence))
            g.write("Confidence for pattern {}: {}\n".format(count, local_confidence))
            count += 1
            global_confidences.append(local_confidence)
            # if count > 10:
            #     break
        global_confidence = np.average(global_confidences)
        print("Global confidence: {}".format(global_confidence))
        g.write("\nGlobal confidence: {}".format(global_confidence))


def validate3(num_transactions):
    """Variant of validate2(): score is the *coverage* of the pattern's liking
    users who have also seen the transaction item (seen_both / |liked pattern|)."""
    pattern_contexts = np.load("data/pattern_contexts.npy")
    transaction_contexts = np.load("data/transaction_contexts.npy")
    with open("data/decoded_itemsets.txt", "r") as f:
        transactions = f.readlines()
    with open("data/decoded_patterns.txt", "r") as f, open("validation3({}).txt".format(num_transactions), "w") as g:
        count = 0
        global_confidences = list()
        seen_ratings = build_ratings_table(0)
        liked_ratings = build_ratings_table(4)
        for line in f:
            context_vector = pattern_contexts[count, :].reshape(1, pattern_contexts.shape[1])
            distances = scipy.spatial.distance.cdist(transaction_contexts, context_vector, 'cosine')
            distances = np.reshape(distances, distances.shape[0])
            most_relevant_transaction_indexes = distances.argsort()[:num_transactions]
            # most_relevant_transaction = transactions[most_relevant_transaction_index].strip().split(" ")
            transaction_items = set()
            for i in most_relevant_transaction_indexes:
                transaction = transactions[i].strip().split(" ")
                for transaction_item in transaction:
                    transaction_items.add(int(transaction_item))
            pattern = line.strip().split(" ")
            local_confidences = list()
            liked_pattern = set(liked_ratings[:, int(pattern[0])].to_dict().keys())
            for pattern_item in pattern:
                pattern_item_id = int(pattern_item)
                liked_pattern = liked_pattern.intersection(
                    set(liked_ratings[:, pattern_item_id].to_dict().keys()))
            liked = len(liked_pattern)
            for transaction_item in transaction_items:
                transaction_item_id = int(transaction_item)
                # like_both = find_users_by_movies(pattern_item_id, 4, transaction_item_id, 4)
                # seen_both = find_users_by_movies(pattern_item_id, 0, transaction_item_id, 4)
                like_transaction_item = set(liked_ratings[:, transaction_item_id].to_dict().keys())
                seen_transaction_item = set(seen_ratings[:, transaction_item_id].to_dict().keys())
                # like_both = len(liked_pattern.intersection(like_transaction_item))
                seen_both = len(liked_pattern.intersection(seen_transaction_item))
                if liked > 0:
                    local_confidences.append(seen_both / liked)
            local_confidence = np.average(local_confidences)
            print("Confidence for pattern {}: {}".format(count, local_confidence))
            g.write("Confidence for pattern {}: {}\n".format(count, local_confidence))
            count += 1
            global_confidences.append(local_confidence)
            # if count > 10:
            #     break
        global_confidence = np.average(global_confidences)
        print("Global confidence: {}".format(global_confidence))
        g.write("\nGlobal confidence: {}".format(global_confidence))


def validate4(num_transactions):
    """Scores each relevant *transaction as a whole* against the pattern's
    liking users, rather than pooling transaction items."""
    pattern_contexts = np.load("data/pattern_contexts.npy")
    transaction_contexts = np.load("data/transaction_contexts.npy")
    with open("data/decoded_itemsets.txt", "r") as f:
        transactions = f.readlines()
    with open("data/decoded_patterns.txt", "r") as f, open("validation4({}).txt".format(num_transactions), "w") as g:
        count = 0
        global_confidences = list()
        seen_ratings = build_ratings_table(0)
        liked_ratings = build_ratings_table(4)
        for line in f:
            context_vector = pattern_contexts[count, :].reshape(1, pattern_contexts.shape[1])
            distances = scipy.spatial.distance.cdist(transaction_contexts, context_vector, 'cosine')
            distances = np.reshape(distances, distances.shape[0])
            most_relevant_transaction_indexes = distances.argsort()[:num_transactions]
            pattern = line.strip().split(" ")
            local_confidences = list()
            liked_pattern = set(liked_ratings[:, int(pattern[0])].to_dict().keys())
            for pattern_item in pattern:
                pattern_item_id = int(pattern_item)
                liked_pattern = liked_pattern.intersection(
                    set(liked_ratings[:, pattern_item_id].to_dict().keys()))
            for i in most_relevant_transaction_indexes:
                transaction = transactions[i].strip().split(" ")
                seen_transaction = set(seen_ratings[:, int(transaction[0])].to_dict().keys())
                liked_transaction = set(liked_ratings[:, int(transaction[0])].to_dict().keys())
                # NOTE(review): this loop *overwrites* seen/liked_transaction on
                # every item, so only the transaction's last item is actually
                # used below — an intersection was probably intended. Confirm.
                for transaction_item in transaction:
                    transaction_item_id = int(transaction_item)
                    seen_transaction = set(seen_ratings[:, transaction_item_id].to_dict().keys())
                    liked_transaction = set(liked_ratings[:, transaction_item_id].to_dict().keys())
                liked_both = len(liked_pattern.intersection(liked_transaction))
                seen_both = len(liked_pattern.intersection(seen_transaction))
                print("{} usuarios han visto la transacción".format(seen_both))
                if seen_both > 0:
                    local_confidences.append(liked_both / seen_both)
            local_confidence = np.average(local_confidences)
            print("Confidence for pattern {}: {}".format(count, local_confidence))
            g.write("Confidence for pattern {}: {}\n".format(count, local_confidence))
            count += 1
            global_confidences.append(local_confidence)
        global_confidence = np.average(global_confidences)
        print("Global confidence: {}".format(global_confidence))
        g.write("\nGlobal confidence: {}".format(global_confidence))


def validate5(num_transactions):
    """Scores each relevant transaction by the *average rating* its movies get
    from the users who liked the whole pattern."""
    pattern_contexts = np.load("data/pattern_contexts.npy")
    transaction_contexts = np.load("data/transaction_contexts.npy")
    with open("data/decoded_itemsets.txt", "r") as f:
        transactions = f.readlines()
    with open("data/decoded_patterns.txt", "r") as f, open("validation5({}).txt".format(num_transactions), "w") as g:
        count = 0
        global_scores = list()
        seen_ratings = build_ratings_table(0)
        liked_ratings = build_ratings_table(4)
        for line in f:
            context_vector = pattern_contexts[count, :].reshape(1, pattern_contexts.shape[1])
            distances = scipy.spatial.distance.cdist(transaction_contexts, context_vector, 'cosine')
            distances = np.reshape(distances, distances.shape[0])
            most_relevant_transaction_indexes = distances.argsort()[:num_transactions]
            pattern = line.strip().split(" ")
            liked_pattern = set(liked_ratings[:, int(pattern[0])].to_dict().keys())
            for pattern_item in pattern:
                pattern_item_id = int(pattern_item)
                liked_pattern = liked_pattern.intersection(
                    set(liked_ratings[:, pattern_item_id].to_dict().keys()))
            local_scores = list()
            for i in most_relevant_transaction_indexes:
                transaction = transactions[i].strip().split(" ")
                seen_transaction = set(seen_ratings[:, int(transaction[0])].to_dict().keys())
                # liked_transaction = set(liked_ratings[:, int(transaction[0])].to_dict().keys())
                transaction_movies = list()
                # NOTE(review): as in validate4, seen_transaction is overwritten
                # per item — only the last item's viewers survive the loop.
                for transaction_item in transaction:
                    transaction_item_id = int(transaction_item)
                    transaction_movies.append(transaction_item_id)
                    seen_transaction = set(seen_ratings[:, transaction_item_id].to_dict().keys())
                seen_both = liked_pattern.intersection(seen_transaction)
                if len(transaction_movies) == 1:
                    # Single-movie transaction: one vectorized lookup suffices.
                    print("Caso 1 ({} users) calculando...".format(len(seen_both)))
                    transaction_rating = np.average(seen_ratings.loc[seen_both,transaction_movies].values)
                    print("Terminado")
                else:
                    # Multi-movie transaction: average each user's mean rating.
                    print("Caso 2 ({} users) calculando...".format(len(seen_both)))
                    transaction_ratings = list()
                    for user in seen_both:
                        user_rating_avg = np.average(seen_ratings.loc[user,transaction_movies].values)
                        # idx = pd.IndexSlice
                        # print(seen_ratings.loc[idx[user, transaction_movies], :])
                        transaction_ratings.append(user_rating_avg)
                    transaction_rating = np.average(transaction_ratings)
                    print("Terminado")
                local_scores.append(transaction_rating)
            local_score = np.average(local_scores)
            print("Local score for pattern {}: {}".format(count, local_score))
            g.write("Local score for pattern {}: {}\n".format(count, local_score))
            count += 1
            global_scores.append(local_score)
            print("Global score until now: {}".format(np.average(global_scores)))
        global_score = np.average(global_scores)
        print("Global score: {}".format(global_score))
        g.write("\nGlobal score: {}".format(global_score))


# validate(5)
# validate2(5)
# validate3(5)
# validate4(5)
# NOTE(review): module-level call — runs the full validation on import.
validate5(1)
116b6c4363297ac8c67f2c6cd71610aae2c51a00
Python
cwang-armani/learn-python
/01 python基础/3 列表.py
UTF-8
389
3.703125
4
[]
no_license
# Demo of basic list operations: append/insert/extend/remove and slicing.
names=["laowang","laoliu","laoli"]
names.append("laozhao")  # append adds at the end; pop removes from the end — they mirror each other
print(names)
names.insert(0,"pig")  # insert places an element at a given position
print(names)
names2=["laosi","laoliu"]
names.extend(names2)  # extend appends every element of names2
print(names)
names.remove("laowang")  # remove deletes by value; `del names[0]` would delete by index
print(names)
print(names[2:5])  # slicing works on lists just like on strings
print(names[::-1])  # lists share string-like behaviour: reversal, slicing, etc.
true
de8cc1205b0874a1b320a6f25c2160b6b6c920db
Python
amirziai/CarND-Advanced-Lane-Lines
/utils.py
UTF-8
340
3.25
3
[]
no_license
import pickle as pickle_module


def unpickle(file_path):
    """Load and return the object stored in the pickle file at *file_path*."""
    with open(file_path, 'rb') as handle:
        return pickle_module.load(handle)


def pickle(object_to_pickle, file_path):
    """Serialize *object_to_pickle* to *file_path* with the pickle module."""
    with open(file_path, 'wb') as handle:
        pickle_module.dump(object_to_pickle, handle)
true
9623ccb4b958e7c39d51aa5d5675efd950e2b7ab
Python
leether/Pub--1stECG
/final_codes/f_preprocess.py
UTF-8
4,530
2.5625
3
[ "MIT" ]
permissive
### data generation
### Pre-processing helpers: ECG signal length padding/truncation and
### training data / label generation.
### TODO: switch to a generator during training to reduce memory use.
import pandas as pd
import numpy as np
import os
import scipy.io as sio
import random

### read_data
# Hard-coded data locations for this deployment.
finalPath = '/media/uuser/data/final_run/'
workPath = '/media/uuser/data/01_Project/data/'
trainPath = '/media/jdcloud/Train/'
valPath = '/media/jdcloud/Val/'
ref_name = '/media/jdcloud/reference.csv'
# Variable names of the 12 ECG leads (plus age/sex) inside each .mat record.
keysname = ('I', 'II', 'III', 'aVR', 'aVL', 'aVF',
            'V1', 'V2', 'V3', 'V4', 'V5', 'V6', 'age', 'sex')


def fill_length(ecg, t_len=50000):
    """Pad (wrap-around, centered) or truncate a 1xN lead signal to t_len samples.

    :param ecg: 2-D array-like of shape (1, N) holding one lead.
    :param t_len: target number of samples.
    :return: 1-D array of length t_len.
    """
    len_s = len(ecg[0])
    if len_s < t_len:
        # Center the original signal and wrap-pad both ends.
        len_f = (t_len - len_s) // 2
        return np.pad(np.reshape(ecg, (len_s,)), (len_f, t_len - len_s - len_f), 'wrap').T
    else:
        return ecg[0][0:t_len].T


def data_gen(data_path, id_list, len_target=50000):
    """Single-label data generation: load the 12-lead signals for the records
    selected by id_list and return an array of shape (len(id_list), t_len, 12)."""
    files = sorted(os.listdir(data_path))
    num_records = len(id_list)
    t_len = len_target
    data_x = np.empty([num_records, t_len, 12])
    for i in range(num_records):
        ecg = np.empty([t_len, 12])
        mypath = data_path + files[id_list[i]]
        data = sio.loadmat(mypath)
        # read 12 leads
        for lead in range(12):
            temp = data[keysname[lead]]
            ecg[:, lead] = fill_length(temp, t_len)
        data_x[i, :, :] = ecg.reshape((1, t_len, 12))
    return data_x


def data_gen_12_leads(data_path, ref_path, id_list, len_target=50000):
    """Multi-label 12-lead data and label generation.

    Returns (data_x, data_y) with data_x of shape (len(id_list), t_len, 12)
    and data_y the binarized multi-label matrix.
    NOTE(review): assumes label row i corresponds to files[id_list[i]] —
    verify against how id_list is produced.
    """
    # Lazy import keeps the module importable without scikit-learn installed.
    from sklearn.preprocessing import MultiLabelBinarizer

    files = sorted(os.listdir(data_path))
    # BUGFIX: num_records was overwritten by label.shape[0] below, which could
    # exceed len(id_list) and raise IndexError on files[id_list[i]].
    num_records = len(id_list)
    t_len = len_target
    data_x = np.empty([num_records, t_len, 12])
    with open(ref_path) as f:
        label = pd.read_csv(f)
    num_columns = label.shape[1]
    tag = []
    for i in range(num_records):
        ecg = np.empty([t_len, 12])
        mypath = data_path + files[id_list[i]]
        data = sio.loadmat(mypath)
        for lead in range(12):
            temp = data[keysname[lead]]
            ecg[:, lead] = fill_length(temp, t_len)
        data_x[i, :, :] = ecg.reshape((1, t_len, 12))
        # Collect this record's label ids (NaN-padded columns are dropped).
        temp = np.asarray(label.values[i, 1:num_columns], dtype='float')
        temp = temp[~np.isnan(temp)]
        tag.append(temp.astype(int).tolist())
    mlb = MultiLabelBinarizer()
    data_y = mlb.fit_transform(tag)
    return data_x, data_y


def data_gen_18_leads(data_path, frft_path, ref_path, id_list, len_target=50000):
    """Multi-label 18-lead generation: 12 ECG leads plus 6 FRFT-derived leads.

    NOTE(review): assumes label row i corresponds to files[id_list[i]].
    """
    from sklearn.preprocessing import MultiLabelBinarizer  # lazy import

    files = sorted(os.listdir(data_path))
    num_records = len(id_list)  # BUGFIX: was overwritten by label.shape[0]
    t_len = len_target
    data_x = np.empty([num_records, t_len, 18])
    with open(ref_path) as f:
        label = pd.read_csv(f)
    num_columns = label.shape[1]
    tag = []
    for i in range(num_records):
        ecg = np.empty([t_len, 18])
        mypath = data_path + files[id_list[i]]
        data = sio.loadmat(mypath)
        mypath = frft_path + files[id_list[i]]
        frft_signal = sio.loadmat(mypath)
        # read 12 leads
        for lead in range(12):
            temp = data[keysname[lead]]
            ecg[:, lead] = fill_length(temp, t_len)
        for lead in range(12, 18):
            # FIXME(review): sio.loadmat returns a dict, so slicing it like an
            # array raises TypeError at runtime; the FRFT array must first be
            # pulled out of the dict by its .mat variable name.
            ecg[:, lead] = frft_signal[:, lead - 12]
        data_x[i, :, :] = ecg.reshape((1, t_len, 18))
        temp = np.asarray(label.values[i, 1:num_columns], dtype='float')
        temp = temp[~np.isnan(temp)]
        tag.append(temp.astype(int).tolist())
    mlb = MultiLabelBinarizer()
    data_y = mlb.fit_transform(tag)
    return data_x, data_y


def get_index(classes, ref_path=ref_name):
    """classes count from 0.

    Return (idx1, idx2): the positive record indices for class ``classes`` and
    a proportionally sampled list of negative indices from the other classes.
    """
    # dropped not used
    label_name = ('label_0', 'label_1', 'label_2', 'label_3', 'label_4',
                  'label_5', 'label_6', 'label_7', 'label_8')
    labels = sio.loadmat(ref_path)
    idx1 = labels[label_name[classes]].tolist()
    idx2 = []
    for i in range(9):
        if i != classes:
            # BUGFIX: negatives were always drawn from label_name[0] instead of
            # the current class i; also renamed from `id` (shadowed builtin).
            ids = labels[label_name[i]].tolist()
            random.shuffle(ids)
            # 7703 — presumably the size of the largest class list, used to keep
            # class proportions; TODO confirm against the reference file.
            num = round(len(ids) / 7703 * len(idx1))
            # BUGFIX: was `id2 += ...`, an undefined name (NameError at runtime).
            idx2 += ids[0:num]
    return idx1, idx2
true
127f464017127b601cc4c8284b7cd8fad3def638
Python
Haoyi957/fastText-classification
/predict.py
UTF-8
611
2.828125
3
[]
no_license
"""Predict a label name for input text with a trained fastText classifier."""
from fasttext import FastText
from text_preprocess import TextProcess
import json

# NOTE(review): the model and label map are loaded at import time, so merely
# importing this module fails if model/fasttext.bin or label_dict.json is missing.
model_path = "model/fasttext.bin"
model = FastText.load_model(model_path)

# label_dict maps label names -> integer indices (reverse-looked-up in predict).
with open("label_dict.json", 'r') as load_f:
    label_dict = json.load(load_f)


def predict(text):
    """Return the label name predicted for *text*.

    The raw fastText label has the form '__label__<index>'.
    NOTE(review): .strip('__label__') strips the *character set*
    {'_', 'l', 'a', 'b', 'e'} from both ends; it happens to work for purely
    numeric indices but is fragile — consider removing the literal prefix
    instead. Confirm the label format before changing.
    """
    pre_label = model.predict(TextProcess().word_preprocess(text))
    label_index = pre_label[0][0].strip().strip('__label__')
    # Reverse lookup: the label name whose stored value equals the index.
    label_name = list(label_dict.keys())[list(label_dict.values()).index(int(label_index))]
    return label_name


if __name__ == '__main__':
    text = input("Please input sentence: ")
    print("result:", predict(text))
true
573927acc06c7426270398b2c88baa32891ec84e
Python
matiyashu/py_Armory
/fetchmakerHypothesistest.py
UTF-8
4,557
3.015625
3
[]
no_license
import numpy as np import fetchmaker # Number 7 from scipy.stats import binom_test # Number 9 from scipy.stats import f_oneway # Number 10 from statsmodels.stats.multicomp import pairwise_tukeyhsd # Number 13 from scipy.stats import chi2_contingency # Number 1 fetch_maker = fetchmaker.dogs # print(fetch_maker) # Number 2 rottweiler_tl = fetchmaker.get_tail_length('rottweiler') # print(rottweiler_tl) # Number 3 rottweiler_tl_mean = np.mean(rottweiler_tl) rottweiler_tl_std = np.std(rottweiler_tl) print('The rottweiler avg tail length is {} \n'. format(rottweiler_tl_mean)) print('The rottweiler std dev of tail length is {} \n'. format(rottweiler_tl_std)) # Number 4 whippet_rescue = fetchmaker.get_is_rescue('whippet') # print(whippet_rescue) # Number 5 # To count the number of entries that are not zero (1) num_whippet_rescues = np.count_nonzero(whippet_rescue) print('The count of (1) entry in the whippet_rescue is {} \n'.format(num_whippet_rescues)) # Number 6 # To get the number of samples using np.size num_whippets = np.size(whippet_rescue) print('The number of samples in the whippet_rescue is {} \n'.format(num_whippets)) # Number 7 and 8 expected_percentage_whippets_rescue = 0.08 binom_test_whippets_rescues = binom_test(num_whippet_rescues, num_whippets,expected_percentage_whippets_rescue) print('The P-Value of the whippet_rescue is {} \n'.format(binom_test_whippets_rescues)) print('So the P-Value from the Whippet_Rescue Binomial Test is %.3f and therefore, we accept the null hypothesis, which is that there is no difference between the observed number of whippet rescues and our expected whippet rescues percentage'%(binom_test_whippets_rescues)) print('\n') # Number 9 # since these datasets are numerical, we will be using ANOVA test to ensure the probability of False Positive stays 0.05 whippets_weight = fetchmaker.get_weight('whippet') terriers_weight = fetchmaker.get_weight('terrier') pitbulls_weight = fetchmaker.get_weight('pitbull') ANOVA_mid_size_dogs = 
f_oneway(whippets_weight, terriers_weight, pitbulls_weight) print('The P-value obtained from the ANOVA test on these three popular breeds is %.3f and therefore, we reject the null hypothesis, which is there is significant difference in the average weights of these three dogs, but we do not know which pair of datasets is significantly different.'% (ANOVA_mid_size_dogs[1])) print('\n') # Number 10 # To know which pair has a significant difference in their mean, we must use Tukey's Range test data = np.concatenate([whippets_weight, terriers_weight, pitbulls_weight]) labels = ['whippet'] * len(whippets_weight) + ['terrier'] * len(terriers_weight) + ['pitbull'] * len(pitbulls_weight) tukey_result = pairwise_tukeyhsd(data, labels, alpha = 0.05) print("Below is the table generated from the Tukey's Range Test to find out which pair of datasets is statistically different: \n {}".format(tukey_result)) print('\n') # Number 11 poodle_colors = fetchmaker.get_color('poodle') shihtzu_colors = fetchmaker.get_color('shihtzu') # print(poodle_colors) # print(shihtzu_colors) # Number 12 #First, obtain the color numbers for poodle breed black_poodle = np.count_nonzero(poodle_colors == 'black') brown_poodle = np.count_nonzero(poodle_colors == 'brown') gold_poodle = np.count_nonzero(poodle_colors == 'gold') grey_poodle = np.count_nonzero(poodle_colors == 'grey') white_poodle = np.count_nonzero(poodle_colors == 'white') #Secondly, obtain the color numbers for shihtzu breed black_shihtzu = np.count_nonzero(shihtzu_colors == 'black') brown_shihtzu = np.count_nonzero(shihtzu_colors == 'brown') gold_shihtzu = np.count_nonzero(shihtzu_colors == 'gold') grey_shihtzu = np.count_nonzero(shihtzu_colors == 'grey') white_shihtzu = np.count_nonzero(shihtzu_colors == 'white') #Next, create the contingency table using a list of lists color_table = [[black_poodle, black_shihtzu],[brown_poodle, brown_shihtzu], [gold_poodle, gold_shihtzu], [grey_poodle, grey_shihtzu], [white_poodle, white_shihtzu]] # 
Number 13 chi2, pval, dof, expected = chi2_contingency(color_table) print('The statistic of the color_table dataset is %.3f \n'%(chi2)) print('The P-Value of the color_table dataset is %.3f \n'% (pval)) print('The degrees of freedom from the color_table dataset is {} \n'.format(dof)) print('The expected table is as follows: \n {}'.format(expected)) print('\n') print('The conclusion from the Chi-Square test above is since the P-Value is %.3F, we reject the null hypothesis and stated that there is a significant difference between the datasets'% (pval))
true
238f95d2b4c538c572cefe7b7ae9ae7d01e99370
Python
yosefBP/holbertonschool-higher_level_programming
/0x04-python-more_data_structures/12-roman_to_int.py
UTF-8
397
3.359375
3
[]
no_license
#!/usr/bin/python3
def roman_to_int(roman_string):
    """Convert a Roman numeral string to its integer value.

    A symbol that is smaller than its right neighbour is subtracted
    (e.g. "IV" -> -1 + 5 = 4); otherwise it is added.  The last symbol
    is paired with itself so it is always added.

    Returns 0 for None, empty strings, or non-string input.
    """
    if not roman_string or not isinstance(roman_string, str):
        return (0)
    Rm = {"I": 1, "V": 5, "X": 10, "L": 50,
          "C": 100, "D": 500, "M": 1000}
    rm = roman_string
    # zip each symbol with its successor; rm[-1] is appended so the final
    # symbol compares against itself (and is therefore added).
    dlist = [Rm[i[0]] if Rm[i[0]] >= Rm[i[1]] else (-1 * Rm[i[0]])
             for i in zip(rm, rm[1:] + rm[-1])]
    # Bug fix: removed leftover debug `print(dlist)`.
    return (sum(dlist))
true
28408403de22e860be64c89998394042d6e72018
Python
withtimesgo1115/Conveyor-Obejct-Tracking-and-Recognition
/object_tracking/preprocessing/DataLoader.py
UTF-8
1,083
2.75
3
[]
no_license
import cv2
import numpy as np
import math
import os
import glob
import random


class DataLoader():
    """Sequential loader for numbered PNG frames (``<idx>.png``) in *path*.

    :param path: directory prefix the frame files live under
    :param idx: index of the first frame to read
    :param cvt: optional cv2.COLOR_* conversion code applied to each frame
    :param size: optional (width, height) tuple each frame is resized to
    """

    def __init__(self, path, idx=0, cvt=None, size=None):
        # Bug fix: `super(DataLoader).__init__()` created an *unbound*
        # super proxy and never actually called object.__init__.
        super().__init__()
        self.path = path
        self.cvt = cvt
        self.idx = idx
        self.size = size
        # Number of frames available in the directory.
        self._len = len(glob.glob(path + '*.png'))

    def cvtImg(self, im):
        """Apply the configured colour conversion and resize to *im*."""
        # Bug fix: `np.int` was deprecated in NumPy 1.20 and removed in
        # 1.24; the cv2 colour codes are plain Python ints.
        if isinstance(self.cvt, int):
            im = cv2.cvtColor(im, self.cvt)
        if isinstance(self.size, tuple):
            im = cv2.resize(im, self.size)
        return im

    def getItem(self, idx):
        """Read, convert, and return frame number *idx*."""
        return self.cvtImg(cv2.imread(self.path + str(idx) + '.png'))

    def __next__(self):
        """Return the frame at the cursor and advance the cursor."""
        im = self.getItem(self.idx)
        self.idx += 1
        return im

    def getRest(self):
        """Return every remaining frame (cursor to end) as a list."""
        # range() is evaluated once, so advancing self.idx inside
        # __next__ does not shorten the iteration.
        return [self.__next__() for _ in range(self.idx, self.len)]

    @property
    def len(self):
        """Total number of frames found at construction time."""
        return self._len
true
31833708681d39b6391596990c0580fe59da929f
Python
weilian1977/openmv-project
/openmv/qrcode_test.py
UTF-8
577
2.71875
3
[]
no_license
# Untitled - By: 黄惠泽 - 周四 5月 31 2018 import time, sensor, image sensor.reset() sensor.set_pixformat(sensor.RGB565) sensor.set_framesize(sensor.QVGA) # can be QVGA on M7... sensor.skip_frames(30) # 修改sensor配置之后, 跳过30帧 sensor.set_auto_gain(False) # must turn this off to prevent image washout... clock = time.clock() while(True): clock.tick() img = sensor.snapshot() img.lens_corr(1.8) # strength of 1.8 is good for the 2.8mm lens. for code in img.find_qrcodes(): print(code.payload()) print("FPS %f" % clock.fps())
true
616d1ae3a66812b1898d11c48e67721c6b5ebeab
Python
SS4G/master_graduate
/src/pre_proc_code/back_up/preprocess.py
UTF-8
3,344
2.765625
3
[]
no_license
import cv2
import numpy as np
import sys
sys.path.append('/home/szh-920/workspace')
from master_graduate.logging import ColorLogging


def processImage(imgFile, outputPaths):
    """Detect blue regions (e.g. road signs) in *imgFile* and crop them.

    Every intermediate stage is written to ./imgs/ for inspection; each
    detected region is saved as ./imgs/08_sign_<n>.jpg and the annotated
    original as ./imgs/09_res.jpg.

    :param imgFile: path of the image to process.  Bug fix: the original
        ignored this argument and always read './imgs/test_img.jpg'.
    :param outputPaths: currently unused; kept for interface
        compatibility (TODO: route the hard-coded ./imgs/ paths here).
    """
    # Load the source image from the caller-supplied path.
    img = cv2.imread(imgFile)
    print('img:', type(img), img.shape, img.dtype)
    cv2.imwrite('./imgs/00_img.jpg', img)

    # Convert to HSV so the blue hue range can be thresholded robustly.
    hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    cv2.imwrite('./imgs/01_hsv.jpg', hsv)

    # Keep only pixels whose hue falls in the blue band.
    blue_lower = np.array([100, 50, 50])
    blue_upper = np.array([124, 255, 255])
    mask = cv2.inRange(hsv, blue_lower, blue_upper)
    print('mask', type(mask), mask.shape)
    cv2.imwrite('./imgs/02_mask.jpg', mask)

    # Blur to suppress speckle noise before thresholding.
    blurred = cv2.blur(mask, (9, 9))
    cv2.imwrite('./imgs/03_blurred.jpg', blurred)

    # Binarise the blurred mask.
    ret, binary = cv2.threshold(blurred, 127, 255, cv2.THRESH_BINARY)
    cv2.imwrite('./imgs/04_blurred_binary.jpg', binary)

    # Morphological close with a wide kernel to seal gaps inside regions.
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (21, 7))
    closed = cv2.morphologyEx(binary, cv2.MORPH_CLOSE, kernel)
    cv2.imwrite('./imgs/05_closed.jpg', closed)

    # Erosion removes small white specks; dilation grows survivors back.
    erode = cv2.erode(closed, None, iterations=4)
    cv2.imwrite('./imgs/06_erode.jpg', erode)
    dilate = cv2.dilate(erode, None, iterations=4)
    cv2.imwrite('./imgs/07_dilate.jpg', dilate)

    # Find outer contours of the remaining blobs.
    # NOTE(review): 3-value return is the OpenCV 3.x API; OpenCV 4 returns
    # only (contours, hierarchy) -- confirm the installed version.
    image, contours, hierarchy = cv2.findContours(
        dilate.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    print(type(image), image.shape)
    cv2.imwrite('./imgs/071_counter.img.jpg', dilate)
    print('轮廓个数:', len(contours))

    i = 0
    res = img.copy()
    for con in contours:
        # Fit a minimum-area rotated rectangle: (center, (w, h), angle).
        rect = cv2.minAreaRect(con)
        ColorLogging.debug("rect")
        ColorLogging.debug(rect)
        ColorLogging.info("box point")
        ColorLogging.info(cv2.boxPoints(rect))
        # Corner points of the rotated rectangle, rounded to ints.
        # Bug fix: np.int0 was removed in NumPy 2.0; astype(int) is
        # equivalent for drawing/indexing purposes.
        box = cv2.boxPoints(rect).astype(int)
        # Draw the box and the raw contour on the annotated copy.
        cv2.drawContours(res, [box], -1, (0, 0, 255), 2)
        cv2.drawContours(res, con, -1, (0, 255, 0), 2)
        print([box])
        # Axis-aligned bounds of the rotated box (rows h, columns l).
        h1 = int(box[:, 1].max())
        h2 = int(box[:, 1].min())
        l1 = int(box[:, 0].max())
        l2 = int(box[:, 0].min())
        print('h1', h1)
        print('h2', h2)
        print('l1', l1)
        print('l2', l2)
        # Guard against degenerate (zero-area) boxes before cropping.
        if h1 - h2 > 0 and l1 - l2 > 0:
            temp = img[h2:h1, l2:l1]
            i = i + 1
            # Save the cropped candidate sign.
            cv2.imwrite('./imgs/08_sign_{0}.jpg'.format(i), temp)
    # Save the annotated original once, after all contours are drawn.
    cv2.imwrite('./imgs/09_res.jpg', res)
true
4098a8a3d536f22a5587fb6e19d5ccd6ee51e70c
Python
nodepress0508/codingworkspace
/lc23.py
UTF-8
1,928
3.9375
4
[]
no_license
""" 23. Merge k Sorted Lists Merge k sorted linked lists and return it as one sorted list. Analyze and describe its complexity. Example: Input: [ 1->4->5, 1->3->4, 2->6 ] Output: 1->1->2->3->4->4->5->6 """ # Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next """ TLE:to much recursion called """ class Solution: def mergeKLists(self, lists: List[ListNode]) -> ListNode: if not list: return if len(lists) ==1: return list[0] mid = len(lists)//2 head1 = self.mergeKLists(lists[:mid]) head2 = self.mergeKLists(lists[mid:]) return self.sortedmerge(head1,head2) def sortedmerge(self,head1,head2): tmp = None if not head1: return head2 if not head2: return head1 if head1.val <= head2.val: tmp = head1 tmp.next = self.sortedmerge(head1.next,head2) else: tmp = head2 tmp.next = self.sortedmerge(head1,head2.next) """ TC:O(nlogn) SC:O(N) """ class Solution: def mergeKLists(self, lists: List[ListNode]) -> ListNode: if not lists: return if len(lists) ==1: return lists[0] mid = len(lists)//2 head1 = self.mergeKLists(lists[:mid]) head2 = self.mergeKLists(lists[mid:]) return self.sortedmerge(head1,head2) def sortedmerge(self,head1,head2): tmp = cur = ListNode(-1) while head1 and head2: if head1.val<head2.val: cur.next = head1 head1 = head1.next else: cur.next = head2 head2 = head2.next cur = cur.next cur.next = head1 or head2 return tmp.next
true
045cd775d5ef09e4f90490db9d397c404408fc73
Python
Iswaryadevi98/guvi
/codeketa/checkvowel72.py
UTF-8
129
3.375
3
[]
no_license
def contains_vowel(text):
    """Return True if *text* contains at least one lowercase vowel."""
    vowels = {'a', 'e', 'i', 'o', 'u'}
    return any(ch in vowels for ch in text)


if __name__ == "__main__":
    # Bug fix: the original printed "no" once for every leading
    # consonant before the first vowel; exactly one answer is printed now.
    g = input()
    if contains_vowel(g):
        print("yes")
    else:
        print("no")
true
22670194e01fd5eb543fdddf86e79380fa334350
Python
NanoClem/AllergenFoodControl
/main.py
UTF-8
2,352
3.09375
3
[]
no_license
import openfoodfacts as openFF
from AllergenFoodControl import AllergenFoodControl


def printAllergens(country) :
    """
    Print the allergens available for a country.
    PARAM country: country code (e.g. "fr")
    """
    allergens = openFF.facets.get_allergens()  # available allergens
    country += ":"
    for prod in allergens :
        # NOTE(review): matches the "<country>:" prefix against the
        # 'name' field here, but getAllergens() below matches 'id' --
        # confirm which field actually carries the prefix.
        if country in prod['name'] :
            print(prod)


def printProducts(country) :
    """
    Print the fields of each product available for a country.
    PARAM country: country code (e.g. "fr")
    """
    products = openFF.products.get_by_language("fr")
    for prod in products :
        print("################## PRODUIT SUIVANT ##################")
        print(prod['product_name'])
        print(prod['id'])
        print("==========================================================")


def getAllergens(country) :
    """
    Return the names of all allergens available for a country.
    PARAM country: country code (e.g. "fr")
    """
    ret = []
    allerg = openFF.facets.get_allergens()  # available allergens
    country += ":"
    for prod in allerg :
        id = prod['id']
        # Allergen ids look like "fr:gluten"; keep those matching the
        # requested country.
        if country in id :
            toAppend = id.split(':')[1]  # drop the country-code prefix
            ret.append(toAppend)  # keep only the allergen name
    return ret


#__________MAIN__________
def main() :
    country = "fr"  # country code

    spec = input("Quel allergène voulez-vous éviter ? > ")

    #======================================================================
    # AllergenFoodControl: filter products by the requested allergen
    #======================================================================
    AFC = AllergenFoodControl(spec)
    NoAllergenProds = AFC.getNoAllergenProds(country)  # products free of the allergen

    #======================================================================
    # DATAFRAME AND CSV EXPORT
    #======================================================================
    foodDF = AFC.toDataFrame(NoAllergenProds)
    # NOTE(review): toExcel is given a .csv filename -- confirm intent.
    AFC.toExcel(foodDF, "Safe_Products.csv", "products")  # Excel export
    #AFC.toCsv(foodDF, "Safe_Products.csv")


if __name__ == '__main__':
    # TEST FUNCTIONS
    # printAllergens("fr")
    # printProducts("fr")
    main()
true
0416d9cb7b22bc90fddc3baeb3d063b1113d1341
Python
JcesarIA/learning-python
/EXCursoEmVideo/ex088.py
UTF-8
433
3.53125
4
[]
no_license
from random import randint
from time import sleep

# Generate `quant` lottery tickets of 6 unique numbers in [1, 60].
quant = int(input('Quantos jogos voce quer? '))
jogo_temp = []   # numbers of the ticket being built
jogo_todos = []  # all finished tickets
n = 1            # how many numbers the current ticket holds (1-based)
for c in range(quant):
    # Draw until the ticket has 6 distinct numbers.
    while n <= 6:
        num = randint(1, 60)
        if num not in jogo_temp:
            jogo_temp.append(num)
            n += 1
    # Store a *copy* of the ticket, then reset for the next one.
    jogo_todos.append(jogo_temp[:])
    jogo_temp.clear()
    n = 1
    print(f'JOGO {c+1}: {jogo_todos[c]}')
    sleep(1)  # pause so the tickets print one per second
print(f'BOA SORTE')
true
0adaba5c72d164b9283208f96b52f854b3a5abc5
Python
LingshenHe/numerical-computation
/roberger.py
UTF-8
816
2.984375
3
[]
no_license
# -*- coding: utf-8 -*-
"""
Romberg integration of f(x) = exp(-x**2) over [0, 0.8].

Created on Sun Dec 17 20:36:51 2017
@author: Lingsheng He

Bug fixes relative to the original:
* a[0] was missing the interval width (h = 0.8), so every trapezoid
  estimate was scaled wrongly.
* the warm-up loop stopped at a[2], leaving a[3] = 0 and corrupting
  b[2], c[1] and d[0] (and every later refinement built on them).
"""
import numpy as np


def f(x):
    """Integrand."""
    return np.exp(-x**(2))


# Romberg tableau columns: a = trapezoid estimates, b/c/d = successive
# Richardson extrapolations (orders h^2, h^4, h^6 eliminated).
a = np.zeros(100)
b = np.zeros(100)
c = np.zeros(100)
d = np.zeros(100)

# T(h): trapezoid rule with a single interval of width h = 0.8.
a[0] = (f(0) + f(0.8)) / 2 * 0.8
# Successive halvings: T(h/2) = T(h)/2 + (h/2) * sum of midpoint values.
# Warm-up must reach a[3] so that b[2], c[1], d[0] are all well defined.
for i in range(1, 4):
    a[i] = a[i-1] / 2
    for j in range(2**(i-1)):
        a[i] += f((j + 0.5) * 0.8 / (2**(i-1))) * 0.8 / 2**(i)

for i in range(3):
    b[i] = 4*a[i+1]/3 - a[i]/3
for i in range(2):
    c[i] = 16*b[i+1]/15 - b[i]/15
d[0] = 64*c[1]/63 - c[0]/63

# Keep refining until two successive d estimates agree to 5e-7.
i = 4
while i > 1:
    a[i] = a[i-1] / 2
    for j in range(2**(i-1)):
        a[i] += f((j + 0.5) * 0.8 / (2**(i-1))) * 0.8 / 2**(i)
    b[i-1] = 4*a[i]/3 - a[i-1]/3
    c[i-2] = 16*b[i-1]/15 - b[i-2]/15
    d[i-3] = 64*c[i-2]/63 - c[i-3]/63
    print(d[i-3], i)
    if abs(d[i-3] - d[i-4]) < 0.5*10**(-6):
        break
    i += 1
print('end')
true
959da08d228ae12ff62825f78fddbae0d4ec086a
Python
optimass/continual_learning_papers
/scripts/bibtex_to_md.py
UTF-8
2,111
2.6875
3
[]
no_license
from utils import generate_md_file
import bibtexparser
import os

# Read the raw BibTeX database from ./bibtex.bib (relative to the cwd).
file_name = str(os.path.join(os.getcwd(), 'bibtex.bib'))
with open(file_name) as bibtex_file:
    bibtex_str = bibtex_file.read()

# Parse the database; non-standard entry types are kept rather than dropped.
bib_db = bibtexparser.loads(
    bibtex_str,
    parser=bibtexparser.bparser.BibTexParser(ignore_nonstandard_types=False))

################################### Create Readme ####################################


def plot_titles(titles):
    # Render a category as a markdown H2 section header.
    return '\n' + "## " + titles[0] + '\n'


# Each sub-list is one README section: the first element is the section
# title, the remaining elements are keyword aliases that route a BibTeX
# entry (via its "keywords" field) into that section.
list_types = [["Classics", "Classic"],
              ["Empirical Study", "Empirical"],
              ["Surveys", "Survey", "survey"],
              ["Influentials", "Influential"],
              ["New Settings or Metrics", "Setting", "Metric"],
              ['General Continual Learning Methods (SL and RL)', 'General'],
              ["Task-Agnostic Continual Learning", "Task-Agnostic", "Task Agnostic", "TACL"],
              ["Regularization Methods", "Regularization"],
              ["Distillation Methods", "Distillation"],
              ["Rehearsal Methods", "Rehearsal"],
              ["Generative Replay Methods", "Generative Replay"],
              ["Dynamic Architectures or Routing Methods", "Architectures", "Dynamic Architecture", "routing"],
              ["Hybrid Methods", "Hybrid"],
              ["Continual Few-Shot Learning", "Continual-Meta Learning", "Continual-Meta"],
              ["Meta-Continual Learning"],
              ["Lifelong Reinforcement Learning", "Reinforcement", "Continual Reinforcement Learning", "CRL", "RL"],
              ["Task-Agnostic Lifelong Reinforcement Learning", "TACRL", "TALRL"],
              ["Continual Generative Modeling", "Generative Modeling"],
              ["Biologically-Inspired", "bio", "neuro", "neuroscience"],
              ["Miscellaneous", "Misc"],
              ["Applications"],
              ["Thesis"],
              ["Libraries", "Library", "Software"],
              ["Workshops", 'Workshop']]

# Emit README.md: one section per category, entries matched on "keywords".
generate_md_file(DB=bib_db, list_classif=list_types, key="keywords",
                 plot_title_fct=plot_titles, filename="README.md",
                 add_comments=True)
true
429a66831edb6998cd8c69809fdf13ca42710e17
Python
gitprouser/LeetCode-3
/word-ladder-ii_1.py
UTF-8
1,831
3
3
[]
no_license
import string
import sys
from collections import deque


class Solution(object):
    def findLadders(self, beginWord, endWord, wordlist):
        """Return all shortest transformation sequences from beginWord to
        endWord, changing one letter at a time through *wordlist*.

        BFS runs backwards from endWord, recording for every word the
        neighbours that lie one step closer to endWord; the shortest
        paths are then rebuilt by DFS from beginWord.

        Fixes relative to the original: ported from Python 2
        (``import Queue`` / ``sys.maxint``), ``min = step`` shadowed the
        builtin instead of updating ``mn``, and beginWord is now seeded
        into the distance map so results are found even when it is not
        part of the word list.

        :type beginWord: str
        :type endWord: str
        :type wordlist: Set[str]
        :rtype: List[List[str]]
        """
        if not wordlist:
            return []
        mp = {}                # word -> neighbours one step nearer endWord
        mn = sys.maxsize       # length of the shortest ladder found so far
        # Distance (in steps) from endWord for every reachable word.
        ladder = {w: sys.maxsize for w in wordlist}
        ladder[beginWord] = sys.maxsize
        ladder[endWord] = 0
        wordlist.add(endWord)
        queue = deque([endWord])
        while queue:
            word = queue.popleft()
            step = ladder[word] + 1
            if step > mn:
                break          # every shortest ladder is already recorded
            for i in range(len(word)):
                for ch in string.ascii_lowercase:
                    new_word = word[:i] + ch + word[i+1:]
                    if new_word in ladder:
                        if step > ladder[new_word]:
                            continue           # reached earlier by a shorter path
                        elif step < ladder[new_word]:
                            queue.append(new_word)
                            ladder[new_word] = step
                        # step == ladder[new_word]: another equally short
                        # predecessor -- record it too.
                        if new_word in mp:
                            mp[new_word].append(word)
                        else:
                            mp[new_word] = [word]
                        if new_word == beginWord:
                            mn = step          # bug fix: was `min = step`
        result = []
        self.backTrace(beginWord, endWord, [], mp, result)
        return result

    def backTrace(self, word, end, lst, mp, result):
        """DFS from *word* towards *end*; complete paths go into *result*."""
        if word == end:
            lst.append(end)
            result.append(lst[:])
            lst.pop()
            return
        lst.append(word)
        if word in mp:
            for s in mp[word]:
                self.backTrace(s, end, lst, mp, result)
        lst.pop()
true
cbe4d6389e55cc4bad67bc5bdc03fef2ff7ac3e7
Python
Nuub073/Coursera-Python-for-everybody-Specialisation-Solutions
/Course_1/ex-4 6.py
UTF-8
326
3.296875
3
[]
no_license
hrs = input("Enter Hours: ") rate = input("Enter rate: ") x=input("Enter overtime hours: ") ohrs=float(x) def computepay(hrs, rate, ohrs): pay=int(hrs)*float(rate) if ohrs>0: opay=int(ohrs)*float(rate)*1.5 y=pay+opay return y else: return pay print("Pay", computepay(hrs, rate, ohrs))
true
368af304b724be453706441d848881a0b62e4ae3
Python
yulyzulu/holbertonschool-machine_learning
/supervised_learning/0x03-optimization/2-shuffle_data.py
UTF-8
257
3.25
3
[]
no_license
#!/usr/bin/env python3
"""Shuffle Data"""
import numpy as np


def shuffle_data(X, Y):
    """Shuffle the data points of two matrices with the same permutation.

    X and Y must share their first dimension; the returned pair keeps
    rows of X and Y aligned with each other.
    """
    order = np.random.permutation(X.shape[0])
    return X[order], Y[order]
true
8ffba1385c9416dd73fba51ba9969b7030fc2b81
Python
yamato1992/at_coder
/abc/abc133/c.py
UTF-8
212
2.6875
3
[]
no_license
def min_mod_product(L, R):
    """Return min over L <= a < b <= R of (a * b) % 2019.

    Residues repeat with period 2019, so only the first ~2*2019 values
    after L ever need checking; a range spanning a full period already
    contains a multiple of 2019, giving 0.
    """
    end = min(R, L + 4038)
    best = 2018  # (a * b) % 2019 can never exceed 2018
    for a in range(L, end):
        for b in range(a + 1, end + 1):
            best = min(best, (a * b) % 2019)
            if best == 0:
                return 0  # cannot improve on 0
    return best


if __name__ == "__main__":
    L, R = map(int, input().split())
    print(min_mod_product(L, R))
true
7ef122c4bcdeb7b147418cc28c0003522c95dd2d
Python
seanahmad/Trading-Strategy-Backtests
/Morning RTY Trend/daily_best_feature_record_test.py
UTF-8
7,760
2.640625
3
[]
no_license
# -*- coding: utf-8 -*- """ Created on Wed Jul 24 11:58:03 2019 @author: achen """ import datetime def daily_best_feature_record_test(Tele2, trade_day_list, base_time_start, base_time_end, test_time_start, test_time_end, sma_threshold, stop_threshold, bounce_stop_threshold, over_limit_length, loss_stop, trade_cost): import numpy as np import pandas as pd import telescope as ts import telescope_metrics as tm import datetime from scipy.stats.stats import pearsonr from tqdm import tqdm #--------------Input Parameters--------------- # data_mpk_file_path, start_date, end_date # df_2019 = data_mpk_file_path # START = start_date # END = end_date # STIME = base_time_start # ETIME = test_time_end # TIME_FRAME_LENGTH = time_frame_length # ROLLING_STEP = rolling_step #(has to be a factor of TIME_FRAME_LENGTH) BASE_TIME_START = base_time_start BASE_TIME_END = base_time_end TEST_TIME_START = test_time_start TEST_TIME_END = test_time_end SMA_THRESHOLD = sma_threshold STOP_THRESHOLD = stop_threshold TRADE_COST = trade_cost #--------------------------------------------- #--------------Input Parameters--------------- #df_2019 = r"Z:\KaiData\Theo\2019\YM.ESTheo.mpk" #START = datetime.datetime(2019, 7, 8, 0, 0) #END = datetime.datetime(2019, 7, 12, 23, 59) #STIME = datetime.time(8, 0, 0) #ETIME = datetime.time(15, 15, 0) #TIME_FRAME_LENGTH = 5 #ROLLING_STEP = '1T' #(has to be a factor of TIME_FRAME_LENGTH) #--------------------------------------------- #print(Tele2.df) #print(Tele2.grouped) #print(Tele2.num_of_groups) #print(Tele2.group_names) # time_list = Tele2.df['time'].unique() daily_profit_list = [] limit_stop_count = 0 bounce_stop_count = 0 trade_count = 0 # check_list = [] loss_trade_day_info = [['Date', 'Enter SMA Ratio', 'Entered Position', 'Exit Time', 'Exit Reason', 'Buy Price', 'Sell Price', 'Net Profit']] for trade_day in trade_day_list: base_timeframe_df = ts.select_date_timeframe(Tele2.df, trade_day, BASE_TIME_START, BASE_TIME_END) test_timeframe_df = 
ts.select_date_timeframe(Tele2.df, trade_day, TEST_TIME_START, TEST_TIME_END) profit = 0 base_bool_series = (base_timeframe_df['TheoPrice'] > base_timeframe_df['SMA']) above_ratio = base_bool_series.sum()/base_bool_series.size below_ratio = 1 - above_ratio bounce_stop_reach_count = 0 test_start_price = test_timeframe_df['TheoPrice'][0] test_end_price = test_timeframe_df['TheoPrice'][-1] test_price_move = test_end_price - test_start_price abs_test_price_move = np.absolute(test_price_move) theo_change = base_timeframe_df['TheoPrice'][-1] - base_timeframe_df['TheoPrice'][0] absolute_theo_change = (np.absolute(theo_change)) max_min_spread = base_timeframe_df['TheoPrice'].max() - base_timeframe_df['TheoPrice'].min() average = base_timeframe_df['TheoPrice'].mean() std_dev = base_timeframe_df['TheoPrice'].std() bounce_stop_threshold = max_min_spread if above_ratio >= SMA_THRESHOLD: enter_ratio = above_ratio enter_position = 'Long' exit_reason = 'Exit Time Reach' try: buy_price = test_timeframe_df[test_timeframe_df['time'] == test_time_start]['TheoPrice'][0] except: print(test_timeframe_df['time']) print(test_time_start) max_price = buy_price for time, price in test_timeframe_df['TheoPrice'].items(): # if price <= buy_price - STOP_THRESHOLD: # sell_price = price # close_time = time # limit_stop_count += 1 # exit_reason = 'Reach Loss Up Stop' # break if price > max_price: max_price = price bounce_stop_reach_count = 0 if price < (buy_price - loss_stop): exit_reason = 'Lose more than 500 dollars' sell_price = price close_time = time break if price < max_price - bounce_stop_threshold: bounce_stop_reach_count += 1 if bounce_stop_reach_count >= over_limit_length: sell_price = price close_time = time bounce_stop_count += 1 exit_reason = 'Reach Bounce Stop' break sell_price = price close_time = time # check_list.append([sell_price, buy_price, close_time]) trade_count += 1 profit = sell_price - buy_price - TRADE_COST if below_ratio > SMA_THRESHOLD: enter_ratio = below_ratio 
enter_position = 'Short' exit_reason = 'Exit Time Reach' sell_price = test_timeframe_df[test_timeframe_df['time'] == test_time_start]['TheoPrice'][0] min_price = sell_price for time, price in test_timeframe_df['TheoPrice'].items(): # if price >= sell_price + STOP_THRESHOLD: # buy_price = price # close_time = time # limit_stop_count +=1 # exit_reason = 'Reach Loss Up Stop' # break if price < min_price: min_price = price bounce_stop_reach_count = 0 if price > (sell_price + loss_stop): exit_reason = 'Lose more than 500 dollars' buy_price = price close_time = time break if price > min_price + bounce_stop_threshold: bounce_stop_reach_count += 1 if bounce_stop_reach_count >= over_limit_length: buy_price = price close_time = time bounce_stop_count += 1 exit_reason = 'Reach Bounce Stop' break buy_price = price close_time = time trade_count += 1 profit = sell_price - buy_price - TRADE_COST # check_list.append([sell_price, buy_price, close_time]) daily_profit_list.append(profit) if (profit < 0): day_info = [trade_day, enter_ratio, enter_position, close_time.time(), exit_reason, buy_price, sell_price, profit] loss_trade_day_info.append(day_info) time_stop_count = trade_count - limit_stop_count - bounce_stop_count return np.array(daily_profit_list), trade_count, limit_stop_count, bounce_stop_count, time_stop_count, loss_trade_day_info, above_ratio, below_ratio, theo_change, absolute_theo_change, max_min_spread, average, std_dev, exit_reason, test_start_price, test_end_price, test_price_move, abs_test_price_move # result = pd.DataFrame(column_list[1:], columns=column_list[0]) # result['time'] = time_list[:-ignore_col_num] # result.set_index('time', inplace=True) # result.to_excel(output_excel_path) # return result
true
fa591603e85d16a9b6dd08e69ab1f10d27b6571d
Python
maolintuntun/AI-Robotic-arm-
/python practice/T38-求矩阵对角线之和.py
UTF-8
521
3.609375
4
[]
no_license
# -*- coding: utf-8 -*- """ Created on Wed Sep 20 21:18:26 2017 @author: lenovo """ if __name__ == '__main__': a = [] sum = 0.0 for i in range(3): a.append([]) #向a数组中加行列表 for j in range(3): a[i].append(float(input("input num:\n"))) #向每个a的行列表中加入元素 for i in range(3): print(a[i][i]) #自己加的,输出对角线元素 因为对角线上的下标都是一样的数 sum += a[i][i] print (sum)
true
d00c26c63cde676cc7fbc1a0c6152a5476dac9e4
Python
verdejavic/dogfoodproj
/individual_pages.py
UTF-8
2,468
3.1875
3
[]
no_license
from urllib.request import Request, urlopen
from urllib.error import HTTPError  # bug fix: HTTPError was caught but never imported
from bs4 import BeautifulSoup
import time
import csv

# Browser-like headers so slimdoggy.com does not reject the request.
hdr = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36',
       'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
       'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
       'Accept-Encoding': 'none',
       'Accept-Language': 'en-US,en;q=0.8,it;q=0.6',
       'Connection': 'keep-alive'}

site = "http://slimdoggy.com/dogfood/dry/acana-adult-large-breed/"


def open_page(site):
    """Fetch *site* and return its parsed BeautifulSoup object
    (or None when the request fails)."""
    req = Request(site, headers=hdr)
    try:
        html = urlopen(req)
    except HTTPError as e:
        print(e.reason)
        return None  # bug fix: `html` was undefined after a failure
    except Exception:
        print("Unknown error")
        return None
    bsObj = BeautifulSoup(html, "html.parser")
    return bsObj


def get_food_details():
    """Scrape one dog-food detail page and append a row to the CSV."""
    # Bug fix: the parsed page was previously discarded and an undefined
    # `bsObj` name was used; bind the result of open_page().
    bsObj = open_page(site)
    if bsObj is None:
        return

    # Bug fix: Tag.find() returns a single tag (integer indexing raised);
    # use find_all() and index into the resulting lists.
    tables = bsObj.find_all("table")

    def cell(table_idx, cell_idx):
        """Return a <td> tag by table/cell index, or None if missing."""
        try:
            return tables[table_idx].find_all("td")[cell_idx]
        except (IndexError, AttributeError):
            return None

    # NOTE(review): the original wrote find("h1")[1]; the second <h1> was
    # presumably intended -- confirm against the live page layout.
    h1s = bsObj.find_all("h1")
    name = h1s[1] if len(h1s) > 1 else (h1s[0] if h1s else None)
    cat = cell(0, 2)
    brand = cell(0, 4)
    protein = cell(1, 2)
    ingredients = cell(3, 1)

    # Build one CSV row; any missing field becomes "N/A".
    row = []
    for detail in (name, cat, brand, protein, ingredients):
        try:
            row.append(detail.get_text())
        except Exception:
            row.append("N/A")
    c.writerow(row)


# Open the CSV file for writing and emit the header row.
csvfile = open("dogfood.csv", 'w')
c = csv.writer(csvfile)
c.writerow(['name', 'cat', 'brand', 'protein', 'ingredients'])

# Scrape the detail page, then close and save the CSV file.
get_food_details()
csvfile.close()
true
65ee7ad735b500c2fc576b8b70e441d692a4d5c7
Python
rikusalminen/trimuncher
/polygon.py
UTF-8
762
3.5625
4
[ "Zlib" ]
permissive
def quads2tris(quads):
    """Split each quad (a, b, c, d) into two triangles."""
    for a, b, c, d in quads:
        yield (a, b, d)
        yield (d, b, c)


def strips2tris(strips):
    """Expand triangle strips into individual triangles."""
    for strip in strips:
        edge = tuple(strip[0:2])
        odd = False
        for vertex in strip[2:]:
            yield (edge[0], edge[1], vertex)
            # Alternate which edge endpoint is replaced so the shared
            # edge always faces the next vertex.
            edge = (vertex, edge[1]) if not odd else (edge[0], vertex)
            odd = not odd


def poly2strip(poly):
    """Reorder a convex polygon's vertices into triangle-strip order."""
    assert len(poly) >= 3

    def strip(top, bottom):
        # Interleave vertices from the two polygon halves, back to front.
        odd = False
        while top or bottom:
            yield (bottom if odd else top).pop()
            odd = not odd

    # Bug fix: len(poly) / 2 is a float in Python 3 and cannot be used
    # as a slice index; integer division restores the Python 2 behaviour.
    mid = len(poly) // 2
    return [poly[0]] + list(strip(list(reversed(poly[1:mid+1])),
                                  list(poly[mid+1:])))


def polys2tris(polys):
    """Convert (convex) polygons to triangles via triangle strips."""
    return strips2tris(map(poly2strip, polys))
true
094b7748a094b227d5582221764193eb6852ec61
Python
gusals6804/TopicModelling
/cluster_v2.py
UTF-8
3,843
2.5625
3
[]
no_license
from pandas import DataFrame as df import re import pandas as pd from gensim.models.word2vec import Word2Vec from sklearn import decomposition from sklearn.cluster import KMeans from sklearn.cluster import DBSCAN from spherecluster import SphericalKMeans from sklearn.manifold import TSNE import matplotlib.pyplot as plt import numpy as np import matplotlib as mpl def cluster(model, file, model_name): result = model.wv # 어휘의 feature vector topic = pd.read_pickle('C:\\Users\\gusals\\Desktop\\현민\\딥러닝 특론\\최종정리\\topic\\%s.pkl' % file) #print(result.vocab.keys()) #vocabs = result.vocab.keys() vocabs = [] for i in topic['sentences']: for j in i: vocabs.append(j) print(len(vocabs)) clean_file = open('C:\\Users\\gusals\\Desktop\\현민\\딥러닝 특론\\최종정리\\data\\클러스터전처리.txt', 'r') lines = clean_file.readlines() clean_file.close() remove_list = lines[0].split(', ') word_vectors = [] clean_vocabs = [] clean = ['코로'] for v in vocabs: if v in clean: v = re.sub('코로', '코로나', v) print(v) try: word_vectors.append(result[v]) clean_vocabs.append(v) except: print(v) clean_vocabs.remove(v) num_clusters = 40 #int(len(word_vectors) / 10) # int(word_vectors.shape[0]/50) # 어휘 크기의 1/5나 평균 5단어 print(word_vectors[0]) num_clusters = int(num_clusters) # tsne = TSNE(n_components=2) # reduced_X = tsne.fit_transform(word_vectors) # idx = DBSCAN(eps=4, min_samples=20).fit(word_vectors) # pca = decomposition.PCA(n_components=10).fit(word_vectors) reduced_X = pca.transform(word_vectors) # elbow(word_vectors) kmeans_clustering = KMeans(init="k-means++", n_clusters=num_clusters, random_state=0) idx = kmeans_clustering.fit_predict(reduced_X) # skm = SphericalKMeans(n_clusters=40) # idx = skm.fit_predict(word_vectors) idx = list(idx) print(len(vocabs)) print(len(idx)) names = clean_vocabs print(names) word_centroid_map = {names[i]: idx[i] for i in range(len(idx))} dfIndustry = pd.DataFrame(columns=["cluster", "keyword"]) for c in range(num_clusters): # 클러스터 번호를 출력 print("\ncluster {}".format(c)) words = [] 
cluster_values = list(word_centroid_map.values()) for i in range(len(cluster_values)): if (cluster_values[i] == c): words.append(list(word_centroid_map.keys())[i]) if len(words) == 1: print(words) rowIndustry = [c, words] dfIndustry.loc[len(dfIndustry)] = rowIndustry print(dfIndustry) count = 0 last_word = [] for i in dfIndustry['keyword']: clean_v = [] for j in i: if j not in remove_list: clean_v.append(j) last_word += clean_v dfIndustry['keyword'][count] = clean_v count += 1 print(len(last_word)) print(last_word) # tsne_plot(model, clean_vocabs) print(dfIndustry) for i in dfIndustry['keyword']: print(i) dfIndustry.to_pickle("C:\\Users\\gusals\\Desktop\\현민\\딥러닝 특론\\최종정리\\군집\\군집_%s_pca10_KMeans.pkl" % model_name) def elbow(x): sse = [] for i in range(1, 50): km = KMeans(n_clusters=i, init='k-means++', random_state=0) km.fit(x) sse.append(km.inertia_) plt.plot(range(1, 50), sse, marker='o') plt.savefig('./cluster.png') model_name = '2017_report_size20_win10_min10_iter500_hs0' model = Word2Vec.load('C:\\Users\\gusals\\Desktop\\현민\\딥러닝 특론\\최종정리\\word2vec\\%s' % model_name) file = '2017' #cluster(model, file, model_name)
true
a77b5f4d5f62c832a51c30c695649a5831a72d78
Python
NishVasishta/proj_vision
/kiosk1.py
UTF-8
11,647
3.359375
3
[]
no_license
bill=[] bill1=[] def intro(): print('-'*80) print() first_view=[[1,'PIZZA'], [2,'DESSERTS'], [3,'SOFTDRINKS'], [4,'SNACKS']] print("ENTER YOUR CHOICE".center(60)) print('-'*80) print() for i in first_view: print(i[0]," ",i[1]) print() print('-'*80) c=input("Enter the Card Serial Number: ") if c.isdecimal(): if c=='1': bill_piz=pizza_menu() bill1.append(bill_piz) print("pizza bill is",bill_piz) amtp=[i[-1] for i in bill_piz] print(amtp) t_total_piz=sum(amtp) print("Total amt of pizza =",t_total_piz) print() print("Would you like to order more ? : ") if input("If yes, press 'Y'. Else press 'N' : ").lower() =='y': intro() else: print() print("Thanks for visiting !!") bill.append(t_total_piz) print(bill) elif c=='2': bill_des=dessert_menu() print(bill_des) bill1.append(bill_des) amtd=[i[-1] for i in bill_des] print(amtd) t_total_des=sum(amtd) print("Total amt of Desserts =",t_total_des) print() print("Would you like to order more ? : ") if input("If yes, press 'Y'. Else press 'N' : ").lower() =='y': intro() else: print() print("Thanks for visiting !!") bill.append(t_total_des) elif c=='3': bill_bev=drinks_menu() print(bill_bev) bill1.append(bill_bev) amtb=[i[-1] for i in bill_bev] print(amtb) t_total_bev=sum(amtb) print("Total amt of Softdrinks =",t_total_bev) print() print("Would you like to order more ? : ") if input("If yes, press 'Y'. Else press 'N' : ").lower() =='y': intro() else: print() print("Thanks for visiting !!") bill.append(t_total_bev) elif c=='4': bill_bites=snacks_menu() print(bill_bites) bill1.append(bill_bites) amts=[i[-1] for i in bill_bites] print(amts) t_total_bites=sum(amts) print("Total amt of Snacks =",t_total_bites) print() print("Would you like to order more ? : ") if input("If yes, press 'Y'. 
Else press 'N' : ").lower() =='y': intro() else: print() print("Thanks for visiting !!") bill.append(t_total_bites) else: print("Invalid Input") print("*"*80) if input("Would you like to try again : ").lower()=='y': intro() else: print() print("Thanks for visiting !!") return (bill1,bill) def pizza_menu(): print("YOU ARE IN PIZZA WORLD") print() menu=[[1,'Paneer Tikka',200,300,400], [2,'Capsicum',250,350,450], [3,'Golden Corn',250,350,400], [4,'Special',150,300,450]] print('-'*80) print() print(" SL.NO PIZZA ") print() print(" Small Medium Large") print() for i in menu: print(" ",i[0]," ",i[1]) print() print(" ", i[2]," ", i[3]," ", i[4]) print("-"*80) print() lp=[] s=True while s: t=[] choice=input("ENTER YOUR CHOICE USING SL.NO : ") if choice=='1' or choice=='2' or choice=='3' or choice=='4': size=input("Enter the size required as S for small, M for medium and L for Large : ").lower() if size =='s' or size =='m' or size =='l': quant=int(input("ENTER THE QUANTITY : ")) t.append(choice) n=0 if size == 's': n=2 elif size == 'm': n=3 elif size == 'l': n=4 cost =int(menu[int(choice)-1][n]) print(cost) product=menu[int(choice)-1][1] t.append(product) t.append(cost) t.append(quant) t.append(cost*quant) else: print("INVALID INPUT") break if t!=[]: lp.append(t) print() print(lp) more=input("Would you like to order more Pizza ?? If yes, Enter Y. 
If no,Enter N : ").lower() if more!='y': print("Thnaks for shopping") s=False break continue s=False print("*"*80) return(lp) def dessert_menu() : print("YOU ARE IN DESSERT WORLD") print() des_menu=[[1,'lava cake ',200], [2,'Ice cream fudge',250], [3,'something sweet',250], [4,'random dessert ',150]] print('-'*80) print() print(" SL.NO DESSERT COST ") print() print(" " ) print() for i in des_menu: print(" ",i[0]," ",i[1]," ", i[2]) print() print("-"*80) print() ld=[] s=True while s: t=[] choice=input("ENTER YOUR CHOICE USING SL.NO : ") if choice=='1' or choice=='2' or choice=='3' or choice=='4': quant=int(input("ENTER THE QUANTITY :")) t.append(choice) cost =int(des_menu[int(choice)-1][2]) product=des_menu[int(choice)-1][1] print(cost) t.append(product) t.append(cost) t.append(quant) t.append(cost*quant) else: print("INVALID INPUT") break if t!=[]: ld.append(t) print() print(ld) more=input("Would you like to order more Desserts ?? If yes, Enter Y. If no,Enter N : ").lower() if more!='y': print("Thnaks for shopping") s=False break continue s=False print("*"*80) print(ld) return(ld) def drinks_menu() : print("YOU ARE IN DRINKS WORLD") print() bev_menu=[[1,'Mountain Dew ',160], [2,'Coca Cola ',165], [3,'Mint Cooler ',140], [4,'Green Apple Mojito',180]] print('-'*80) print() print(" SL.NO SOFTDRINKS COST ") print() print(" " ) print() for i in bev_menu: print(" ",i[0]," ",i[1]," ",i[2]) print() print("-"*80) print() lb=[] s=True while s: t=[] choice=input("ENTER YOUR CHOICE USING SL.NO : ") if choice=='1' or choice=='2' or choice=='3' or choice=='4': quant=int(input("ENTER THE QUANTITY : ")) t.append(choice) cost =int(bev_menu[int(choice)-1][2]) product=bev_menu[int(choice)-1][1] print(cost) t.append(product) t.append(cost) t.append(quant) t.append(cost*quant) else: print("INVALID INPUT") break if t!=[]: lb.append(t) print() print(lb) more=input("Would you like to order more Drinks ?? If yes, Enter Y. 
If no,Enter N : ").lower() if more!='y': print("Thnaks for shopping") s=False break continue s=False print("*"*80) return(lb) def snacks_menu() : print("YOU ARE IN THE WORLD OF QUICK BITES") print() bite_menu=[[1,'NACHOS ',200], [2,'CHEESE BALLS ',250], [3,'POTATO WEDGES',150], [4,'GARLIC BREAD ',150]] print('-'*80) print() print(" SL.NO SNACKS COST ") print() print(" " ) print() for i in bite_menu: print(" ",i[0]," ",i[1]," ", i[2]) print() print("-"*80) print() ls=[] s=True while s: t=[] choice=input("ENTER YOUR CHOICE USING SL.NO : ") if choice=='1' or choice=='2' or choice=='3' or choice=='4': quant=int(input("ENTER THE QUANTITY : ")) t.append(choice) cost =int(bite_menu[int(choice)-1][2]) product=bite_menu[int(choice)-1][1] print(cost) t.append(product) t.append(cost) t.append(quant) t.append(cost*quant) else: print("INVALID INPUT") break if t!=[]: ls.append(t) print() print(ls) more=input("Would you like to order more Snacks ?? If yes, Enter Y. If no,Enter N : ").lower() if more!='y': print("Thnaks for shopping") s=False break continue s=False print("*"*80) return(ls) a=True while a: name=input("Enter Your Name: ") if name=='' or name==' ': print("INVALID NAME!! DO YOU WISH TO CONTINUE?!") c=input("Enter 'Y' or 'N': ").lower() if c!='y': print("THANK YOU FOR VISITING!! 
HAVE A PLEASANT DAY!!") a=False break continue a=False print() print('-'*80) print("CENTRAL PERK".center(60)) print() print("Welcome Mr./Mrs./Miss ",name) print('-'*80) t_bill=[] t_bill=intro() if sum(t_bill[1]) != 0: print() print("-"*60) print("-"*100) print("CENTRAL PERK".center(100)) print("-"*100) print("Sl.No PRODUCT COST QUANTITY FINAL_PRICE ") print("-"*100) b=[] ts=sum(t_bill[1]) sgst= ts*(0.025) cgst= ts*(0.025) tot=ts+sgst+cgst for i in bill1: for j in range(len(bill1)): if j<len(i): b.append(i[j]) count=0 for i in b: count+=1 print(count," ",i[1]) print(" ",i[2]," ",i[3]," ",i[4] ) print("-"*100) print(" SGST : ", sgst) print("-"*100) print(" CGST : ", sgst) print("-"*100) print(" Total Bill : ", tot,"Rs") print("-"*100) print() print("*"*100) print("THANKS FOR SHOPPING !! HAVE A NICE DAY !!".center(100)) print("*"*100)
true
79d92ad7e1a67db801fb1a3238a11c3c173f6040
Python
SixLeopard/DrawingProgram
/Draw on tap.py
UTF-8
6,991
2.546875
3
[]
no_license
#Copyright Jamie Westerhout import tkinter as tk import turtle import random import sqlite3 import sys root = tk.Tk() root.configure(bg="grey18") frame = tk.Frame(root) canvas = tk.Canvas(master = root, width = 1500, height = 900, bg="black") canvas.pack() sys.stdout = open("log.txt", "+a") screen = turtle.TurtleScreen(canvas) t = turtle.RawTurtle(screen) root.title("Drawing Program - Copyright Jamie Westerhout, 2020") #root.iconbitmap('ICO.ico') t.shape("circle") t.shapesize(0.1) t.pensize(2) screen.bgcolor("white") frame.pack() t.speed(100000) #screen = t.TurtleScreen() loop2 = 1 test = 1 t.color("red") def pen1(): unbind() screen.onscreenclick(draw1) def pen2(): unbind() screen.onscreenclick(draw2) def pen3(): unbind() screen.onscreenclick(draw3) #def pen4(): # t.penup() # screen.onscreenclick(draw4) def pen4(): screen.onscreenclick(none2) canvasturtle=screen.getcanvas() t.penup() canvasturtle.bind("<B1-Motion>", draw42) canvasturtle.bind("<Button-1>", draw42) canvasturtle.bind("<ButtonRelease-1>", release) def pen5(): unbind() t.penup() screen.onscreenclick(none) def pen7(): unbind() t.penup() screen.onscreenclick(Record) def none(x, y): unbind() t.penup() t.goto(x, y) def unbind(): canvasturtle=screen.getcanvas() canvasturtle.unbind("<B1-Motion>") canvasturtle.unbind("<Button-1>") canvasturtle.unbind("<ButtonRelease-1>") def none2(x, y): print(x, y) def BGBlack(): screen.bgcolor("black") def BGWhite(): screen.bgcolor("white") def PRed(): t.pencolor("Red") def PWhite(): t.pencolor("white") def PBlack(): t.pencolor("black") def PBlue(): t.pencolor("Blue") def Record(x, y): canvasturtle=screen.getcanvas() canvasturtle.unbind("<B1-Motion>") t.goto(x, y) t.pendown() f = open('CustomPen.pen','a+') f.write(str(float(t.xcor()))) f.write(", ") f.write(str(float(t.ycor()))) f.write("\n") f.close() print(str(t.pos())) def CustomPen1(): canvasturtle=screen.getcanvas() canvasturtle.unbind("<B1-Motion>") p = open('CustomPen.pen','r') t.penup() for x in p: x1, y1 = map(str , 
x.split()) x1 = x1[:-1] t.goto(float(x1), float(y1)) t.pendown() p.close def draw1(x , y): t.penup() t.goto(x, y) Bac() def draw2(x , y): t.penup() t.goto(x, y) star() def draw3(x , y): t.penup() t.goto(x, y) other() def draw4(x , y): t.goto(x, y) t.pendown() def draw42(event): canvasturtle=screen.getcanvas() canvasturtle.unbind("<B1-Motion>") t.goto(event.x-754, 452-event.y) print(event.x-754 , 452-event.y) t.pendown() canvasturtle.bind("<B1-Motion>", draw42) def release(event): canvasturtle=screen.getcanvas() unbind() t.penup() f = open('CustomPen.pen','a+') f.write(str(float(t.xcor()))) canvasturtle.bind("<Button-1>", reinziate) def reinziate(event): t.penup() t.goto(event.x-754, 452-event.y) pen4() def Bac(): t.speed(100000) t.setheading(90) t.penup() t.forward(90) t.setheading(180) t.pendown() loop = 1 legnth = 1 speed = 10000 angle = 1 while loop <= 200: t.forward(legnth) t.left(angle) t.speed(10000000) legnth += 1.5 angle += 1 loop += 1 txcord = t.xcor() tycord = t.ycor() print(txcord , tycord) #if txcord >= 250: # t.color("green") # if tycord >= 250: # t.color("red") #elif txcord <= 250: # t.color("blue") # if tycord <= 250: # t.color("black") def star(): t.penup() t.setheading(180) t.pendown() loop = 1 legnth = 100 speed = 10000 angle = 200 while loop <= 18: t.forward(legnth) t.left(angle) t.speed(10000000) legnth += 1.5 speed += 1.5 angle += 1 loop += 1 txcord = t.xcor() tycord = t.ycor() print(txcord , tycord) def other(): t.penup() t.setheading(180) t.pendown() loop = 1 legnth = 50 speed = 10000 angle = 90 while loop <= 50: t.forward(legnth) t.left(angle) t.speed(10000000) legnth += 1.5 speed += 1.5 angle += 1 loop += 1 txcord = t.xcor() tycord = t.ycor() print(txcord , tycord) button = tk.Button(master = root, text="QUIT", fg="red", command=quit, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Shape 1", fg="black", command=pen1, highlightthickness=5, bd=0.5) 
button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Shape 2", fg="black", command=pen2, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Shape 3", fg="black", command=pen3, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Pen", fg="black", command=pen4, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="none", fg="black",command=pen5, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Play", fg="black",command=CustomPen1, highlightthickness=5, bd=0.5) button.pack(side=tk.LEFT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Record", fg="black",command=pen7, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="BG White", fg="black",command=BGWhite, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="BG Black", fg="black",command=BGBlack, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Pen red", fg="black",command=PRed, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Pen Black", fg="black",command=PBlack, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[5,5]) button = tk.Button(master = root, text="Pen White", fg="black",command=PWhite, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[0,0]) button = tk.Button(master = root, text="Pen Blue", fg="black",command=PBlue, highlightthickness=5, bd=0.5) button.pack(side=tk.RIGHT, padx=[10,20], pady=[0,0]) button.pack() screen.listen() 
screen.mainloop() t.mainloop() root.mainloop()
true
8af23e75e774cf0e2a97deb23bab3a8c8c006a65
Python
SiddharthYennuwar/COVID-19-Vaccine-Distribution-System
/src/Admin.py
UTF-8
3,905
2.6875
3
[]
no_license
from tkinter import * from tkinter import messagebox from tkinter import ttk import pymysql import Modify from PIL import ImageTk # pip install pillow class Admin: def __init__(self, root): self.root = root self.root.title("Admin Portal") # Designate Height and Width of our window self.app_width = 1280 self.app_height = 720 self.screen_width = self.root.winfo_screenwidth() self.screen_height = self.root.winfo_screenheight() self.x = (self.screen_width / 2) - (self.app_width / 2) self.y = (self.screen_height / 2) - (self.app_height / 2) self.root.geometry(f'{self.app_width}x{self.app_height}+{int(self.x)}+{int(self.y)}') self.root.resizable(False, False) # Image Frame self.bg = ImageTk.PhotoImage(file="sample.jpg") self.bg_image = Label(self.root, image=self.bg).place(x=1, y=1, relwidth=1, relheight=1) # insert image into frame # Admin Frame frame_admin = Frame(self.root, bg='#acb6ff') frame_admin.place(x=190, y=40, height=640, width=900) # Give title into frame title_admin = Label(frame_admin, text="Admin Portal", font=("Cambria", 30, "bold"), fg='#002147', bg='#acb6ff').place( x=335, y=30) # View Vaccine Button view_vaccine = Button(frame_admin, command=self.view_vacc, text='View Available Vaccine Stock', font=("Georgia", 15, 'bold'), fg='#2604eb', bg='white').place(x=40, y=110) self.hospital_choosen = ttk.Combobox(width=50, font=('Sans-serif', 13), state='readonly', justify=CENTER) # Adding combobox drop down list self.hospital_choosen['values'] = ('Alphine Life Solutions', 'Atlantis Hospital', 'Fortis Hospital', 'Four Care Hospital', 'Hinduja Healthcare', 'Holy Spirit Hospital', 'JJ Hospital', 'KEM Hospital', 'Kokilaben Hospital', 'Lifeline Hospital', 'Lilavati Hospital', 'Phoenix Hospital', 'Saraswati Hospital', 'Seven Hills Hospital', 'Shatabdi Hospital', 'Silverline Hospital', 'Suchak Hospital', 'Zenith Hospital') self.hospital_choosen.place(x=590, y=155) self.hospital_choosen.current(0) # Modify Vaccine Button modify_vaccine = Button(frame_admin, 
command=self.mod_vacc, text='Modify Current Vaccine Stock', font=("Georgia", 15, 'bold'), fg='#2604eb', bg='white').place(x=40, y=180) # Modify appointment button modify_appointment = Button(frame_admin, command=self.mod_app, text='Modify Current Appointments', font=("Georgia", 15, 'bold'), fg='#2604eb', bg='white').place(x=40, y=250) def view_vacc(self): try: con = pymysql.connect(user='root', password='', host='localhost', database='registration') cur = con.cursor() cur.execute("Select vaccines from vaccine where hospital = %s", str(self.hospital_choosen.selection_get())) row = cur.fetchone() vacc_lb = Label( text=str(row[0]) + " vaccines are available in " + str(self.hospital_choosen.selection_get()), font=("Cambria", 16, "bold"), fg='#002147', bg='#acb6ff') vacc_lb.place(x=590, y=225) con.close() except Exception as e: messagebox.showerror("Error", f"Error due to:{str(e)}", parent=self.root) def mod_vacc(self): import Modify as mod mod.main() def mod_app(self): import Appointment_Admin as apt apt.main() def main(): root = Tk() obj = Admin(root) root.mainloop() if __name__ == "__main__": main()
true
ea47b913a5099d0913db8f272f9c2bdac3c19f97
Python
bxxfighting/emotion
/account/models.py
UTF-8
3,368
2.515625
3
[]
no_license
from django.db import models from django.contrib.auth.hashers import make_password from django.contrib.auth.hashers import check_password from base.models import BaseModel class UserModel(BaseModel): ''' 用户表 ''' ST_NORMAL = 1 ST_FORBIDDEN = 2 ST_CHOICES = ( (ST_NORMAL, '正常'), (ST_FORBIDDEN, '禁用'), ) username = models.CharField('账户', max_length=128) password = models.CharField('密码', max_length=256) name = models.CharField('姓名', max_length=128, null=True, default='') email = models.CharField('邮箱', max_length=128, null=True, default='') phone = models.CharField('联系方式', max_length=64, null=True, default='') status = models.IntegerField('状态', choices=ST_CHOICES, null=True, default=ST_NORMAL) def to_dict(self): data = { 'id': self.id, 'username': self.username, 'name': self.name, 'email': self.email, 'phone': self.phone, } objs = UserRoleModel.objects.filter(is_deleted=False, user_id=self.id).all() roles = [obj.role.to_dict() for obj in objs] data['roles'] = roles return data def set_password(self, password): ''' 设置密码 ''' self.password = make_password(password) self.save() def check_password(self, password): ''' 校验密码 ''' return check_password(password, self.password) class Meta: db_table = 'user' class RoleModel(BaseModel): ''' 角色表 ''' name = models.CharField('角色名', max_length=128) sign = models.CharField('标识', max_length=128) class Meta: db_table = 'role' def to_dict(self): data = super().to_dict() objs = RoleModModel.objects.filter(is_deleted=False, role_id=self.id).all() mods = [obj.mod.to_dict() for obj in objs] data['mods'] = mods return data class UserRoleModel(BaseModel): ''' 用户与角色关联关系表 ''' user = models.ForeignKey(UserModel, on_delete=models.CASCADE) role = models.ForeignKey(RoleModel, on_delete=models.CASCADE) class Meta: db_table = 'user_role' class ModModel(BaseModel): ''' 模块 ''' name = models.CharField('模块名', max_length=128) sign = models.CharField('标识', max_length=128) rank = models.IntegerField('排序值', default=0) class Meta: db_table = 'mod' def 
to_dict(self): data = super().to_dict() objs = ModUrlModel.objects.filter(is_deleted=False, mod_id=self.id).all() urls = [obj.url.to_dict() for obj in objs] data['urls'] = urls return data class RoleModModel(BaseModel): ''' 角色与模块关联关系表 ''' role = models.ForeignKey(RoleModel) mod = models.ForeignKey(ModModel) class Meta: db_table = 'role_mod' class UrlModel(BaseModel): ''' URL ''' name = models.CharField('url名称', max_length=128) url = models.TextField('url地址') class Meta: db_table = 'url' class ModUrlModel(BaseModel): ''' 模块与URL关联关系 ''' mod = models.ForeignKey(ModModel) url = models.ForeignKey(UrlModel) class Meta: db_table = 'mod_url'
true
264fc50d1acab1d4c991aa0560b4840223f167e7
Python
mateusKoppe/process-simulation
/simulation/components/duration.py
UTF-8
272
2.90625
3
[]
no_license
import re duration_regex = r"^TS=(\d+)$" def is_duration(str): return re.search(duration_regex, str) def generate_duration(raw): groups = re.search(duration_regex, raw).groups() return { "type": "duration", "value": int(groups[0]), }
true
8e65d6488fb299b95c5c1c66f8ae1cfa2f233b07
Python
JonSeijo/project-euler
/problems 40-49/problem_46.py
UTF-8
2,403
4.3125
4
[]
no_license
# Goldbach's other conjecture # Problem 46 """ It was proposed by Christian Goldbach that every odd composite number can be written as the sum of a prime and twice a square. 9 = 7 + 2*1^2 15 = 7 + 2*2^2 21 = 3 + 2*3^2 25 = 7 + 2*3^2 27 = 19 + 2*2^2 33 = 31 + 2*1^2 It turns out that the conjecture was false. What is the smallest odd composite that cannot be written as the sum of a prime and twice a square? """ import math def getPrimes(maxNumber): primes = [2] number = 3 while number < maxNumber: for div in primes: if div >= int(math.sqrt(number)) + 1: isPrime = True break if number % div == 0: isPrime = False break if isPrime: primes.append(number) number += 2 return primes def main(): """ Solves in about 20 seconds """ # Pregenerate a list of primes until 10000 (guess) primes = getPrimes(10000) answer = 0 number = 7 isComposite = False while answer == 0: # Add 2 each time because it cant be divisible by 2 number += 2 # Check if number is composite for div in primes: # If is divisible, is composite if number % div == 0: isComposite = True break # If divisor is greater than the sqrt, then the number is prime if div >= int(math.sqrt(number)) + 1: isComposite = False break if isComposite: canBeWritten = False # Check for each prime until is greater than number for prime in primes: if prime > number: break # Tries with primes[0] + 2*i^2 # Then tries with primes[0] + 2*(i+1)^2 # Then tries with primes[0] + 2*(i+2)^2 # ... until is greater than number or canBeWritten for single in range(1, int(number/2)): newNumber = prime + 2 * single*single if newNumber > number: break if newNumber == number: canBeWritten = True break if not canBeWritten: answer = number print "answer: " + str(answer) if __name__ == "__main__": main()
true
a480b878f70b07e4783823f0a4bfa2571cfea834
Python
RabidSheep55/TescoScraper2
/scripts/optimize.py
UTF-8
1,748
3
3
[]
no_license
from getProducts import get_products import json import concurrent.futures from itertools import repeat import numpy as np from time import time import matplotlib.pyplot as plt def timed_get_products(count): ''' Timed version of the get_products function ''' start = time() products = get_products(1, count) end = time() if len(products) == count: return end - start else: return 0 def evaluate_best_params(): ''' Figures out the optimum amout of products to fetch each time ''' items = np.power(2, np.arange(1, 12)) # Make it so it repeats each a few times (for averaging) _, items = np.meshgrid(range(10), items) items = items.flatten() with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: results = executor.map(timed_get_products, items) with open("perfs.json", 'wb') as file: json.dump({ "items": [int(i) for i in items], "times": list(results) }, file) def plot_perfs(): ''' Plot performances yielded from the evaluate_best_params function ''' with open("perfs.json", 'r') as file: data = json.load(file) items = np.array(data['items']) times = np.array(data['times']) # Filter out 0 (error) values items = items[times != 0] times = times[times != 0] unique_items = np.array(sorted(list(set(items)))) averages = np.array([np.average(times[items == i]) for i in unique_items]) plt.scatter(items, times / items, c='k', alpha=0.5, marker="x") plt.plot(unique_items, averages / unique_items, c='r') plt.xscale('log') plt.ylabel("Seconds per product") plt.xlabel("Product batch size") plt.show()
true
369a8e75aec8117acb53b183a6a822898b5079f5
Python
JSalram/Python
/Learning/2020/While.py
UTF-8
1,649
4.53125
5
[]
no_license
# Bucle While --> Bucle indefinido ## Bucle numérico i = 0 while i <= 10: print(i) i += 1 # i = i + 1 # ============================================================================================= ## Bucle booleano seguir = True i = 0 while seguir: print(i) i += 1 if i == 5: seguir = False # ============================================================================================= ## Bucle con input seguir = True while seguir: print("1. Sumar\n2. Restar\n3. Salir") opcion = int(input("Elige una opción: ")) if opcion == 1: print("Suma") elif opcion == 2: print("Resta") elif opcion == 3: seguir = False else: print("Opción incorrecta") # ============================================================================================= ## Bucle con cadenas cad = input("¿Deseas continuar? (y/n)\n") while cad != "y" and cad != "n": cad = input("Opción incorrecta. Vuelve a intentarlo (y/n)\n") # ============================================================================================= ## Bucle float (no preciso) inicial = 10.5 objetivo = 5.2 distancia = 0 while inicial > objetivo: inicial -= 0.1 distancia += 0.1 print(distancia) ### Bucle float (usando el método round()) inicial = 10.5 objetivo = 5.2 distancia = 0 while inicial > objetivo: inicial = round(inicial - 0.1, 1) distancia = round(distancia + 0.1, 1) print(distancia) ### round(numero [,decimales]) n = round(17.5) print(n) # Fallo round # --------> Lo más apropiado sería trabajar con números enteros n = round(2.5) print(n)
true
966848e20da4df9c3e4e903b353579220c32517f
Python
mmann964/exercises
/Exercise12.py
UTF-8
207
3.375
3
[]
no_license
#!/usr/bin/python def firstAndLast(b): l = [] l.append(b[0]) l.append(b[-1]) return l def firstAndLast2(b): return [ b[0], b[-1]] a = [ 5, 10, 15, 20, 32, 19 ] print firstAndLast2(a)
true
c6233d05d2cf351113ce9468e3664c3530c7409e
Python
sabariks/pythonpgm
/removevow.py
UTF-8
302
3.375
3
[]
no_license
s=input() c=list(s) vowels=['A','a','E','e','I','i','O','o','U','u'] bol=0 for i in range(0,len(s)): if c[i] in vowels: bol=1 if bol==1: print('yes') else: print('no') # abc = ''.join([l for l in c if l not in vowels]) # abc = [i for i in abc] # print(''.join(abc))
true
4042468d34ae7b26465829ca08f646b6bfbc7f3a
Python
miakastina/Cryptography
/Caesar_cipher/Python2/caesarEnc_py2.py
UTF-8
534
3.6875
4
[]
no_license
plainText = raw_input("Plaintext: ") key = int(raw_input("Key: ")) def caesar_encryption(plainText,key): encryption_str = "" for i in plainText: if i.isupper(): temp_incode = 65 + ((ord(i) - 65 + key) % 26) encryption_str = encryption_str + chr(temp_incode) elif i.islower(): temp_incode = 97 + ((ord(i) - 97 + key) % 26) encryption_str = encryption_str + chr(temp_incode) else: encryption_str = encryption_str + i print "Ciphertext:",encryption_str caesar_encryption(plainText,key)
true
847bdc6b9752905bb0477e25d573a4be1660944d
Python
davidemiro/project_bioinformatics_official
/Single-omnic analysis/myclass/ExtractClinicalCase.py
UTF-8
2,159
3.234375
3
[]
no_license
import os import json import pandas as pd """ Class to extract the clinical_case from the json file. It returns a dataframe in the following form: case_id| ...[clinical case]...| label. """ class ExtractClinicalCase: def __init__(self, cases_id): with open('./data-ready/clinical.cases_selection.2020-11-12.json', 'r') as json_file: data = json.load(json_file) remove_el = list() for el in data: if el['case_id'] not in cases_id: remove_el.append(el) for el in remove_el: data.remove(el) clinical_data = {'case_id': None, 'tumor_stage': None, 'prior_malignancy': None, 'age_at_diagnosis': None, 'morphology': None, 'cigarettes_per_day': None, 'label': '' } self.df = pd.DataFrame(data=[], columns=clinical_data.keys()) for i, el in enumerate(data): clinical_data['case_id'] = el['case_id'] clinical_data['tumor_stage'] = el['diagnoses'][0]['tumor_stage'] clinical_data['prior_malignancy'] = el['diagnoses'][0]['prior_malignancy'] if el['diagnoses'][0]['age_at_diagnosis'] is not None: value = int(el['diagnoses'][0]['age_at_diagnosis'])/365 clinical_data['age_at_diagnosis'] = self.__truncate__(value) clinical_data['morphology'] = el['diagnoses'][0]['morphology'] if el['exposures'][0]['cigarettes_per_day'] is not None: value = int(el['exposures'][0]['cigarettes_per_day']) clinical_data['cigarettes_per_day'] = value self.df = self.df.append(pd.DataFrame(clinical_data, index=[i]), ignore_index=True) def get_df_clinical_case(self): return self.df def __truncate__(self, n, decimals=-1): """ Function to take the decade of the age. """ multiplier = 10 ** decimals return int(n * multiplier) / multiplier
true
0233561f1bfc1a0f1cd2414706336eedc6361482
Python
viroee/python-code
/Desafio/TiposPremitidos/Desafios/conversor de moeda.py
UTF-8
145
3.796875
4
[]
no_license
n = float(input('Quanto dinheiro voce tem na carteira :')) n1 = n / 3.27 print('tenho {:.2f} em reais e em dolares tenho {:.2f}'.format(n, n1))
true
aa74b5c1c1e7195d6794785e8f9c0e8dbf8eacf1
Python
einarprump/ScraperGUI
/windowGUI.py
UTF-8
14,114
2.6875
3
[]
no_license
import tkinter as ttk
import tkinter.filedialog as openFile
from urllib import request
from Eparser import Eparser


class Manager(object):
    """Mediator between the GUI frames and the HTML parser.

    Registers itself as the `manager` of each frame so button callbacks
    can reach the parser and the other frames.
    """

    def __init__(self, gui):
        self.isWebsite = False
        self.parser = Eparser()
        self.gui = gui
        self.gui.listbox_frame.manager = self
        self.gui.input_frame.manager = self
        self.gui.headers_frame.manager = self

    def btnScrape(self, filename):
        """Feed the parser from a URL (http...) or a local file, then
        populate the tag listbox from the parsed document."""
        if len(filename) < 5:
            print("Not a file or a url!")
            return
        if filename[0:4] == "http":
            req = request.Request(filename)
            req.add_header('User-Agent',
                           'HomeMade-Browser/0.1 - In Development')
            getResponse = request.urlopen(req)
            print(getResponse.getheaders())
            requestBytes = getResponse.read()
            self.parser.feed(requestBytes.decode("utf-8"))
            getResponse.close()
            # Headers stay readable after close(); show them in the GUI.
            self.__create_widgets(getResponse.getheaders())
        else:
            with open(filename, 'r', encoding='utf-8') as f:
                self.parser.feed(f.read())
        for t in self.parser.htmlDoc.keys():
            self.gui.listbox_frame.tagListbox.insert(ttk.END, t)

    def __create_widgets(self, headers):
        """Render one (name, value) label pair per HTTP response header."""
        r = 0
        for h in headers:
            ttk.Label(self.gui.headers_frame, text=h[0]).grid(
                row=r, column=0, sticky="W")
            ttk.Label(self.gui.headers_frame, text=h[1]).grid(
                row=r, column=1, columnspan=5, sticky="W")
            r += 1


class HeadersFrame(ttk.Frame):
    """Empty frame that Manager fills with HTTP response headers."""

    def __init__(self, container):
        super().__init__(container)
        self.manager = None


class InputFrame(ttk.Frame):
    """URL/file entry row with Scrape and Open-file buttons."""

    def __init__(self, container):
        super().__init__(container)
        self.__create_widgets()
        self.manager = None

    def __create_widgets(self):
        # Keep the real widget, not the None returned by .grid()
        # (the original assigned the .grid() result).
        self.urlFile = ttk.Label(self, text="Url / File:")
        self.urlFile.grid(column=0, row=0, sticky=ttk.W)
        self.urlFileName = ttk.StringVar()
        self.urlFileEntry = ttk.Entry(self, width=100,
                                      textvariable=self.urlFileName)
        self.urlFileEntry.focus()
        self.urlFileEntry.grid(column=1, row=0, sticky=ttk.W)
        ttk.Button(self, text="Scrape", command=self.btnScrape,
                   underline=0, takefocus=True).grid(
            column=2, row=0, ipadx=3, sticky="NEWS")
        ttk.Button(self, text="Open file..", command=self.btnOpenFile,
                   underline=0, takefocus=True).grid(
            column=3, row=0, ipadx=3, sticky="NEWS")
        for widget in self.winfo_children():
            widget.grid(padx=5, pady=5)

    def btnScrape(self):
        # Delegate the actual scraping to the Manager.
        self.manager.btnScrape(self.urlFileName.get())

    def btnOpenFile(self):
        filename = openFile.askopenfilename(
            initialdir="/", title="Select file",
            filetypes=[('HTML', '*.html')])
        self.urlFileName.set(filename)


class ListboxFrame(ttk.Frame):
    """Three linked listboxes (tag -> attribute -> value) plus a data
    preview of the first stored value, limited to 30 characters."""

    def __init__(self, container):
        super().__init__(container)
        self.theWay = ttk.StringVar()   # breadcrumb: tag > attr > value
        self.__create_widgets()
        self.manager = None
        self.selected = {'tag': None, 'attr': None, 'prop': None}

    def __create_widgets(self):
        # --- tag column ---
        ttk.Label(self, text="Tags").grid(
            row=0, column=0, sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        tagScrollbar = ttk.Scrollbar(self, orient=ttk.VERTICAL)
        tagScrollbar.grid(row=1, column=1, rowspan=8, sticky=ttk.N+ttk.S)
        self.tagListbox = ttk.Listbox(
            self, yscrollcommand=tagScrollbar.set,
            selectmode=ttk.SINGLE, exportselection=False)
        self.tagListbox.grid(row=1, column=0, rowspan=8, padx=(5, 0),
                             sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        self.tagListbox.bind("<<ListboxSelect>>", self.updateAttrListbox)
        tagScrollbar['command'] = self.tagListbox.yview

        # --- attribute column ---
        ttk.Label(self, text="Attributes").grid(
            row=0, column=2, sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        attrScrollbar = ttk.Scrollbar(self, orient=ttk.VERTICAL)
        attrScrollbar.grid(row=1, column=3, rowspan=8, padx=0,
                           sticky=ttk.N+ttk.S)
        self.attrListbox = ttk.Listbox(
            self, yscrollcommand=attrScrollbar.set,
            selectmode=ttk.SINGLE, exportselection=False)
        self.attrListbox.grid(row=1, column=2, rowspan=8, padx=(10, 0),
                              sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        self.attrListbox.bind("<<ListboxSelect>>", self.updateValueListbox)
        attrScrollbar['command'] = self.attrListbox.yview

        # --- value column ---
        self.columnconfigure(4, weight=2)
        ttk.Label(self, text="Value").grid(
            row=0, column=4, sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        valueScrollbar = ttk.Scrollbar(self, orient=ttk.VERTICAL)
        valueScrollbar.grid(row=1, column=5, rowspan=8, sticky=ttk.N+ttk.S)
        self.valueListbox = ttk.Listbox(
            self, yscrollcommand=valueScrollbar.set,
            selectmode=ttk.SINGLE, exportselection=False)
        self.valueListbox.grid(row=1, column=4, rowspan=8, padx=(5, 0),
                               sticky=ttk.S+ttk.E+ttk.W+ttk.N)
        valueScrollbar['command'] = self.valueListbox.yview
        self.valueListbox.bind("<<ListboxSelect>>", self.updateTheData)

        # --- breadcrumb and data preview ---
        self.theData = ttk.StringVar()
        self.columnconfigure(6, weight=1)
        self.columnconfigure(7, weight=1)
        ttk.Label(self, text="Traveling: ").grid(
            row=10, column=0, sticky=ttk.E+ttk.W+ttk.N)
        ttk.Label(self, textvariable=self.theWay).grid(
            row=10, column=1, columnspan=5, sticky=ttk.E+ttk.W+ttk.N)
        ttk.Label(self, text="DATA:").grid(
            row=11, column=0, sticky=ttk.E+ttk.W+ttk.N)
        self.currentData = ttk.Label(self, textvariable=self.theData)
        self.currentData.grid(row=11, column=1, columnspan=4,
                              sticky=ttk.W+ttk.N)

    def updateTheData(self, e):
        """Show (at most 30 chars of) the first datum for the selection."""
        currProp = self.valueListbox.get(self.valueListbox.curselection())
        data = self.manager.parser.htmlDoc[
            self.selected['tag']][self.selected['attr']][currProp][0]
        self.theWay.set(
            f'{self.selected["tag"]} > {self.selected["attr"]} > {currProp}')
        self.theData.set(data[0:30] if len(data) > 30 else data)

    def updateAttrListbox(self, e):
        """A tag was picked: reset downstream boxes, list its attributes."""
        if self.valueListbox.size() > 0:
            self.valueListbox.delete(0, self.valueListbox.size())
        self.theData.set("")
        self.theWay.set("")
        self.selected['tag'] = self.tagListbox.get(
            self.tagListbox.curselection())
        self.theWay.set(self.selected['tag'])
        if self.attrListbox.size() > 0:
            self.attrListbox.delete(0, self.attrListbox.size())
        for a in self.manager.parser.htmlDoc[self.selected['tag']].keys():
            self.attrListbox.insert(ttk.END, a)

    def updateValueListbox(self, e):
        """An attribute was picked: list its values."""
        currSelectet = self.attrListbox.curselection()
        if currSelectet != ():
            self.selected['attr'] = self.attrListbox.get(currSelectet)
            self.theWay.set(
                f'{self.selected["tag"]} > {self.selected["attr"]}')
            self.valueListbox.delete(0, self.valueListbox.size())
            for a in self.manager.parser.htmlDoc[
                    self.selected['tag']][self.selected['attr']].keys():
                self.valueListbox.insert(ttk.END, a)


class TreeView(ttk.Frame):
    """Unused tree view of the scrape result (work in progress)."""

    def __init__(self, container):
        super().__init__(container)

    def initialize(self, data):
        print("INITIALIZING...")
        # Treeview lives in tkinter.ttk, not tkinter; the original's
        # ttk.Treeview (with `import tkinter as ttk`) raised
        # AttributeError whenever this was called.
        from tkinter.ttk import Treeview
        tree = Treeview(self)
        tree.heading('#0', text='Scraped', anchor=ttk.W)
        tree.insert('', ttk.END)


class OptionsFrame(ttk.Frame):
    """Checkbox panel selecting which tags/attributes to harvest."""

    TAGENTRY = "Ex: span,li,..."     # placeholder text for the tag entry
    ATTRENTRY = "Ex: rel,alt,..."    # placeholder text for the attr entry

    def __init__(self, container):
        super().__init__(container)
        self.__create_widgets()
        self.manager = None

    def __create_widgets(self):
        ttk.Label(self,
                  text="Normal scraping behavior is that all tags are harvested,\n to change that check tags and/or attributes to harvest.",
                  anchor="e").grid(row=0, column=0, columnspan=2,
                                   sticky="WENS")

        # --- tag checkboxes ---
        self.head = ttk.StringVar()
        self.img = ttk.StringVar()
        self.a = ttk.StringVar()
        self.div = ttk.StringVar()
        self.meta = ttk.StringVar()
        self.addTag = ttk.StringVar()
        self.addTag.set(self.TAGENTRY)
        tagLabelFrame = ttk.LabelFrame(self, text="Tags", borderwidth=1)
        tagLabelFrame.grid(row=1, column=0, pady=10, padx=10,
                           ipadx=5, ipady=5, sticky="NEWS")
        ttk.Checkbutton(tagLabelFrame, text="head", variable=self.head,
                        onvalue="head", offvalue="").grid(
            row=0, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(tagLabelFrame, text="img", variable=self.img,
                        onvalue="img", offvalue="").grid(
            row=1, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(tagLabelFrame, text="a", variable=self.a,
                        onvalue="a", offvalue="").grid(
            row=2, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(tagLabelFrame, text="div", variable=self.div,
                        onvalue="div", offvalue="").grid(
            row=0, column=1, pady=(1, 0), padx=(5, 0), sticky="W")
        # FIX: the "meta" checkbox was wired to self.div with
        # onvalue="div", so toggling it clobbered the div selection.
        ttk.Checkbutton(tagLabelFrame, text="meta", variable=self.meta,
                        onvalue="meta", offvalue="").grid(
            row=1, column=1, pady=(1, 0), padx=(5, 0), sticky="W")
        self.tagEntry = ttk.Entry(tagLabelFrame, textvariable=self.addTag)
        self.tagEntry.grid(row=3, column=0, columnspan=2, padx=(5, 0),
                           pady=(8, 0), sticky="NSEW")
        ttk.Button(tagLabelFrame, command=self.do_nothing, text="Add").grid(
            row=3, column=3, pady=(8, 0), sticky="NSEW")
        self.tagEntry.bind(
            "<FocusIn>",
            lambda event, arg=self.addTag: self.clean_example_text(event, arg))
        self.tagEntry.bind(
            "<FocusOut>",
            lambda event, arg=[self.addTag, self.TAGENTRY]:
                self.check_entry(event, arg))

        # --- attribute checkboxes ---
        self.class_atr = ttk.StringVar()
        self.id_attr = ttk.StringVar()
        self.style = ttk.StringVar()
        self.href = ttk.StringVar()
        self.addAttr = ttk.StringVar()
        self.addAttr.set(self.ATTRENTRY)
        attrLabelFrame = ttk.LabelFrame(self, text="Attributes",
                                        borderwidth=1)
        attrLabelFrame.grid(row=1, column=1, pady=10, padx=10,
                            ipadx=5, ipady=5, sticky="NEWS")
        # FIX: onvalue was "head" (copy-paste from the tag column).
        ttk.Checkbutton(attrLabelFrame, text="class",
                        variable=self.class_atr, onvalue="class",
                        offvalue="").grid(
            row=0, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(attrLabelFrame, text="id", variable=self.id_attr,
                        onvalue="id", offvalue="").grid(
            row=1, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(attrLabelFrame, text="style", variable=self.style,
                        onvalue="style", offvalue="").grid(
            row=2, column=0, pady=(1, 0), padx=(5, 0), sticky="W")
        ttk.Checkbutton(attrLabelFrame, text="href", variable=self.href,
                        onvalue="href", offvalue="").grid(
            row=0, column=1, pady=(1, 0), padx=(5, 0), sticky="W")
        self.attrEntry = ttk.Entry(attrLabelFrame, textvariable=self.addAttr)
        self.attrEntry.grid(row=3, column=0, columnspan=2, padx=(5, 0),
                            pady=(8, 0), sticky="NSEW")
        ttk.Button(attrLabelFrame, command=self.do_nothing, text="Add").grid(
            row=3, column=3, pady=(8, 0), sticky="NSEW")
        self.attrEntry.bind(
            "<FocusIn>",
            lambda event, arg=self.addAttr: self.clean_example_text(event, arg))
        self.attrEntry.bind(
            "<FocusOut>",
            lambda event, arg=[self.addAttr, self.ATTRENTRY]:
                self.check_entry(event, arg))

    def check_entry(self, event, arg):
        # Restore the placeholder when the entry is left empty.
        if len(arg[0].get()) == 0:
            arg[0].set(arg[1])

    def do_nothing(self):
        print("DO NOTHING!")

    def clean_example_text(self, e, arg):
        # Clear the placeholder when the entry gains focus.
        arg.set('')


class ApplicationGUI(ttk.Tk):
    """Top-level window: menu bar plus the input/listbox/headers/options
    frames."""

    def __init__(self):
        super().__init__()
        self.title("ScraperGUI - v0.1")
        self.resizable(width=False, height=False)
        # NOTE(review): '-toolwindow' is a Windows-only attribute and
        # raises TclError on other platforms — confirm target OS.
        self.attributes('-toolwindow', True)
        self.columnconfigure(1, weight=1)
        self.rowconfigure(1, weight=1)
        self.__create_widgets()

    def __create_widgets(self):
        self.__create_menu()
        self.input_frame = InputFrame(self)
        self.input_frame.grid(column=0, row=0, columnspan=4, sticky="WENS")
        self.listbox_frame = ListboxFrame(self)
        self.listbox_frame.grid(column=0, row=1, sticky="WENS",
                                pady=(0, 5), padx=5)
        self.headers_frame = HeadersFrame(self)
        self.headers_frame.grid(column=0, row=2, sticky="WENS")
        self.options_frame = OptionsFrame(self)
        self.options_frame.grid(column=1, row=0, rowspan=8,
                                sticky=ttk.W+ttk.E, padx=(0, 15))

    def __create_menu(self):
        menubar = ttk.Menu(self)
        file_menu = ttk.Menu(menubar, tearoff=0)
        file_menu.add_command(label="New", command=self.do_nothing)
        file_menu.add_command(label="Open", command=self.do_nothing)
        file_menu.add_command(label="Save", command=self.do_nothing)
        file_menu.add_command(label="Save as...", command=self.do_nothing)
        file_menu.add_command(label="Export Scrape", command=self.do_nothing)
        file_menu.add_separator()
        file_menu.add_command(label="Quit", command=self.quit)
        menubar.add_cascade(label="File", menu=file_menu)
        edit_menu = ttk.Menu(menubar, tearoff=0)
        edit_menu.add_command(label="Find", command=self.do_nothing)
        edit_menu.add_separator()
        edit_menu.add_command(label="Configure Headers",
                              command=self.do_nothing)
        edit_menu.add_command(label="Configure Scraper",
                              command=self.do_nothing)
        menubar.add_cascade(label="Edit", menu=edit_menu)
        help_menu = ttk.Menu(menubar, tearoff=0)
        help_menu.add_command(label="About", command=self.do_nothing)
        menubar.add_cascade(label="Help", menu=help_menu)
        self.config(menu=menubar)

    def do_nothing(self):
        print("NOTHING!")


if __name__ == "__main__":
    app = ApplicationGUI()
    Manager(app)
    app.mainloop()
true
2b987aa80955278c92f003e476e8388133face4e
Python
luoshao23/Data_Structure_and_Algorithm_in_Python
/ch05_Array/insertion_sort.py
UTF-8
740
3.984375
4
[]
no_license
def insertion_sort(A):
    """Sort the list A in place into nondecreasing order.

    Classic insertion sort: for each k from 1 to n-1, shift the elements
    larger than A[k] one slot to the right and drop A[k] into the gap.
    Runs in O(n^2) worst case, O(n) on already-sorted input.
    """
    for k in range(1, len(A)):
        cur = A[k]
        j = k
        # Shift larger elements right until cur's slot is found.
        while j > 0 and A[j - 1] > cur:
            A[j] = A[j - 1]
            j -= 1
        A[j] = cur


if __name__ == "__main__":
    lst = [5, 4, 7, 1, 12, 13, 23, 0, 4]
    print(lst)
    insertion_sort(lst)
    print(lst)
true
19b848843e345ec1349e09f9f1abb3a65d62dbd1
Python
boswald314/RDToUPD
/helpers.py
UTF-8
587
2.515625
3
[]
no_license
import struct
import random
from constants import *


def packHeader(source, dest, seq, ack, ackf=1, synf=0, finf=0, rstf=0):
    """Pack the RDT header fields into bytes using HFORMAT.

    header structure:
    ---------32 bits------------
    --source port----dest port--
    ------sequence number-------
    ---------ack number---------
    -flags (4 bits)-
    """
    return struct.pack(HFORMAT, source, dest, seq, ack,
                       ackf, synf, finf, rstf)


def unpackHeader(data):
    """Unpack the leading HEADERLENGTH bytes of a datagram into a tuple
    of header fields (inverse of packHeader)."""
    return struct.unpack(HFORMAT, data[:HEADERLENGTH])


def getISN():
    """Return a random initial sequence number in [0, SEQMAX)."""
    return random.randrange(SEQMAX)


class FINReceived(Exception):
    """Raised when the peer signals teardown with a FIN."""
    pass
true
ea470afe115e691e4d3cbdcdc3ab9f918d9f7965
Python
sjuvekar/Kaggle-Dato
/src/archive/passive_aggressive_model.py
UTF-8
444
2.5625
3
[]
no_license
from base_model import BaseModel
from sklearn.linear_model import PassiveAggressiveClassifier
from scipy.stats import logistic


class PassiveAggressiveModel(BaseModel):
    """BaseModel wrapper around sklearn's PassiveAggressiveClassifier."""

    def __init__(self, cached_features):
        super().__init__(cached_features)
        # Squared-hinge loss, fixed seed for reproducible fits.
        self.model = PassiveAggressiveClassifier(
            loss='squared_hinge', C=1.0, random_state=1)

    def _predict_internal(self, X_test):
        """Return hard class predictions from the fitted classifier."""
        return self.model.predict(X_test)
true
dd1f315475bc7256ba78e128b561c20586e4a74b
Python
hubward/ansible-softlayer
/softlayer_ssh_keys.py
UTF-8
4,805
2.5625
3
[ "Apache-2.0" ]
permissive
#!/usr/bin/python
# -*- coding: utf-8 -*-

DOCUMENTATION = '''
---
module: softlayer-vs
short_description: Maintains a list of SSH Keys in SoftLayer Public Cloud Account
description:
     - Maintains a list of SSH Keys in SoftLayer Public Cloud Account
     - Labels and keys alone needs to be unique along the list
requirements:
     - Requires SoftLayer python client
     - Requires Ansible
options:
  api_key:
    description:
      - SoftLayer API Key
    default: null
  sl_username:
    description:
      - SoftLayer username
    default: null
  keys_to_check_in:
    description:
      - List of ssh key dicts i.e. label:value, key:value
    default: {}
author: scoss
notes:
  - Instead of supplying api_key and username, .softlayer or env variables
  - Example:
  - ---
  - api_key: 311dc0503fa17c8284c0094876dd8b74d605c43354fgd5cf343c7cc5b27005
  - sl_username: user1
  - keys_to_check_in:
  - - key: ssh-rsa 121211311414141414...
  -   label: key1
  - - key: ssh-rsa AAAAB3NzaC1yc2EAAA...
  -   label: key2
'''

from ansible.module_utils.basic import *
import SoftLayer
import sys
import logging
import time
# FIX: "from softlayer_vs_basic.py import ..." tried to import a
# submodule named "py" inside a package; the module name has no suffix.
from softlayer_vs_basic import SLClientConfig


class SshKeysConfig(object):
    """Validated view of the `ssh_keys` list from the Ansible params.

    Each entry must have a non-empty, unique `label` and `key`.
    """

    def __init__(self, ansible_config):
        self.ssh_keys = ansible_config.get("ssh_keys")
        for ssh_key in self.ssh_keys:
            label = ssh_key.get("label")
            key = ssh_key.get("key")
            if label is None or label == "":
                raise ValueError("No label provided for key {}".format(key))
            if key is None or key == "":
                raise ValueError("No key provided for label {}".format(label))
            # Pairwise uniqueness check against every *other* entry.
            for ssh_key2 in self.ssh_keys:
                if ssh_key2 is ssh_key:
                    continue
                if ssh_key2["label"] == ssh_key["label"]:
                    raise ValueError(
                        "label {} is not unique".format(ssh_key2["label"]))
                if ssh_key2["key"] == ssh_key["key"]:
                    raise ValueError(
                        "ssh_key {} is not unique".format(ssh_key2["key"]))

    @staticmethod
    def arg_spec():
        """Ansible argument spec fragment for this config."""
        return dict(
            ssh_keys=dict(type='list')
        )

    def sl_keys_to_delete(self, sl_keys):
        """Return the SoftLayer keys that are no longer in the config.

        Returns a list (not a lazy filter) so callers can take len().
        """
        return [sl_ssh_key for sl_ssh_key in sl_keys
                if self._doesnt_have_key(sl_ssh_key, self.ssh_keys)]

    def config_keys_to_add(self, sl_keys):
        """Return the configured keys not yet present in SoftLayer."""
        return [config_ssh_key for config_ssh_key in self.ssh_keys
                if self._doesnt_have_key(config_ssh_key, sl_keys)]

    def _doesnt_have_key(self, key_to_check, keys_list):
        # True when no entry matches on BOTH label and key.
        for ssh_key in keys_list:
            if ssh_key["label"] == key_to_check["label"] and \
               ssh_key["key"] == key_to_check["key"]:
                return False
        return True


class SshKeys(object):
    """Synchronizes the ansible-maintained SSH keys in SoftLayer."""

    # Marker note distinguishing keys this module owns.
    _mba_note = "maintained by ansible"

    def __init__(self, sl_client, keys_config):
        self._sl_ssh_keys_manager = SoftLayer.SshKeyManager(sl_client)
        self._kc = keys_config

    def sync_config(self):
        """Delete stale keys and add missing ones.

        Returns True when anything changed (for Ansible's `changed`).
        """
        try:
            sl_keys = self._keys_maintained_by_ansible()
            # FIX: these were Python-3 filter iterators; iterating them
            # exhausted them and len() below raised TypeError.
            sl_keys_to_delete = self._kc.sl_keys_to_delete(sl_keys)
            for ssh_key in sl_keys_to_delete:
                self._sl_ssh_keys_manager.delete_key(ssh_key["id"])
            config_keys_to_add = self._kc.config_keys_to_add(sl_keys)
            for ssh_key in config_keys_to_add:
                self._sl_ssh_keys_manager.add_key(
                    ssh_key["key"], ssh_key["label"], SshKeys._mba_note)
        except Exception as e:
            raise SSHKeyException(str(e))
        return len(sl_keys_to_delete) != 0 or len(config_keys_to_add) != 0

    def _keys_maintained_by_ansible(self):
        """List only the keys whose note marks them as ansible-owned."""
        sl_keys = self._sl_ssh_keys_manager.list_keys()
        return [ssh_key for ssh_key in sl_keys
                if ssh_key.get("notes") == SshKeys._mba_note]


class SSHKeyException(Exception):
    """Module-level error wrapper carrying a plain message."""

    def __init__(self, msg):
        self._msg = msg

    def __str__(self):
        return "Exception: {}, MSG: {}".format(type(self), self._msg)

    def msg(self):
        return self._msg


def main():
    module_helper = AnsibleModule(
        # FIX: dict_items + dict_items raises TypeError on Python 3;
        # merge the two specs with dict unpacking instead.
        argument_spec=dict(**SLClientConfig.arg_spec(),
                           **SshKeysConfig.arg_spec())
    )
    sl_client_config = SLClientConfig(module_helper.params)
    sl_client = SoftLayer.Client(username=sl_client_config.sl_username,
                                 api_key=sl_client_config.api_key)
    try:
        ssh_keys = SshKeys(sl_client,
                           SshKeysConfig(ansible_config=module_helper.params))
        module_helper.exit_json(changed=ssh_keys.sync_config())
    except Exception as se:
        module_helper.fail_json(msg=str(se))


# Guard so importing this module (e.g. for tests) does not run Ansible.
if __name__ == '__main__':
    main()
true
74bd81dd59a5f56449a40f18cc2b902ed43e48a6
Python
gabriellaec/desoft-analise-exercicios
/backup/user_160/ch88_2019_11_26_18_41_22_499622.py
UTF-8
456
3.4375
3
[]
no_license
class Retangulo():
    """Axis-aligned rectangle defined by two opposite corner points.

    Each point must expose `x` and `y` attributes.
    """

    def __init__(self, ponto1, ponto2):
        # Keep the raw corner coordinates.
        self.ponto1_x = ponto1.x
        self.ponto1_y = ponto1.y
        self.ponto2_x = ponto2.x
        self.ponto2_y = ponto2.y
        # Side lengths along each axis (order of the corners is irrelevant).
        self.lado_x = abs(self.ponto2_x - self.ponto1_x)
        self.lado_y = abs(self.ponto2_y - self.ponto1_y)

    def calcula_perimetro(self):
        """Return the rectangle's perimeter."""
        return 2 * (self.lado_x + self.lado_y)

    def calcula_area(self):
        """Return the rectangle's area."""
        return self.lado_x * self.lado_y
true
eae2e265f8f3e43e4d12769a41bffa1fe086b900
Python
jzeng1/PythonProject
/Word Search/proj2.py
UTF-8
11,196
3.78125
4
[]
no_license
# File: proj2.py
# Author: Jason Zeng
# Date: 6/10/2017
# Section: 22
# E-mail: jzeng1@umbc.edu
# Description: Reads a word-search grid and a word list, then reports for
#              each word whether it appears in the grid; when found, the
#              starting coordinate and direction are printed.

# Human-readable names for the short direction codes used while searching.
UP_DIRECTION = 'up'
DOWN_DIRECTION = 'down'
LEFT_DIRECTION = 'backwards left'
RIGHT_DIRECTION = 'right'
UPPER_RIGHTCORNER = 'diagonally up and right'
UPPER_LEFTCORNER = 'diagonally up and left'
BOTTOM_LEFTCORNER = 'diagonally down and left'
BOTTOM_RIGHTCONER = 'diagonally down and right'
NOT_FOUND = 'None'

# (code, row step, column step) for each direction, in the same order the
# original chain of if-blocks tried them: U, D, L, R, UR, UL, BL, BR.
DIRECTION_STEPS = [
    ('U', -1, 0),
    ('D', 1, 0),
    ('L', 0, -1),
    ('R', 0, 1),
    ('UR', -1, 1),
    ('UL', -1, -1),
    ('BL', 1, -1),
    ('BR', 1, 1),
]


def printGreetings():
    """Print the program greeting banner."""
    print("Welcome to the Word Search")
    print("For this, you will import two files: ")
    print()
    print("\t1. The word search grid")
    print("\t2. The word list")
    print()


def fileIoAccessWord():
    """Prompt for the word-list file and return its words as a list.

    Output: list of words, one per (stripped) line of the file.
    """
    listWords = []
    inputWord = input("What word list file would you like to import?: ")
    with open(inputWord, "r") as wordToFind:
        for eachLine in wordToFind:
            listWords.append(eachLine.strip())
    print()
    return listWords


def findFirstLetter(theBoard, theWord):
    """Return every [row, col] where theWord's first letter appears.

    Input:  theBoard - 2D list of single characters; theWord - string
    Output: list of [row, col] coordinate pairs
    """
    startList = []
    # Use each row's own length so non-square boards work too (the
    # original assumed a square board).
    for row in range(len(theBoard)):
        for col in range(len(theBoard[row])):
            if theBoard[row][col] == theWord[0]:
                startList.append([row, col])
    return startList


def checkTheDirection(directionOfFound):
    """Translate a short direction code ('U', 'BR', ...) into its
    printable name, or NOT_FOUND for an unknown code."""
    names = {
        'U': UP_DIRECTION,
        'D': DOWN_DIRECTION,
        'L': LEFT_DIRECTION,
        'R': RIGHT_DIRECTION,
        'UR': UPPER_RIGHTCORNER,
        'UL': UPPER_LEFTCORNER,
        'BL': BOTTOM_LEFTCORNER,
        'BR': BOTTOM_RIGHTCONER,
    }
    return names.get(directionOfFound, NOT_FOUND)


def checkForWord(board, row, column, word, numRows, numCols, direction):
    """Recursively check whether `word` starts at (row, column).

    direction is 'START' on the first call (all eight directions are
    tried, in DIRECTION_STEPS order); on recursive calls it is a single
    direction code so the search stays on one straight line.

    Output: (found, directionCode) tuple.  directionCode is the code of
    the first step taken from this cell when found, otherwise NOT_FOUND.
    (The original returned a bare False from its out-of-bounds base case,
    which would have crashed callers unpacking a two-tuple.)
    """
    # Base cases: off the board, or the letter here does not match.
    if row < 0 or row > numRows - 1 or column < 0 or column > numCols - 1:
        return (False, NOT_FOUND)
    if board[row][column] != word[0]:
        return (False, NOT_FOUND)
    # Last letter matched: the whole word has been found.
    if len(word) == 1:
        return (True, NOT_FOUND)

    for code, rowStep, colStep in DIRECTION_STEPS:
        # After the first step, only keep moving in the same direction.
        if direction != 'START' and direction != code:
            continue
        nextRow = row + rowStep
        nextCol = column + colStep
        if 0 <= nextRow < numRows and 0 <= nextCol < numCols:
            found, _ = checkForWord(board, nextRow, nextCol, word[1:],
                                    numRows, numCols, code)
            if found:
                return (True, code)
    return (False, NOT_FOUND)


def updatedCoordinates(listBoard, myWord, numRows, numColumns):
    """Search the whole board for myWord and print the result.

    Prints the starting coordinate and direction for the first match
    (scanning row-major), or a not-found message otherwise.
    """
    for indexRows in range(numRows):
        for indexColumns in range(numColumns):
            wordFound, startingDirection = checkForWord(
                listBoard, indexRows, indexColumns, myWord,
                numRows, numColumns, 'START')
            if wordFound:
                print("The word " + myWord + " starts in " + str(indexRows)
                      + ", " + str(indexColumns) + " and goes "
                      + checkTheDirection(startingDirection))
                return
    print("The word " + str(myWord) + " does not appear in the puzzle.")


def main():
    """Read the grid and word list, then report each word's location."""
    printGreetings()
    listBoard = []
    inputSearch = input("What word search grid file would you like to import?: ")
    with open(inputSearch, "r") as wordBoard:
        for eachLine in wordBoard:
            listBoard.append(eachLine.split())
    # FIX: the original set both dimensions from the length of the LAST
    # row, which is only correct for square grids.
    numRows = len(listBoard)
    numColumns = len(listBoard[0]) if listBoard else 0
    theWord = fileIoAccessWord()
    for myWord in theWord:
        updatedCoordinates(listBoard, myWord, numRows, numColumns)


# Guard so importing this module does not prompt for files.
if __name__ == "__main__":
    main()
true
38a741af9274d326241911515f7dd8bf81a72a87
Python
pmacdougal/aioMonitor
/main.py
UTF-8
342
2.59375
3
[ "MIT" ]
permissive
#!/usr/bin/env python3
from socket import gethostname
from monitor.monitor import Monitor


def main():
    """Build a Monitor and run it against the home MQTT broker."""
    host = gethostname()
    Monitor().run(f'start Home from {host}', 'h.mqtt', '192.168.2.30')


# This is not a module, so run the main routine when executed
if __name__ == '__main__':
    main()
true
75b5b18a067a8769ec6ae7fc702b0a2b35e7368b
Python
fauske/raspiGrow
/relayBoot.py
UTF-8
174
2.546875
3
[]
no_license
import RPi.GPIO as GPIO

# Configure the four GPIO pins as outputs and drive them HIGH at boot.
# NOTE(review): HIGH presumably leaves the relays switched off on
# active-low relay boards — confirm against the actual wiring.
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)

RELAY_PINS = (22, 23, 24, 25)
for pin in RELAY_PINS:
    GPIO.setup(pin, GPIO.OUT)
    GPIO.output(pin, GPIO.HIGH)
true