blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
133
path
stringlengths
2
333
src_encoding
stringclasses
30 values
length_bytes
int64
18
5.47M
score
float64
2.52
5.81
int_score
int64
3
5
detected_licenses
listlengths
0
67
license_type
stringclasses
2 values
text
stringlengths
12
5.47M
download_success
bool
1 class
4dd3ea070b60fb368bf102c6f2617c37d806c66f
Python
1superwen/VIPtest-base
/面向对象新/类的外面添加和获取属性.py
UTF-8
440
4.40625
4
[]
no_license
# 1 - Define a class.
class Washer:
    # Behaviour lives in methods; attributes are attached to the instance later.
    def wash(self):
        print(self)
        print('我会洗⾐衣服')


# 2 - Create an object: object = ClassName()
haier1 = Washer()

# 3 - Call a method through the object: object.method()
haier1.wash()
print(haier1)

# Attach attributes from outside the class.
haier1.width = 500
haier1.height = 800

# Read the attributes back.
print(f'haier1的宽度是:{haier1.width}')
print(f'haier1的高度是:{haier1.height}')
true
189bc4466873964d9aa565bee076b93536c99522
Python
mihaip/infinite-mac
/scripts/stickies.py
UTF-8
5,379
2.53125
3
[ "Apache-2.0" ]
permissive
from __future__ import annotations
import dataclasses
import datetime
import enum
import struct
import typing


class Font(enum.Enum):
    """Classic Mac OS font IDs used by Stickies."""
    CHICAGO = 0x0000
    GENEVA = 0x0001
    HELVETICA = 0x0015
    COURIER = 0x0016
    MONACO = 0x0004
    NEW_YORK = 0x0002
    PALATINO = 0x0010
    SYMBOL = 0x0017
    TIMES = 0x0014
    ZAPF_DINGBATS = 0x000D
    OSAKA = 0x4000


class Style(enum.Enum):
    """QuickDraw style bits; held as a set, serialized as a one-byte bitmask."""
    BOLD = 0x01
    ITALIC = 0x02
    UNDERLINE = 0x04
    OUTLINE = 0x08
    SHADOW = 0x10
    CONDENSED = 0x20
    EXTENDED = 0x40


class Color(enum.Enum):
    """Sticky note background colors."""
    YELLOW = 0x0000
    BLUE = 0x0001
    GREEN = 0x0002
    PINK = 0x0003
    PURPLE = 0x0004
    GRAY = 0x0005
    BLACK_WHITE = 0xFFFF


# 2082844800 is the number of seconds between the Mac epoch (January 1 1904)
# and the Unix epoch (January 1 1970). See
# http://justsolve.archiveteam.org/wiki/HFS/HFS%2B_timestamp
MAC_EPOCH_OFFSET = 2082844800


@dataclasses.dataclass
class Sticky:
    """One note in a classic Mac OS Stickies file (32-byte header + text)."""
    top: int  # 2 bytes
    left: int  # 2 bytes
    bottom: int  # 2 bytes
    right: int  # 2 bytes
    unknown: int = 0  # 8 bytes
    # Bug fix: these defaults used to be `datetime.datetime.now()`, evaluated
    # once at class-definition time, so every Sticky silently shared the
    # import-time timestamp. default_factory gives each instance a fresh one.
    creation_date: datetime.datetime = dataclasses.field(
        default_factory=datetime.datetime.now)  # 4 bytes
    modification_date: datetime.datetime = dataclasses.field(
        default_factory=datetime.datetime.now)  # 4 bytes
    font: Font = Font.GENEVA  # 2 bytes
    size: int = 9  # 1 byte
    style: typing.Set[Style] = dataclasses.field(default_factory=set)  # 1 byte
    color: Color = Color.YELLOW  # 2 bytes
    text: str = ""  # 2 bytes of length + data
    # Skip this note when generating the TeachText (ttxt) version of Stickies.
    skip_in_ttxt: bool = False
    skip_in_stickies: bool = False

    # Big-endian fixed-size header layout; marked ClassVar so the dataclass
    # machinery does not treat it as a field.
    STRUCT: typing.ClassVar[struct.Struct] = struct.Struct(">hhhh Q LL HBBH H")

    @staticmethod
    def from_bytes(data: bytes,
                   encoding: str = "mac_roman") -> typing.Tuple[Sticky, int]:
        """Parse one Sticky from the start of `data`.

        Returns (sticky, bytes_consumed). Bug fix: the annotation previously
        claimed `-> Sticky` although a tuple has always been returned (and is
        relied upon by StickiesFile.from_bytes).
        """
        (top, left, bottom, right, unknown, creation_date_raw,
         modification_date_raw, font, size, style_raw, color,
         text_length) = Sticky.STRUCT.unpack(data[:32])
        creation_date = datetime.datetime.fromtimestamp(creation_date_raw -
                                                        MAC_EPOCH_OFFSET)
        modification_date = datetime.datetime.fromtimestamp(
            modification_date_raw - MAC_EPOCH_OFFSET)
        style = set()
        for s in Style:
            if style_raw & s.value:
                style.add(s)  # s is already a Style; no re-wrapping needed
        text = data[32:32 + text_length].decode(encoding)
        return Sticky(top, left, bottom, right, unknown, creation_date,
                      modification_date, Font(font), size, style, Color(color),
                      text), 32 + text_length

    def to_bytes(self, encoding: str = "mac_roman") -> bytes:
        """Serialize this Sticky (header + encoded text)."""
        creation_date_raw = int(
            self.creation_date.timestamp()) + MAC_EPOCH_OFFSET
        modification_date_raw = int(
            self.modification_date.timestamp()) + MAC_EPOCH_OFFSET
        style_raw = 0
        for s in self.style:
            style_raw |= s.value
        # Classic Mac OS uses \r line endings.
        text_raw = self.text.replace("\n", "\r").encode(encoding)
        text_length = len(text_raw)
        return Sticky.STRUCT.pack(
            self.top, self.left, self.bottom, self.right, self.unknown,
            creation_date_raw, modification_date_raw, self.font.value,
            self.size, style_raw, self.color.value, text_length) + text_raw


@dataclasses.dataclass
class StickiesFile:
    """A whole Stickies database: 4-byte header + count + serialized notes."""
    header: int = 0x00030003  # 4 bytes
    stickies: typing.List[Sticky] = dataclasses.field(
        default_factory=list)  # 2 bytes of length + data

    STRUCT: typing.ClassVar[struct.Struct] = struct.Struct(">L H")

    @staticmethod
    def from_bytes(data: bytes, encoding: str = "mac_roman") -> StickiesFile:
        """Parse a full Stickies file from `data`."""
        stickies = []
        header, sticky_count = StickiesFile.STRUCT.unpack(data[:6])
        offset = 6
        for _ in range(sticky_count):
            sticky, sticky_size = Sticky.from_bytes(data[offset:], encoding)
            stickies.append(sticky)
            offset += sticky_size
        return StickiesFile(header, stickies)

    def to_bytes(self, encoding: str = "mac_roman") -> bytes:
        """Serialize, omitting notes flagged skip_in_stickies."""
        stickies_raw = b""
        count = 0
        for sticky in self.stickies:
            if sticky.skip_in_stickies:
                continue
            stickies_raw += sticky.to_bytes(encoding)
            count += 1
        return StickiesFile.STRUCT.pack(self.header, count) + stickies_raw

    def to_ttxt_bytes(self, encoding: str = "mac_roman") -> bytes:
        """Flatten the notes (newest-last order reversed) into TeachText bytes."""
        text = ""
        for sticky in reversed(self.stickies):
            if sticky.skip_in_ttxt:
                continue
            if text:
                text += "\r\r"
            text += sticky.text.replace("\n", "\r")
        return text.encode(encoding)


def generate_placeholder() -> bytes:
    """Build a large placeholder Stickies file (used to reserve disk space)."""
    text = "Placeholder text"
    for i in range(1000):
        text += f" sticky {i}"
    timezone = datetime.timezone(datetime.timedelta(hours=13))
    placeholder_date = datetime.datetime(1984, 1, 24, tzinfo=timezone)
    sticky = Sticky(top=10,
                    left=10,
                    bottom=50,
                    right=50,
                    creation_date=placeholder_date,
                    modification_date=placeholder_date,
                    text=text)
    return StickiesFile(stickies=[sticky]).to_bytes()


def generate_ttxt_placeholder() -> bytes:
    """Build the TeachText counterpart of the placeholder."""
    text = "Placeholder text"
    for i in range(1000):
        text += f" text {i}"
    return text.encode("mac_roman")
true
4c06733c1730febe69c8732e09ef0f75a7f38470
Python
Lohithayenugu/BigDataProgramming
/Lab2/Task3/TwitterStreamming.py
UTF-8
657
2.65625
3
[]
no_license
import os

from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from collections import namedtuple

# Fix: "C:\Spark" relied on "\S" not being a recognized escape sequence
# (a SyntaxWarning on modern Python); the raw string has the same value
# but is explicit and future-proof.
os.environ["SPARK_HOME"] = r"C:\Spark"


def main():
    """Stream words from localhost:6000 and print running word counts."""
    sc = SparkContext(appName="WordCount")
    # 5-second micro-batches.
    ssc = StreamingContext(sc, 5)
    lines = ssc.socketTextStream("localhost", 6000)
    fields = ("word", "count")
    # NOTE(review): the namedtuple typename 'Text' does not match the
    # variable name Tweet; kept as-is since it affects the printed repr.
    Tweet = namedtuple('Text', fields)
    counts = lines.flatMap(lambda text: text.split(" "))\
        .map(lambda word: (word, 1))\
        .reduceByKey(lambda x, y: x + y).map(lambda rec: Tweet(rec[0], rec[1]))
    counts.pprint()
    ssc.start()
    ssc.awaitTermination()


if __name__ == "__main__":
    main()
true
83b40e29e80f0329d7e7bb0a02d8cf828f52d368
Python
MoonriseStreet/BattleShip
/BattleShip/strategy/DefendStrategy.py
UTF-8
541
3.125
3
[]
no_license
from strategy.Strategy import Strategy
from random import randrange
from const import MEDIUM


class DefendStrategy(Strategy):
    """Defensive AI strategy: every MEDIUM time units, maybe buy a unit."""

    def __init__(self):
        super().__init__()
        # Time accumulated since the last spawn decision.
        self.time = 0

    def on_update(self, delta_time, info: list):
        """Advance the internal timer; once it reaches MEDIUM, reset it and
        return a unit choice based on the available money (info[0])."""
        self.time += delta_time
        if self.time < MEDIUM:
            return None
        self.time = 0
        return self.new_unit(info[0])

    def new_unit(self, money) -> int:
        """Return a random unit id (0 or 1) when money is available, else 0."""
        if int(money) <= 0:
            return 0
        return randrange(100) % 2
true
c49f735fd04b3100f016c3eea3f5115ae60178d8
Python
shardcore/skypebot
/commands/commandscratch.py
UTF-8
1,052
2.9375
3
[]
no_license
# coding=UTF-8
from string import Template
import random


## Copypaste this example command to create new scratch commands
## Leave this one alone so new commands can be made from it.
class ExampleCommand(object):
    """Example scratch command: emits a canned emote aimed at the sender."""

    def __init__(self):
        self.templates = [Template("lobs a dolphin at $name.")]

    def execute(self, message):
        """Pick a template at random and build a '/me ...' emote string."""
        sender = message.FromDisplayName
        chosen = random.choice(self.templates)
        return "/me %s" % chosen.substitute(name=sender)


## !satan command below here - GP
class SatanCommand(object):
    """The !satan command: responds with one of several canned emotes."""

    def __init__(self):
        self.templates = [
            Template("considers the infomorph aesthetic."),
            Template("draws a martini."),
            Template("wears a nice hat."),
            Template("cooks up a nice batch of wasabi dumplings."),
            Template("shuffles to some botstep."),
            Template("rolls up."),
        ]

    def execute(self, message):
        """Pick a template at random and build a '/me ...' emote string."""
        sender = message.FromDisplayName
        chosen = random.choice(self.templates)
        return "/me %s" % chosen.substitute(name=sender)
true
425e10755ab29d21a10e99ef638e341df33b4ce8
Python
2Charles/correlation-analysis
/corrlab.py
UTF-8
15,491
3.015625
3
[]
no_license
#-*- coding:utf-8 -*-
import pandas as pd
import re
import matplotlib.pylab as plt
import seaborn as sns
import os
import gc


class corrAna(object):
    '''Correlation analysis over intraday futures tick data.

    need to input three parameters to initialize, type controls rolling or
    aggravated 0 for rolling, 1 for aggravated (2 returns both);
    level : 0 for major option, 1 for secondary, 2 for third

    NOTE(review): much of this module appears written for Python 2 / an older
    pandas (e.g. boolean indexing with a `map` object, `fillna(method=...)`).
    '''

    def __init__(self, filedir, start_date, end_date, type, level = 0):
        # filedir: directory containing <YYYYMMDD>.dat.gz tick files.
        self.filedir = filedir
        self.start_date = start_date
        self.end_date = end_date
        self.type = type          # 0 rolling, 1 aggravated, 2 both
        self.level = level        # contract rank (only 0 = major implemented)
        self.symbolDict = {}      # date(range) key -> {prefix: full ticker}

    def generateDayLst(self, start=None, end=None ):
        '''Return business days between start_date and end_date as YYYYMMDD strings.

        NOTE(review): when `start` is given the range is start..start (a single
        day) and the `end` argument is ignored — confirm this is intended.
        '''
        if start == None:
            days = pd.date_range(start=self.start_date, end=self.end_date, freq='B')
        else:
            days = pd.date_range(start=start, end=start, freq='B')
        dayLst = []
        for day in days:
            temp = day.strftime('%Y-%m-%d').split('-')
            day = temp[0]+temp[1]+temp[2]
            dayLst.append(day)
        return dayLst

    def loaddata(self, day, split = 2):
        '''only load single day
        split controls split one sec into how many parts'''
        if type(day) == type('a'):
            dir = self.filedir + day + '.dat.gz'
        if type(day) == type(1):
            dir = self.filedir + str(day) + '.dat.gz'
        temp = pd.read_csv(dir, header=None, index_col=0, compression='gzip',
                           names=['ticker', 'bid_price', 'bid_volume', 'ask_price', 'ask_volume',
                                  'last_price', 'last_volume', 'open_interest', 'turnover'])
        # Round the raw timestamps to the 500ms/250ms grid and index by them.
        self.timeIndex(temp, day, split=split)
        temp.sort_index(inplace=True)
        # Keep only rows inside the two trading sessions (09:00-11:30, 13:30-15:00).
        if split == 2:
            timerange1 = pd.date_range(day+' 09', day+' 11:30', freq = '500ms')
            timerange2 = pd.date_range(day + ' 13:30', day + ' 15', freq='500ms')
        elif split == 4:
            timerange1 = pd.date_range(day + ' 09', day + ' 11:30', freq='250ms')
            # NOTE(review): missing space in day + '13:30' (vs ' 13:30' above) —
            # likely a latent bug in the split == 4 path.
            timerange2 = pd.date_range(day + '13:30', day + ' 15', freq='250ms')
        # NOTE(review): under Python 3 `map` returns an iterator; pandas boolean
        # indexing may require list(...) here — presumably Python 2 code.
        flag = map(lambda x: (x in timerange1) or (x in timerange2), temp.index.values)
        temp = temp[flag]
        return temp

    def timeIndex(self, df, date, split = 2):
        '''trim time into 500ms or 250ms and change it into timeseries and set as index'''
        lst = list(df.index.values)
        year, month, day = date[:4],date[4:6],date[6:]
        res = []
        for time in lst:
            # s = [HH, MM, SS, milliseconds]
            s = re.split(r'[:.]', time)
            if split == 2:
                if int(s[-1]) <= 500:
                    # Round milliseconds up to the .500 bucket.
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '500'
                elif int(s[-1]) < 1000:
                    # Round up to the next whole second, carrying into
                    # minutes/hours when the seconds/minutes overflow.
                    s[-2] = str(int(s[-2]) + 1)
                    if int(s[-2]) == 60:
                        s[-3] = str(int(s[-3]) + 1)
                        s[-2] = '00'
                        if int(s[-3]) == 60:
                            s[-3] = '00'
                            s[-4] = str(int(s[-4]) + 1)
                    elif len(s[-2]) == 1:
                        # Re-pad single-digit seconds after the increment.
                        s[-2] = '0' + s[-2]
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '000'
            elif split == 4:
                # Same idea with 250ms buckets: .250 / .500 / .750 / next .000.
                if int(s[-1]) <= 250:
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '250'
                elif int(s[-1]) <= 500:
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '500'
                elif int(s[-1]) <= 750:
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '750'
                elif int(s[-1]) < 1000:
                    s[-2] = str(int(s[-2]) + 1)
                    if int(s[-2]) == 60:
                        s[-3] = str(int(s[-3]) + 1)
                        s[-2] = '00'
                        if int(s[-3]) == 60:
                            s[-3] = '00'
                            s[-4] = str(int(s[-4]) + 1)
                    elif len(s[-2]) == 1:
                        s[-2] = '0' + s[-2]
                    s = s[0] + ':' + s[1] + ':' + s[2] + '.' + '000'
            s = year + '-' + month + '-' + day + ' ' + s
            res.append(s)
        df.index = pd.DatetimeIndex(res)

    def filterdata(self, df, lst, threshold = 1000):
        '''lst is a list of option that want to keep from raw dataframe

        Builds one return column per ticker (rolling, aggravated or both,
        depending on self.type), aligned to the day's full timestamp grid.
        Tickers with fewer than `threshold` rows are dropped.
        '''
        if self.type == 2:
            # return both rolling and aggravated
            align_base = self.get_align_base(df)
            res = pd.DataFrame()
            for name in lst:
                temp = df[df['ticker'] == name]
                if temp.shape[0] < threshold:
                    continue
                else:
                    self.calcAll(temp)
                    temp = temp.rename(columns={'aggravated_return': name[:2] + str(self.level)+'_agg',
                                                'rolling_return': name[:2] + str(self.level)+'_rolling'})
                    temp = pd.DataFrame(temp.loc[:, [name[:2] + str(self.level)+'_agg', name[:2] + str(self.level)+'_rolling']])
                    temp = self.align_drop(data=temp, base=align_base)
                    res = pd.concat([res, temp], axis=1)
        else:
            # Single-series mode: pick which return column to keep.
            if self.type == 1:
                keywd = 'aggravated_return'
            else:
                keywd = 'rolling_return'
            align_base = self.get_align_base(df)
            res = pd.DataFrame()
            for name in lst:
                temp = df[df['ticker'] == name]
                if temp.shape[0] < threshold:
                    continue
                else:
                    self.calcAll(temp)
                    temp = temp.rename(columns={keywd: name[:2]+str(self.level)})
                    temp = pd.DataFrame(temp.loc[:, name[:2]+str(self.level)])
                    temp = self.align_drop(data=temp, base=align_base)
                    res = pd.concat([res, temp], axis=1)
        # Forward- then back-fill gaps created by the alignment.
        res.fillna(method='ffill', axis=0, inplace=True)
        res.fillna(method='bfill', axis=0, inplace=True)
        return res

    def concatdata(self, dayLst, filterLst = 'major', split = 2):
        '''load multidays and filter and concat together
        split means split one second into how many parts, choose from [2,4]

        Returns a single return matrix, or (agg, rolling) when self.type == 2.
        '''
        if len(dayLst) == 1:
            symbolKey = dayLst[0]
        else:
            symbolKey = dayLst[0]+'-'+dayLst[-1]
        temp = self.loaddata(day=dayLst[0], split=split)
        if filterLst == 'major':
            # Default: keep only each product's dominant (most-traded) contract.
            major = self.findMostInType(temp)
            self.recordSymbol(symbolKey, major)
            filterLst = major.values()
        res = self.filterdata(temp, lst=filterLst)
        del temp; gc.collect()
        if len(dayLst) > 1:
            for day in dayLst[1:]:
                temp = self.loaddata(day=day, split = split)
                # Dominant contract is re-selected per day.
                major = self.findMostInType(temp)
                filterLst = major.values()
                self.recordSymbol(symbolKey, major)
                res0 = self.filterdata(temp, lst = filterLst)
                res = pd.concat([res, res0])
                del temp, res0; gc.collect()
        if self.type != 2:
            return res
        if self.type == 2:
            # Split the combined frame into '*_agg' and '*_rolling' columns.
            agg_flag = [True if 'agg' in col_name else False for col_name in res.columns.values]
            rolling_flag = [True if 'agg' not in col_name else False for col_name in res.columns.values]
            agg_res, rolling_res = res[res.columns.values[agg_flag]], res[res.columns.values[rolling_flag]]
            return agg_res, rolling_res

    def recordSymbol(self, date, symbolLst):
        '''record symbol and ticker'''
        self.symbolDict[date] = symbolLst

    def sampledata(self, data, period, how = 'first'):
        '''Resample `data` to `period` using first/mean/last aggregation.

        NOTE(review): an unknown `how` leaves `res` unbound -> NameError.
        '''
        df = data.copy()
        if how == 'first':
            res = df.resample(period).first()
        if how == 'mean':
            res = df.resample(period).mean()
        if how == 'last':
            res = df.resample(period).last()
        del df; gc.collect()
        res.dropna(how = 'all',axis = 0, inplace=True)
        return res

    def shift_align(self, data, target, lag, align_base):
        '''first shift data of target colume at lag and then align it to origin dataframe

        `lag` is e.g. '5s': everything but the last char is the amount,
        the last char the pandas frequency unit.
        '''
        if len(target) == 2:
            target = target
        else:
            # Reduce e.g. 'ru0' to its two-character product prefix.
            target = target[:2]
        df = data.copy()
        targetCol = self.getsymbol(df, target)
        temp = pd.DataFrame(df[targetCol].shift(periods=-int(lag[:-1]), freq = lag[-1]))
        temp = self.align_drop(data=temp, base = align_base)
        df[targetCol] = temp
        return df

    def get_align_base(self, df):
        # The align base comes from the datetime index of the raw data.
        '''get index as the align base for later align'''
        align_base = pd.DataFrame([1 for i in range(df.shape[0])],index=df.index)
        # Use a helper column to drop duplicate timestamps.
        align_base['helper'] = align_base.index
        align_base.drop_duplicates(subset='helper', inplace=True)
        align_base.drop('helper', axis=1, inplace=True)
        return align_base

    def align_drop(self, data, base):
        '''align target data to base index'''
        df = data.copy()
        _, df = base.align(df, join='left', axis = 0)
        df = pd.DataFrame(df)
        # Same dedup trick as in get_align_base.
        df['helper'] = df.index
        df.drop_duplicates(subset = 'helper', inplace=True)
        df.drop('helper', axis=1, inplace=True)
        return df

    def getsymbol(self, lst, ticker):
        # Map a (possibly suffixed) product prefix to its full column name.
        '''column name according to ticker
        as column name maybe ru0 or ru1 or ru2 and use this function to find symbol'''
        if '0' == ticker[-1]:
            ticker = ticker[:-1]
        if len(ticker) == 3:
            ticker = ticker[:2]
        if len(ticker) == 1:
            ticker = ticker + '1'
        for name in lst:
            if ticker == name[:2]:
                return name

    def midPrice(self, df):
        # Compute mid_price from bid/ask; fall back row-by-row when either
        # side can be zero (missing quote).
        flag = (df.ask_price * df.bid_price) != 0
        if flag.all():
            df.loc[:, 'mid_price'] = (df.ask_price + df.bid_price) / 2
        else:
            bid_index, ask_index = 1, 3
            mid_price = []
            for i in range(df.shape[0]):
                if (df.iloc[i,bid_index] != 0) and (df.iloc[i,ask_index] != 0):
                    mid_price.append((df.iloc[i,bid_index] + df.iloc[i,ask_index])/2)
                elif df.iloc[i,bid_index] == 0:
                    mid_price.append(df.iloc[i, ask_index])
                # NOTE(review): duplicated condition — this branch is
                # unreachable; it was presumably meant to test ask_index == 0
                # and append the bid. Kept as-is (doc-only pass).
                elif df.iloc[i,bid_index] == 0:
                    mid_price.append(df.iloc[i, bid_index])
                else:
                    mid_price.append(0)
            df.loc[:,'mid_price'] = mid_price
            # Zeros (both sides missing) are forward-filled.
            df.mid_price.replace(0,method='ffill', inplace=True)

    def rollingRet(self, df):
        '''Per-tick simple return of mid_price (0 when the base price is 0).'''
        res = [0]
        for i in range(1, df.shape[0]):
            if df.mid_price.values[i - 1] == 0:
                temp = 0
            else:
                temp = (df.mid_price.values[i] - df.mid_price.values[i - 1]) / df.mid_price.values[i - 1]
            res.append(temp)
        df.loc[:, 'rolling_return'] = res

    def aggravatedRet(self, df):
        '''Cumulative sum of the per-tick returns.'''
        df.loc[:, 'aggravated_return'] = df.loc[:, 'rolling_return'].values.cumsum()

    def calcAll(self, df):
        '''mid_price -> rolling_return -> aggravated_return, in place.'''
        self.midPrice(df)
        self.rollingRet(df)
        self.aggravatedRet(df)

    def filterName(self, lst):
        # Drop option contracts (and SR), keeping plain futures tickers.
        '''judge whether option or not'''
        ans = []
        for name in lst:
            if not ('-P-' in name or '-C-' in name or 'SR' in name):
                ans.append(name)
        return ans

    def findMostInType(self, df):
        # Find the dominant contract per product; secondary/third contract
        # selection (level 1/2) is not implemented yet.
        # NOTE(review): returns None when self.level != 0.
        if self.level == 0:
            dic = df.groupby('ticker')['turnover'].max()
            lst = dic.index.values
            lst = self.filterName(lst)
            existed = []
            length = {}   # prefix -> best max-turnover seen so far
            most = {}     # prefix -> ticker with that best turnover
            for name in lst:
                l = dic[name]
                if name[:2] in existed:
                    if l > length[name[:2]]:
                        most[name[:2]] = name
                        length[name[:2]] = l
                else:
                    existed.append(name[:2])
                    length[name[:2]] = l
                    most[name[:2]] = name
            return most

    def filtervolu(self, df, lst, threshold=1000, volu='ask_volume'):
        '''lst is a list of option that want to keep from raw dataframe

        Like filterdata, but keeps a volume column instead of returns.
        '''
        keywd = volu
        align_base = self.get_align_base(df)
        res = pd.DataFrame()
        for name in lst:
            temp = df[df['ticker'] == name]
            if temp.shape[0] < threshold:
                continue
            else:
                self.calcAll(temp)
                temp = temp.rename(columns={keywd: name[:2] + str(self.level)})
                temp = pd.DataFrame(temp.loc[:, name[:2] + str(self.level)])
                temp = self.align_drop(data=temp, base=align_base)
                res = pd.concat([res, temp], axis=1)
        res.fillna(method='ffill', axis=0, inplace=True)
        res.fillna(method='bfill', axis=0, inplace=True)
        return res

    def getvolu(self, dayLst, filterLst='major', split=2):
        '''load multidays and filter and concat together
        split means split one second into how many parts, choose from [2,4]'''
        if len(dayLst) == 1:
            symbolKey = dayLst[0]
        else:
            symbolKey = dayLst[0] + '-' + dayLst[-1]
        temp = self.loaddata(day=dayLst[0], split=split)
        if filterLst == 'major':
            major = self.findMostInType(temp)
            self.recordSymbol(symbolKey, major)
            filterLst = major.values()
        res = self.filtervolu(temp, lst=filterLst)
        del temp; gc.collect()
        if len(dayLst) > 1:
            for day in dayLst[1:]:
                temp = self.loaddata(day=day, split=split)
                major = self.findMostInType(temp)
                filterLst = major.values()
                self.recordSymbol(symbolKey, major)
                res0 = self.filtervolu(temp, lst=filterLst)
                res = pd.concat([res, res0])
                del temp, res0
                gc.collect()
        return res

    def appointedLst(self, data, lst):
        '''Select the columns of `data` matching the ticker prefixes in `lst`.'''
        tempLst = []
        for elem in lst:
            temp = self.getsymbol(data,elem)
            tempLst.append(temp)
        appointed = data.loc[:,tempLst]
        return appointed


def saveFigCsv(return_df, period, output_dir, date, figsize=(30,20), fontsize=10):
    # Output path is determined by output_dir only; period/date only affect
    # the generated file names. Saves a correlation heatmap plus the return
    # matrix and correlation matrix as CSV.
    fig,ax = plt.subplots(figsize = figsize)
    sns.set(font_scale=1.25)
    sns.heatmap(return_df.corr(), cmap='coolwarm', cbar=True, annot=True,square=True, fmt='.2f', annot_kws={'size': fontsize})
    plt.xticks(rotation=45, fontsize=fontsize)
    plt.yticks(rotation=0, fontsize=fontsize)
    plt.title(u'correlation heatmap of major option', fontsize=fontsize)
    dir = output_dir + '/'
    if not os.path.exists(dir):
        os.makedirs(dir)
    fig.savefig(dir + date + '_' + period + '.jpg')
    plt.close()
    # Drop the helper 'ind' column before persisting, if present.
    if 'ind' in return_df.columns.values:
        return_df.drop('ind', axis=1, inplace = True)
    return_df.to_csv(dir +date+'_'+period+'_return.csv')
    return_df.corr().to_csv(dir + date + '_' + period + '_corr.csv')


def findNstElem(retmat, ticker, k= 10):
    # Find the k columns most correlated with a single ticker.
    cols = retmat.corr().nlargest(k, ticker)[ticker].index
    return retmat.loc[:, cols]
true
d141f163edb8c1bb98726fa55ca7f91a256781a0
Python
nu-manycore/EX
/EX2_P/thread_sample.py
UTF-8
1,592
2.90625
3
[]
no_license
#!/usr/bin/env python3
from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import ProcessPoolExecutor
import time

num_list = [25000000, 20000000, 20076000, 14500000]
Is_Submit = False


# Application stand-in that just takes a long time.
def killing_time(number):
    # Collect every divisor-candidate i (up to 9999) with number % i == 1;
    # the full range is still scanned, which is the point of the benchmark.
    return [i for i in range(1, number + 1) if number % i == 1 and i <= 9999]


# --- Sequential baseline ---
start = time.time()
for num in num_list:
    killing_time(num)
stop = time.time()
print('Sequential: %.2f seconds' % (stop - start))

# --- Threads: may show no speed-up because of the CPython GIL ---
start = time.time()
if Is_Submit:
    # Using submit
    pool = ThreadPoolExecutor(max_workers=4)
    futures = [pool.submit(killing_time, num) for num in num_list]
    pool.shutdown()
else:
    # Using map
    with ThreadPoolExecutor(max_workers=4) as pool:
        result_list = list(pool.map(killing_time, num_list))
stop = time.time()
print('Thread: %.2f seconds' % (stop - start))

# --- Processes: true parallelism across cores ---
start = time.time()
if Is_Submit:
    # Using submit
    pool = ProcessPoolExecutor(max_workers=4)
    futures = [pool.submit(killing_time, num) for num in num_list]
    pool.shutdown()
else:
    # Using map
    with ProcessPoolExecutor(max_workers=4) as pool:
        result_list = list(pool.map(killing_time, num_list))
stop = time.time()
print('Process: %.2f seconds' % (stop - start))
true
235891198c65aec71392fa9bc60de49889277acd
Python
GauravBhardwaj/pythonDS
/FindMedian.py
UTF-8
796
3.59375
4
[]
no_license
__author__ = 'gbhardwaj'


def checkio(data):
    '''
    Return the median of `data`.

    Sort a copy of the input, then: for an even count average the two
    middle elements, for an odd count take the single middle element.

    :param data: iterable of numbers (list, range, ...)
    :return: the median (a float for even-length input)
    '''
    # Fix: sorted() instead of data.sort() — no longer mutates the caller's
    # list, and accepts any iterable (e.g. range()).
    data = sorted(data)
    length = len(data)
    if length % 2 == 0:
        # even element count: average the two middle values.
        # Fix: // keeps the indices integral under Python 3 as well.
        return float(data[length // 2] + data[(length // 2) - 1]) / 2
    else:
        return data[(length - 1) // 2]


# These "asserts" using only for self-checking and not necessary for auto-testing
# Fix: print(...) call form runs under both Python 2 and Python 3
# (the original `print checkio(...)` statements were Python-2-only syntax).
print(checkio([1, 2, 3, 4, 5]))
print(checkio([3, 1, 2, 5, 3]))
print(checkio([1, 300, 2, 200, 1]))
print(checkio([3, 6, 20, 99, 10, 15]))
print("Start the long test")
print(checkio(range(1000000)))
print("The local tests are done.")
true
be9e0704e02680b96ead5ca138fdb01d5fb7de3d
Python
ktsujino/sgns
/apply_mecab.py
UTF-8
549
2.5625
3
[]
no_license
import glob
import os

import MeCab

from document_reader import document_reader

if __name__ == '__main__':
    # Tokenize every document under IN_DIR with MeCab and write one
    # space-separated line of surface forms per document to OUT_FILE.
    mecab = MeCab.Tagger()
    IN_DIR = '../sese/wikipedia/text'
    OUT_FILE = './corpus.txt'
    # Fix: the output file was opened but never closed/flushed; the context
    # manager guarantees the corpus is fully written even on error.
    with open(OUT_FILE, 'w') as ofp:
        for line in document_reader(IN_DIR, split=False):
            words = []
            node = mecab.parseToNode(line)
            while node:
                # node.surface is empty for BOS/EOS (sentinel) nodes; skip those.
                word = node.surface
                if word:
                    words.append(word)
                node = node.next
            ofp.write('{}\n'.format(' '.join(words)))
true
02a9f7b040c327059dbf0be0e891f7bef53f0231
Python
sraddon/SUMO-V2X-Communication-Research-Platooning-and-CIM
/src/vehicle.py
UTF-8
2,632
2.65625
3
[ "MIT" ]
permissive
import traci


class Vehicle():
    """Thin wrapper around one SUMO vehicle, accessed through the TraCI API.

    A few properties (acceleration, length, max speed, route) are snapshotted
    at construction time; all other getters query TraCI live on each call.
    Setters are de-duplicated through `_setAttr` to avoid redundant TraCI
    round-trips.
    """

    def __init__(self, vehicle):
        # `vehicle` is the SUMO vehicle ID string.
        self._active = True
        self._acceleration = traci.vehicle.getAcceleration(vehicle)
        self._length = traci.vehicle.getLength(vehicle)
        self._maxSpeed = traci.vehicle.getMaxSpeed(vehicle)
        self._name = vehicle
        self._route = traci.vehicle.getRoute(vehicle)
        # Cache of the last value sent per setter name (see _setAttr).
        self._previouslySetValues = dict()

    def getAcceleration(self):
        # Construction-time snapshot, not the live value.
        return self._acceleration

    def isActive(self):
        return self._active

    def getEdge(self):
        return traci.vehicle.getRoadID(self.getName())

    def getLane(self):
        return traci.vehicle.getLaneID(self.getName())

    def getLaneIndex(self):
        return traci.vehicle.getLaneIndex(self.getName())

    def getLanePosition(self):
        return traci.vehicle.getLanePosition(self.getName())

    def getLanePositionFromFront(self):
        # Distance remaining to the end of the current lane.
        return traci.lane.getLength(self.getLane()) - self.getLanePosition()

    def getLeader(self):
        # Leader within a 20 m look-ahead (TraCI semantics apply when none).
        return traci.vehicle.getLeader(self.getName(), 20)

    def getLength(self):
        return self._length

    def getMaxSpeed(self):
        return self._maxSpeed

    def getName(self):
        return self._name

    def getRemainingRoute(self):
        # Slice of the cached route from the current route index onwards.
        return self._route[traci.vehicle.getRouteIndex(self.getName()):]

    def getRoute(self):
        return self._route

    def getSpeed(self):
        return traci.vehicle.getSpeed(self.getName())

    def setColor(self, color):
        self._setAttr("setColor", color)

    def setInActive(self):
        # Mark the wrapper inactive; _setAttr becomes a no-op afterwards.
        self._active = False

    def setImperfection(self, imperfection):
        self._setAttr("setImperfection", imperfection)

    def setMinGap(self, minGap):
        self._setAttr("setMinGap", minGap)

    def setTargetLane(self, lane):
        # Not routed through _setAttr: lane-change requests are always re-issued.
        traci.vehicle.changeLane(self.getName(), lane, 0.5)

    def setTau(self, tau):
        self._setAttr("setTau", tau)

    def setSpeed(self, speed):
        self._setAttr("setSpeed", speed)

    def setSpeedMode(self, speedMode):
        self._setAttr("setSpeedMode", speedMode)

    def setSpeedFactor(self, speedFactor):
        self._setAttr("setSpeedFactor", speedFactor)

    def _setAttr(self, attr, arg):
        # Only set an attribute if the value is different from the previous value set
        # This improves performance
        if self.isActive():
            if attr in self._previouslySetValues:
                if self._previouslySetValues[attr] == arg:
                    return
            self._previouslySetValues[attr] = arg
            getattr(traci.vehicle, attr)(self.getName(), arg)
true
d8007ab8402fa08c00c2f0e879e7faf6a0d81dde
Python
Aasthaengg/IBMdataset
/Python_codes/p03696/s680331200.py
UTF-8
123
3.09375
3
[]
no_license
n=int(input()) s=input() L=0 R=0 for i in s: if i==')': if R:R-=1 else:L+=1 else: R+=1 print('('*L+s+')'*R)
true
830145dbfb7d628eae6f25faaccba47bf70329ec
Python
mayumi04/laughing-memory
/kadai209_04.py
UTF-8
204
2.96875
3
[]
no_license
import json

# Load the JSON document and print each program entry's values on one line.
with open('text.json', 'r', encoding = 'utf_8') as f:
    data = json.load(f)

for program_list in data['program_list']:
    # Values are joined with single spaces, in dict insertion order.
    print(' '.join(list(program_list.values())))
true
39f5ba198f0fe43fc7f9a16a042029eebea39fa1
Python
AnilKOC/ts-prediction
/prediction/rmt_stockprice.py
UTF-8
792
2.71875
3
[]
no_license
from numpy import corrcoef
from pandas_datareader import data
import math

from .prediction import make_prediction


def rmt_data(stock_id, stock_id2, date, date2):
    """Fetch Yahoo closing prices for two tickers and correlate them."""
    close = data.DataReader(stock_id, 'yahoo', date, date2)['Close']
    close2 = data.DataReader(stock_id2, 'yahoo', date, date2)['Close']
    correlation(close, close2)


def correlation(close, close2):
    """Collect sliding-window correlation coefficients (window widths 4..5,
    NaNs dropped) and hand them to the predictor."""
    correlation_set = []
    correlation_degree = 5
    for window in range(4, correlation_degree + 1):
        print("Correlation degree = ", window)
        for start in range(len(close) - window + 1):
            corr = corrcoef(close[start:start + window],
                            close2[start:start + window])[0][1]
            if not math.isnan(corr):
                correlation_set.append(corr)
    make_prediction(correlation_set)
true
b3e80c3d7b1bbe50b795458ab368673182ac0175
Python
sevenhe716/LeetCode
/String/test_q383_ransom_note.py
UTF-8
424
2.734375
3
[]
no_license
import unittest

from String.q383_ransom_note import Solution


class TestRansomNote(unittest.TestCase):
    """Test q383_ransom_note.py"""

    def test_ransom_note(self):
        # (ransom, magazine, expected) — same cases, table-driven.
        solver = Solution()
        self.assertEqual(False, solver.canConstruct('a', 'b'))
        self.assertEqual(False, solver.canConstruct('aa', 'ab'))
        self.assertEqual(True, solver.canConstruct('aa', 'aab'))


if __name__ == '__main__':
    unittest.main()
true
683913f0a10515aef69a771b1832ecbbcc4d7b0f
Python
gpspelle/learning-python
/Python/exercises/guess.py
UTF-8
566
4.28125
4
[]
no_license
import random

# Number-guessing game: the player guesses a random digit in [1, 9].
val = random.randint(1, 9)
attempts = 0

while True:
    guess = input("Enter a number between 1 and 9. Or 'exit' to leave\n")
    if guess == "exit":
        break
    attempts += 1
    try:
        guess = int(guess)
    # Fix: int() raises ValueError on non-numeric input, not TypeError —
    # the original handler never fired and bad input crashed the game.
    except ValueError:
        print("Incorrect input... exitting")
        break
    if guess == val:
        print("You guessed the randomly sorted number!")
        print("Number of attempts: " + str(attempts))
        break
    elif guess > val:
        print("You guessed a higher number than the sorted one!")
    else:
        print("You guessed a smaller number than the sorted one!")
true
2d9a60efcc1d41e2d85d3e8a25f5dc0f4af652e9
Python
mzhuang1/Machine-Learning
/SupervisedLearning/NaiveBayesClassifier/ArticleTopicPrediction/article_topic_prediction.py
UTF-8
1,439
3.125
3
[]
no_license
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.naive_bayes import MultinomialNB

# Newsgroup categories used for both training and prediction.
categories = ['alt.atheism', 'soc.religion.christian', 'comp.graphics', 'sci.med']

# Training subset of 20newsgroups, restricted to those categories.
trainingData = fetch_20newsgroups(subset='train', categories=categories,
                                  shuffle=True, random_state=42)

# Bag-of-words counts ...
countVectorizer = CountVectorizer()
xTrainCounts = countVectorizer.fit_transform(trainingData.data)

# ... re-weighted as tf-idf.
tfidTransformer = TfidfTransformer()
xTrainTfidf = tfidTransformer.fit_transform(xTrainCounts)

# Multinomial naive Bayes fitted on the tf-idf matrix.
model = MultinomialNB().fit(xTrainTfidf, trainingData.target)

# Two unseen documents to classify, pushed through the same transforms.
new = ['This has nothing to do with church or religion',
       'Software engineering is getting hotter and hotter nowadays']
xNewCounts = countVectorizer.transform(new)
xNewTfidf = tfidTransformer.transform(xNewCounts)

predicted = model.predict(xNewTfidf)

# Print each document with its predicted category name.
for doc, category in zip(new, predicted):
    print('%r --------> %s' % (doc, trainingData.target_names[category]))
true
4c2d8f5c278f880e455af2090cfd9d26cc9012a2
Python
pedrograngeiro/Exercicios-Python
/exercicios cap 3/exercicio 3.8.py
UTF-8
297
4.21875
4
[]
no_license
# Read a length in metres and print it converted to millimetres.
# (1 metre = 1000 millimetres.)
comprimento = int(input("Digite o valor do comprimento: "))

milimetros = comprimento * 1000

print("Foi digitado %d metros. Que em milimetros são: %d" % (comprimento, milimetros))
true
032aabed9bc3397d6e03d2d0ecba9524c1f2fdbf
Python
diacaf/image-enhance-keras
/imgpatch.py
UTF-8
12,846
2.796875
3
[ "MIT" ]
permissive
# coding=utf8
# Patch-based super-resolution experiment: splits an image into overlapping
# patches (sklearn-style), keeps only the patches lying on a fixed step grid,
# and pastes them back into an image of the original size.
import PIL
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
import random
import numpy as np
import scipy
# NOTE(review): scipy.misc image I/O (imsave/imread/imresize) was removed in
# SciPy >= 1.2 — confirm the deployment pins an older SciPy.
from scipy.misc import imsave, imread, imresize
import numbers
from scipy import sparse
from numpy.lib.stride_tricks import as_strided
from itertools import product
from sklearn.feature_extraction.image import check_array
import os, sys

# Validation-set directory; `dirs`, `multiple` and `suffixNew` are prepared
# here but never read again in this script.
path = "/home/www/Image-Super-Resolution/val_images/set14nitre/"
dirs = os.listdir( path )
multiple=12 ; suffixNew="new"


def reconstruct_from_patches_2d(patches, image_size,step=16):
    """Paste every `step`-grid-aligned patch back into a zero image.

    Parameters
    ----------
    patches : array, shape = (n_patches, patch_height, patch_width[, n_channels])
        The complete (dense) set of patches, in row-major scan order.
    image_size : tuple of ints (image_height, image_width[, n_channels])
        Size of the image to reconstruct.
    step : int
        Only patches whose top-left corner (i, j) satisfies
        i % step == 0 and j % step == 0 are written; later patches
        overwrite earlier ones where they overlap.

    Returns
    -------
    image : array, shape = image_size
    """
    countstep_i=0
    countstep_j=0
    i_h, i_w = image_size[:2]
    p_h, p_w = patches.shape[1:3]
    img = np.zeros(image_size)
    # compute the dimensions of the patches array
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    print("Number of patches = %d, Patch Shape W H= (%d, %d)" % (patches.shape[0], n_h, n_w))
    # `product` yields (i, j) in the same scan order the patches were
    # extracted in, so patch p corresponds to top-left corner (i, j).
    for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
        if i % step==0 and j %step==0:
            # Overwrite (not accumulate) — no averaging of overlaps here.
            img[i:i + p_h, j:j + p_w] = p
            print("i and j = (%d, %d)" % (i, j))
            countstep_i+=1
            countstep_j+=1
    print (countstep_j)
    return img
    # ------------------------------------------------------------------
    # UNREACHABLE: the `return img` above exits first.  This is the
    # original sklearn overlap-averaging pass, kept disabled on purpose.
    # ------------------------------------------------------------------
    for i in range(i_h):
        for j in range(i_w):
            # divide by the amount of overlap
            # XXX: is this the most efficient way? memory-wise yes, cpu wise?
            img[i, j] /= float(min(i + 1, p_h, i_h - i) *
                               min(j + 1, p_w, i_w - j))
    return img


###############################################################################
# From an image to a set of small image patches

def _compute_n_patches(i_h, i_w, p_h, p_w, max_patches=None):
    """Return the number of patches that will be extracted from an image.

    Parameters
    ----------
    i_h, i_w : int
        Image height and width.
    p_h, p_w : int
        Patch height and width.
    max_patches : int or float in (0, 1), optional
        Cap on the number of patches; a float is treated as a proportion
        of the total number of patches.
    """
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    all_patches = n_h * n_w

    if max_patches:
        if (isinstance(max_patches, (numbers.Integral))
                and max_patches < all_patches):
            return max_patches
        elif (isinstance(max_patches, (numbers.Real))
                and 0 < max_patches < 1):
            return int(max_patches * all_patches)
        else:
            raise ValueError("Invalid value for max_patches: %r" % max_patches)
    else:
        return all_patches


def extract_patches(arr, patch_shape=8, extraction_step=1):
    """Extract patches of an n-dimensional array in place using strides.

    Returns a 2n-dimensional strided *view* (no data copied): the first n
    dimensions index patch position, the last n index patch content.
    `patch_shape` / `extraction_step` may be a single int (applied to every
    dimension) or a tuple of length ``arr.ndim``.
    """
    arr_ndim = arr.ndim

    if isinstance(patch_shape, numbers.Number):
        patch_shape = tuple([patch_shape] * arr_ndim)
    if isinstance(extraction_step, numbers.Number):
        extraction_step = tuple([extraction_step] * arr_ndim)

    patch_strides = arr.strides

    # Striding the array by the extraction step gives the stride between
    # consecutive patch anchor positions.
    slices = [slice(None, None, st) for st in extraction_step]
    indexing_strides = arr[slices].strides

    patch_indices_shape = ((np.array(arr.shape) - np.array(patch_shape)) //
                           np.array(extraction_step)) + 1

    shape = tuple(list(patch_indices_shape) + list(patch_shape))
    strides = tuple(list(indexing_strides) + list(patch_strides))

    patches = as_strided(arr, shape=shape, strides=strides)
    return patches


def extract_patches_2d(image, patch_size, max_patches=None, random_state=None):
    """Reshape a 2D (optionally multi-channel) image into dense patches.

    Parameters
    ----------
    image : array, shape = (image_height, image_width[, n_channels])
    patch_size : tuple of ints (patch_height, patch_width)
    max_patches : int or float, optional
        Cap on the number of patches (random sample).
        NOTE(review): this branch calls ``check_random_state`` which is
        never imported in this file — using it raises NameError; confirm
        it was meant to come from ``sklearn.utils``.
    random_state : int / RandomState / None
        Seed for the random sampling used when `max_patches` is set.

    Returns
    -------
    patches : array, shape = (n_patches, patch_height, patch_width[, n_channels])
    """
    i_h, i_w = image.shape[:2]
    p_h, p_w = patch_size

    if p_h > i_h:
        raise ValueError("Height of the patch should be less than the height"
                         " of the image.")

    if p_w > i_w:
        raise ValueError("Width of the patch should be less than the width"
                         " of the image.")

    image = check_array(image, allow_nd=True)
    image = image.reshape((i_h, i_w, -1))
    n_colors = image.shape[-1]

    extracted_patches = extract_patches(image,
                                        patch_shape=(p_h, p_w, n_colors),
                                        extraction_step=1)

    n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, max_patches)
    if max_patches:
        rng = check_random_state(random_state)
        i_s = rng.randint(i_h - p_h + 1, size=n_patches)
        j_s = rng.randint(i_w - p_w + 1, size=n_patches)
        patches = extracted_patches[i_s, j_s, 0]
    else:
        patches = extracted_patches

    patches = patches.reshape(-1, p_h, p_w, n_colors)

    # remove the color dimension if useless
    if patches.shape[-1] == 1:
        return patches.reshape((n_patches, p_h, p_w))
    else:
        return patches


def reconstruct_from_patches_2dlocal(patches,patchcnn, image_size,step=16):
    """Paste processed patches back onto the `step` grid of a zero image.

    `patches` supplies only the scan-order positions; the pixel data that is
    actually written comes from `patchcnn[cnt]`, consumed sequentially —
    `patchcnn` is assumed to hold exactly the grid-aligned patches in the
    same order (as produced by extract_patches_2dlocal).
    """
    countstep_i=0
    countstep_j=0
    i_h, i_w = image_size[:2]
    p_h, p_w = patches.shape[1:3]
    img = np.zeros(image_size)
    # compute the dimensions of the patches array
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    cnt=0
    for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
        if i % step==0 and j %step==0:
            # Write the processed patch, not the original one.
            img[i:i + p_h, j:j + p_w] = patchcnn[cnt]
            cnt+=1
    # Normalisation pass: dividing by 1 is a no-op, kept as a placeholder
    # for the (commented-out) sklearn overlap averaging.
    cnt_i=0
    cnt_j=0
    for i in range(i_h):
        for j in range(i_w):
            if i % step==0 and j %step==0:
                img[i, j] /= 1
            cnt_j+=1
        cnt_i+=1
        cnt_j =0
    return img


def extract_patches_2dlocal(image,patches, patch_size, step=None):
    """Select, from the dense patch set, only `step`-grid-aligned patches.

    Two passes over `patches`: the first counts grid-aligned patches (and
    writes them into a scratch image, whose content is discarded), the
    second copies them into a freshly allocated array.  The output is
    hard-coded to 3 channels.
    """
    i_h, i_w = image.shape[:2]
    p_h, p_w = patch_size

    if p_h > i_h:
        raise ValueError("Height of the patch should be less than the height"
                         " of the image.")

    if p_w > i_w:
        raise ValueError("Width of the patch should be less than the width"
                         " of the image.")

    p_h, p_w = patches.shape[1:3]
    img = np.zeros(image.shape)
    # compute the dimensions of the patches array
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    nb_patch_new=0
    # Pass 1: count how many patches fall on the step grid.
    for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
        if i % step==0 and j %step==0:
            img[i:i + p_h, j:j + p_w] = p
            nb_patch_new+=1
    new_patch= np.zeros( (nb_patch_new,p_h,p_w,3))
    nb_patch_cnt=0
    # Pass 2: copy the grid-aligned patches into the output array.
    for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
        if i % step==0 and j %step==0:
            new_patch[nb_patch_cnt] = p
            nb_patch_cnt+=1
    print (nb_patch_new)
    return new_patch


# --- Driver: extract grid patches from one image and rebuild it -------------
step_patch=16
patch_size=128
path="/home/www/Image-Super-Resolution/imapatch/"
filepatched="PATCHEDz_woman_GT.bmp"
name="z_woman_GT.bmp"
ima=imread(path+name, mode='RGB')
patches = extract_patches_2d(ima, (patch_size, patch_size))
print(patches.shape)
patches_nn = extract_patches_2dlocal(ima,patches, (patch_size, patch_size) ,step=step_patch)
print(patches_nn.shape)
recon = reconstruct_from_patches_2dlocal(patches,patches_nn, ima.shape ,step=step_patch)
imsave(path+filepatched, recon)
true
c6bebadd082215c29fee1937b620fe3193087806
Python
DavidHdezU/InterviewPracticeProblems
/LinkedLists/ReverseLinkedList.py
UTF-8
794
4
4
[ "MIT" ]
permissive
class ListNode:
    """A node of a singly linked list."""

    def __init__(self, val, next = None):
        self.val = val
        self.next = next


def reverse(head):
    """Reverse the list starting at *head* in place; return the new head."""
    reversed_head = None
    node = head
    while node is not None:
        # Re-point the current node backwards and advance, in one step.
        node.next, reversed_head, node = reversed_head, node, node.next
    return reversed_head


def print_list(head):
    """Render the list as 'v1->v2->...->vn' (empty string for no nodes)."""
    values = []
    node = head
    while node is not None:
        values.append(str(node.val))
        node = node.next
    return "->".join(values)


# Demo: dummy head 0 followed by 1 -> 2 -> 3; print, reverse, print again.
linkedList = ListNode(0)
tail = linkedList
for value in (1, 2, 3):
    tail.next = ListNode(value)
    tail = tail.next

print(print_list(linkedList.next))
linkedList.next = reverse(linkedList.next)
print(print_list(linkedList.next))
true
ad5652e7fc4ed8786f17bcfe2ec0a0054dcf6bb6
Python
matevz-spacapan/IEPS
/pa1/crawler/database/connector.py
UTF-8
2,040
2.546875
3
[]
no_license
from contextlib import contextmanager

from psycopg2 import connect, extras
from threading import Lock


class Database:
    """Thread-safe wrapper around a single shared psycopg2 connection.

    One connection and one cursor are cached on the instance; concurrent
    use is serialised with a Lock around every transaction.
    """

    def __init__(self, autoconnect=True):
        """Build the DSN from hard-coded credentials and optionally connect.

        Args:
            autoconnect (bool): open the connection immediately when True.
        """
        super(Database, self).__init__()
        self.DEBUG = False
        self.lock = Lock()
        self.username = 'user'
        self.password = 'SecretPassword'
        self.host = 'localhost'
        self.port = '5432'
        self.dbname = 'test'
        # NOTE(review): self.port is never included in the DSN string below,
        # so the psycopg2 default port is used — confirm that is intended.
        self.DSN = "dbname='%s' user='%s' host='%s' password='%s'" \
            % (self.dbname, self.username, self.host, self.password)
        # Every cursor returns rows as dicts keyed by column name.
        self.cursor_factory = extras.RealDictCursor
        self.connection, self.cursor = None, None
        if autoconnect:
            self.connection = self.get_connection()

    def test(self):
        """Run a trivial query to prove the connection works."""
        with self.transaction() as cur:
            cur.execute("SELECT 1;")
        return True

    def set_debug(self, value=True):
        """Toggle the DEBUG flag."""
        self.DEBUG = value

    def get_connection(self):
        """Return the cached connection, reconnecting if it was closed."""
        if self.connection and not self.connection.closed:
            return self.connection
        else:
            conn = connect(self.DSN)
            self.connection = conn
            return self.connection

    def get_cursor(self, conn=None):
        """Return the cached cursor, creating one on *conn* if needed.

        NOTE(review): if self.cursor is still open it is returned as-is,
        even though it may have been created on an earlier connection than
        the one just obtained — confirm this is acceptable.
        """
        if not conn:
            conn = self.get_connection()
        if conn.closed:
            conn = self.get_connection()
        if self.cursor and not self.cursor.closed:
            return self.cursor
        self.cursor = conn.cursor(cursor_factory=self.cursor_factory)
        return self.cursor

    @contextmanager
    def transaction(self):
        """Context manager yielding a cursor inside a locked transaction.

        Commits on success, rolls back and re-raises on any exception, and
        always closes the cursor.
        """
        con, cur = None, None
        with self.lock:
            try:
                con = self.get_connection()
                cur = self.get_cursor(con)
                yield cur
                con.commit()
            except Exception as e:
                if con:
                    con.rollback()
                raise e
            finally:
                if cur:
                    cur.close()
                # Deliberately disabled: the connection is cached and reused
                # across transactions, so it must not be closed here.
                if False and con:
                    con.close()
true
902f830aa24996a8fb2897e02f8c211106b18a7b
Python
VictorWinberg/fjuo52
/HA1/C1.py
UTF-8
4,415
3.09375
3
[]
no_license
# Toy RSA blind-signature / e-cash demo (textbook RSA — not secure for real use).
import binascii
import hashlib
from sys import argv
from random import randrange, sample
from functools import reduce


def get_n(p, q):
    """RSA modulus n = p * q."""
    return p * q


def totient(p, q):
    """Euler's totient of n for distinct primes p, q."""
    return (p - 1) * (q - 1)


def modinv_x(x, a, n):
    """Multiply hex/int value x by a's modular inverse mod n; return hex."""
    x = toInt(x) if type(x) != int else x
    return hex(modinv(a, n) * x % n)


# (a * a_inv) == 1 (mod n) => a_inv = modinv(a) (mod n)
# copied from https://en.wikibooks.org/wiki/Algorithm_Implementation/Mathematics/Extended_Euclidean_algorithm#Python
def modinv(a, n):
    """Modular inverse of a mod n; implicitly returns None if gcd(a, n) != 1."""
    g, x, _ = euc_algorithm(a, n)
    if g == 1:
        return x % n


# copied from https://en.wikibooks.org/wiki/Algorithm_Implementation/Mathematics/Extended_Euclidean_algorithm#Python
def euc_algorithm(a, n):
    """Extended Euclid: return (gcd, x, y) with a*x + n*y == gcd."""
    x0, x1, y0, y1 = 1, 0, 0, 1
    while n != 0:
        q, a, n = a // n, n, a % n
        x0, x1 = x1, x0 - q * x1
        y0, y1 = y1, y0 - q * y1
    return a, x0, y0


# Encryption, decryption and blind signatures
def encrypt(m, e, n):
    """Textbook-RSA encrypt UTF-8 string m; ciphertext returned as latin-1 text."""
    m_int = byte_to_int(m.encode('utf-8'))
    # The message, as an integer, must be smaller than the modulus.
    if m_int != m_int % n:
        raise Exception('Max length of message exceeded')
    c_int = pow(m_int, e, n)
    c = int_to_byte(c_int).decode('latin-1')
    return c


def decrypt(c, e, n, d):
    """Invert encrypt(): latin-1 ciphertext back to the UTF-8 plaintext."""
    c_int = byte_to_int(c.encode('latin-1'))
    m_int = pow(c_int, d, n)
    return int_to_byte(m_int).decode('utf-8')


def hash(x):
    """SHA-1 hex digest of x (ints are hashed via their hex() string).

    NOTE: shadows the built-in hash() for the rest of this module.
    """
    x = hex(x) if type(x) == int else x
    return hashlib.sha1(x.encode('utf-8')).hexdigest()


def blind_hash(x, r, e, n):
    """Blind the hash of x with blinding factor r: r^e * H(x) mod n (hex)."""
    hash_x = toInt(hash(x))
    return hex(pow(r, e) * hash_x % n)


def blind(x, r, e, n):
    """Blind an integer x directly: r^e * x mod n (hex)."""
    return hex(pow(r, e) * x % n)


def sign(x, n, d):
    """RSA-sign hex/int value x with private exponent d; return hex."""
    x = toInt(x) if type(x) != int else x
    return hex(pow(x, d, n))


def verify(x, signature, e, n):
    """Check signature^e == x (mod n); both args are hex strings."""
    x, signature = toInt(x), toInt(signature)
    return x == pow(signature, e, n)


def blind_signature(m, r, e, n, d):
    """Full blind-signature round: blind H(m), sign it, then unblind."""
    h_blind = blind_hash(m, r, e, n)
    h_blind_signed = sign(h_blind, n, d)
    return modinv_x(h_blind_signed, r, n)


# Converters
def byte_to_int(x, byteorder='big'):
    """bytes -> int."""
    return int.from_bytes(x, byteorder)


def int_to_byte(x, byteorder='big'):
    """int -> shortest big-endian bytes representation."""
    length = (x.bit_length() + 7) // 8
    return x.to_bytes(length, byteorder)


def toInt(x):
    """Parse a hex string (e.g. '0x1f') into an int."""
    return int(x, 16)


def mul_sum(array):
    """Product of all elements reduced mod n.

    NOTE: closes over the module-global n, which only exists after the
    __main__ block below assigns it.
    """
    return reduce(lambda x, y: x * y % n, array)


def f(x, y):
    """The protocol's public combining function (plain product)."""
    return x * y


def find_prime_factors(n):
    """Factor n = p * q by trial division (helper, unused in the demo)."""
    i = 2
    p = n
    while i * i <= p:
        if n % i:
            i += 1
        else:
            p //= i
    q = n // p
    return p, q


if __name__ == "__main__":
    # Key material: either p, q, e from the command line or fixed demo primes.
    if len(argv) == 4:
        p, q, e = map(int, argv[1:])
    else:
        p, q = 671998030559713968361666935769, 282174488599599500573849980909
        e = 17
    # Get the value n and private key d
    n = get_n(p, q)
    d = modinv(e, totient(p, q))
    print('e:', e, '\nn:', n, '\nd:', d)

    # Blind signature
    m = 'Give 10 coins!'
    blind_sign = blind_signature(m, 7, e, n, d)
    print('\nm:', m, '\nsign:', blind_sign)
    print('verified:', verify(hash(m), blind_sign, e, n), '\n')

    #
    # Improved protocol, withdrawal
    #
    # Alice chooses 2k quadruples
    ID = 1234
    print('ID:', ID)
    k, XY, quadruples, B = 8, [], [], []
    for i in range(2 * k):
        a, c, _d, r = [ randrange(1, 10 ** 20) for i in range(4) ]
        # NOTE: + binds tighter than ^, so this is a ^ (ID + _d).
        x, y = toInt(hash(a + c)), toInt(hash(a ^ ID + _d))
        b = pow(r, e) * f(x, y) % n
        B.append(b), quadruples.append([a, c, _d, r]), XY.append([x, y])

    # Bank uses cut-and-choose: open a random half, sign the other half.
    R = sample(range(len(B)), len(B) // 2)
    notR = [ i for i in range(len(B)) if i not in R ]

    # Bank verifies half of the R values
    B_bank = [0] * len(B)
    for i in R:
        a, c, _d, r = quadruples[i]
        x, y = toInt(hash(a + c)), toInt(hash(a ^ ID + _d))
        B_bank[i] = pow(r, e) * f(x, y) % n
    print('Bank verified:', all(B[i] == B_bank[i] for i in R))

    # Bank then signs the other half of the R values
    B_sign = [B[i] for i in notR]
    S_blind = pow(mul_sum(B_sign), d, n)

    # Alice calculates S without blind values
    R_arr = [quadruples[i][3] for i in notR]
    _R = mul_sum(R_arr)
    S = modinv(_R, n) * S_blind % n
    print('S:', hex(S))

    # Alice verifies by adding all x, y values included in sign
    # (b here is an index into XY, despite the name).
    F_arr = [f(*XY[b]) % n for b in notR]
    F = mul_sum(F_arr)
    print('Alice verifies:', F == pow(S, e, n))

    # Debugging
    import pdb
    debug = input("Debug (y/N)? ")
    if(debug == 'y' or debug == 'yes'):
        pdb.set_trace()

# # Could be used to convert RSA base64 keys
# # def b64_to_int(x):
# #    return byte_to_int(binascii.a2b_base64(x))
# # def int_to_b64(x):
# #    return binascii.b2a_base64(int_to_byte(x))
true
caaac816a8191ccb114c3b17d8ded0e091574a77
Python
andrew12678/multilayer-perceptron-numpy
/src/optimiser/sgd.py
UTF-8
2,109
3.203125
3
[]
no_license
from .optimiser import Optimiser
from typing import List
from collections import namedtuple
from ..layers.linear import Layer

# Immutable pair holding the previous velocity of a layer's parameters.
Parameter = namedtuple("Parameter", ["weights", "biases"])


class SGD(Optimiser):
    def __init__(
        self,
        layers: List[Layer],
        learning_rate: float,
        weight_decay: float,
        momentum: float,
    ):
        """Stochastic gradient descent with momentum and weight decay.

        With momentum == 0 the velocity reduces to lr * grad, i.e. plain SGD.

        Args:
            layers (List[Layer]): layers (Linear or BN) whose parameters are updated
            learning_rate (float): step size
            weight_decay (float): multiplicative decay applied to weights on update
            momentum (float): momentum coefficient (0 disables momentum)
        """
        super().__init__(layers)
        self.lr = learning_rate
        self.weight_decay = weight_decay
        self.momentum = momentum
        # One zero-velocity entry per layer.
        self.velocity = [Parameter(0, 0) for _ in layers]

    def step(self):
        """Apply one momentum-SGD update to every layer's parameters."""
        for idx, (layer, previous) in enumerate(zip(self.layers, self.velocity)):
            # v_new = momentum * v_old + lr * grad
            weight_velocity = self.momentum * previous.weights + self.lr * layer.grad_W
            bias_velocity = self.momentum * previous.biases + self.lr * layer.grad_b

            # Remember the velocities for the next step.
            self.velocity[idx] = Parameter(weight_velocity, bias_velocity)

            # Weight decay shrinks the (already stepped) weights; biases are
            # stepped without decay.
            layer.weights = (1 - self.weight_decay) * (layer.weights - weight_velocity)
            layer.biases -= bias_velocity
true
4de35dae62aa342f7a060790f79fb9c4c3c293a1
Python
fcooper8472/PolynomialFractal
/polynomial_fractal.py
UTF-8
4,028
3
3
[ "Unlicense" ]
permissive
import itertools

import matplotlib.pyplot as plt
import numpy as np

######################
# Parameters to set  #
######################

# Highest polynomial degree to process; runtime grows like 2^degree.
max_deg = 23

# Bins per axis — this is effectively the output image resolution.
num_bins = 2800

# Half-width of the binned square in the complex plane; >= 2 captures all roots.
radius = 2.

# Bin counts above this percentile are clipped to improve dynamic range.
percentile = 98

######################
######################
######################


def get_roots_fname(_deg):
    """Cache-file path for the roots of degree `_deg` polynomials."""
    return 'roots/degree_%s.npy' % str(_deg).zfill(2)


def get_bins_fname(_num_bins, _radius, _max_degree):
    """Cache-file path for a binning with the given parameters."""
    return 'bins/%d_%.1f_%d.npy' % (_num_bins, _radius, _max_degree)


def calculate_roots(_deg):
    """Compute (and cache to disk) roots of all monic degree-`_deg`
    polynomials whose remaining coefficients are +/-1.

    Fixing the leading coefficient to +1 avoids double counting, leaving
    2^deg polynomials with deg roots each.
    """
    try:
        # A loadable cache file means this degree is already done.
        np.load(get_roots_fname(_deg), allow_pickle=False)
        print('Roots for degree %s already exist.' % str(_deg).rjust(2, ' '), flush=True)
    except IOError:
        print('Calculating roots for degree %s... ' % str(_deg).rjust(2, ' '), end='', flush=True)
        all_roots = np.empty((2**_deg, _deg), np.complex64)

        # Every +/-1 tail, prepended with the fixed leading 1.
        coefficient_tails = itertools.product([1., -1.], repeat=_deg)
        for row, tail in enumerate(coefficient_tails):
            all_roots[row] = np.roots([1.] + list(tail))

        np.save(get_roots_fname(_deg), all_roots.flatten(), allow_pickle=False)
        print('done.', flush=True)


def bin_roots(_num_bins, _radius):
    """Histogram all cached roots into a `_num_bins` x `_num_bins` grid
    covering [-radius, radius]^2, caching the result to disk.
    """
    # Probe cache files to find the largest degree already computed.
    probe = 1
    while True:
        try:
            np.load(get_roots_fname(probe), allow_pickle=False)
            probe += 1
        except IOError:
            break
    _max_deg = probe - 1

    try:
        # Reuse a previous binning when one exists for these parameters.
        _bins = np.load(get_bins_fname(_num_bins, _radius, _max_deg), allow_pickle=False)
        print('Binning already complete for these parameters.', flush=True)
        return _bins
    except IOError:
        _bins = np.zeros((_num_bins, _num_bins), dtype=np.uint32)
        _bin_size = 2. * _radius / _num_bins

        for degree in range(1, 1 + _max_deg):
            print('Binning roots for degree %s... ' % str(degree).rjust(2, ' '), end='', flush=True)
            roots = np.load(get_roots_fname(degree), allow_pickle=False)

            # Map each root to its bin; clamp to the grid in case radius
            # was chosen too small.
            imag_bins = np.floor((roots.imag + _radius) / _bin_size).astype(np.uint16)
            real_bins = np.floor((roots.real + _radius) / _bin_size).astype(np.uint16)
            imag_bins = np.clip(imag_bins, 0, _num_bins - 1)
            real_bins = np.clip(real_bins, 0, _num_bins - 1)

            for coords in zip(imag_bins, real_bins):
                _bins[coords] += 1
            print('done.', flush=True)

        np.save(get_bins_fname(_num_bins, _radius, _max_deg), _bins, allow_pickle=False)
        return _bins


if __name__ == '__main__':
    for degree in range(1, 1 + max_deg):
        calculate_roots(degree)

    bins = bin_roots(num_bins, radius)

    # Clip the brightest bins (computed over non-empty bins only).
    clip_value = np.percentile(bins[np.nonzero(bins)], percentile)
    bins[bins > clip_value] = clip_value

    colour_norm = plt.Normalize(0, clip_value, clip=True)
    colour_map = plt.get_cmap('inferno')
    plt.imsave('PolynomialFractal.png', colour_map(colour_norm(bins)))
76f195d7f651bb55715e526629cd97d4bb2c1cbd
Python
alb7979s/datastructuresAndAlgorithms
/samsungProblems/BOJ/17822_원판돌리기.py
UTF-8
2,058
2.921875
3
[]
no_license
# https://www.acmicpc.net/problem/17822 from sys import* input=stdin.readline def printf(): for i in range(n): for j in range(m): print(board[i][j],end=' ') print() print() # 클린코드에서 인자 많이쓰지 말랬는디 def rotate(x, d, k): global board for i in range(1, n+1): if i%x == 0: temp = [] for j in range(m): if d==1: temp.append(board[i-1][(j+k)%m]) else: temp.append(board[i-1][(j+(m-k))%m]) for j in range(m): board[i-1][j] = temp[j] def solve(): global board fire = [[0]*m for _ in range(n)] total, cnt, fired = 0, 0, 0 for x in range(n): for y in range(m): if board[x][y]: for dx, dy in [(0, 1), (1, 0)]: # 다른 언어로 풀 시 %연산 다르게 해줘야함(음수 나머지 연산이 다름) nx, ny = (x+dx), (y+dy)%m if nx > n-1: continue if board[nx][ny] == board[x][y]: fire[nx][ny] = 1 fire[x][y] = 1 fired = 1 cnt+=1 total += board[x][y] if cnt: avg = total/cnt if not fired: for x in range(n): for y in range(m): if board[x][y]: if board[x][y] > avg: board[x][y] -= 1 elif board[x][y] < avg: board[x][y] += 1 else: for x in range(n): for y in range(m): if fire[x][y]: board[x][y] = 0 n, m, t = map(int, input().split()) board = [list(map(int, input().split()))for _ in range(n)] for i in range(t): #x의 배수인 원판을 d방향(0시계, 1반시계)으로 k칸 회전 x, d, k = map(int, input().split()) rotate(x, d, k) # printf() solve() # printf() res = 0 for i in range(n): res += sum(board[i]) print(res)
true
0aaf13c6253d5fb0e306c8ec5cc74bee420c5d72
Python
aquaeye-ai/calacademy-fish-id
/lib/scripts/png_to_jpg_for_directory.py
UTF-8
2,081
2.765625
3
[]
no_license
""" Script that wraps mogrify and rm to convert all png images to jpg format and then remove the png images if desired. """ import os import yaml import subprocess # from tensorflow.python.tools import freeze_graph if __name__ == "__main__": # we expect, as a hand-shake agreement, that there is a .yml config file in top level of lib/configs directory config_dir = os.path.join(os.curdir, 'configs') yaml_path = os.path.join(config_dir, 'png_to_jpg_for_directory.yml') with open(yaml_path, "r") as stream: config = yaml.load(stream) ## collect hyper parameters/args from config # NOTE: float() is required to parse any exponentials since YAML sends exponentials as strings source_directory = config["source_directory"] remove_png = config["remove_png"] # convert all png to jpg mog_process = subprocess.Popen("mogrify -verbose -format jpg {}/*.png".format(source_directory), shell=True) # stdout=subprocess.PIPE, # stderr=subprocess.PIPE) # Popen is asynchronous and mogrify takes a while for large directorys, so we must wait for it to complete before # kicking off the command to start removing images. Otherwise, rm will run and remove images before mogrify has # converted them. mog_process.wait() # remove all png if desired if remove_png > 0: print("Removing png images") # this line seems to prevent a perceived race condition between the mogrify process ending and the rm process beginning -> sometimes the rm process doesn't complete without this line added rm_process = subprocess.Popen("rm -v {}/*.png".format(source_directory), shell=True) # stdout=subprocess.PIPE, # stderr=subprocess.PIPE) # there is a race condition (I think) where the rm process fails to fully complete, so we force the program to wait on it rm_process.wait()
true
670324826ccd11074350ccdeeb0384f7df74dd92
Python
ardhanii/covid19-sir
/covsirphy/loading/db_owid.py
UTF-8
3,791
2.6875
3
[ "Apache-2.0" ]
permissive
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import country_converter as coco
import pandas as pd
from covsirphy.util.term import Term
from covsirphy.loading.db_base import _RemoteDatabase


class _OWID(_RemoteDatabase):
    """
    Access "Our World In Data".
    https://github.com/owid/covid-19-data/tree/master/public/data
    https://ourworldindata.org/coronavirus

    Args:
        filename (str): CSV filename to save records
    """
    # URL for vaccine data
    URL_V = "https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/"
    URL_V_REC = f"{URL_V}vaccinations.csv"
    URL_V_LOC = f"{URL_V}locations.csv"
    # URL for PCR data
    URL_P = "https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/"
    URL_P_REC = f"{URL_P}covid-testing-all-observations.csv"
    # Citation
    CITATION = "Hasell, J., Mathieu, E., Beltekian, D. et al." \
        " A cross-country database of COVID-19 testing. Sci Data 7, 345 (2020)." \
        " https://doi.org/10.1038/s41597-020-00688-8"
    # Column names and data types
    # {"name in database": "name defined in Term class"}
    COL_DICT = {
        "date": Term.DATE,
        "location": Term.COUNTRY,
        Term.PROVINCE: Term.PROVINCE,
        "iso_code": Term.ISO3,
        "vaccines": Term.PRODUCT,
        "total_vaccinations": Term.VAC,
        "people_vaccinated": Term.V_ONCE,
        "people_fully_vaccinated": Term.V_FULL,
        "tests": Term.TESTS,
    }

    def download(self, verbose):
        """
        Download the dataset from the server and set the list of primary sources.

        Args:
            verbose (int): level of verbosity

        Returns:
            pandas.DataFrame
                Index
                    reset index
                Columns
                    defined by the first values of self.COL_DICT.values()

        Note:
            If @verbose is equal to or over 1, how to show the list will be explained.
        """
        # Download datasets
        if verbose:
            print("Retrieving datasets from Our World In Data https://github.com/owid/covid-19-data/")
        # Vaccinations: per-date records joined with per-location vaccine names
        v_rec_cols = [
            "date", "location", "iso_code", "total_vaccinations", "people_vaccinated", "people_fully_vaccinated"]
        v_rec_df = pd.read_csv(self.URL_V_REC, usecols=v_rec_cols)
        v_loc_df = pd.read_csv(self.URL_V_LOC, usecols=["location", "vaccines"])
        v_df = v_rec_df.merge(v_loc_df, how="left", on="location")
        # Tests: rebuild a cumulative total from daily changes where the
        # published cumulative column has gaps
        pcr_rec_cols = ["ISO code", "Date", "Daily change in cumulative total", "Cumulative total"]
        pcr_df = pd.read_csv(self.URL_P_REC, usecols=pcr_rec_cols)
        pcr_df = pcr_df.rename(columns={"ISO code": "iso_code", "Date": "date"})
        pcr_df["cumsum"] = pcr_df.groupby("iso_code")["Daily change in cumulative total"].cumsum()
        pcr_df = pcr_df.assign(tests=lambda x: x["Cumulative total"].fillna(x["cumsum"]))
        # Combine data (vaccinations/tests) on the (iso_code, date) key
        df = v_df.set_index(["iso_code", "date"])
        df = df.combine_first(pcr_df.set_index(["iso_code", "date"]).loc[:, ["tests"]])
        df = df.reset_index()
        # Location (country/province)
        df["location"] = df["location"].replace(
            {
                # COG
                "Congo": "Republic of the Congo",
            }
        )
        # Drop OWID aggregate pseudo-countries (e.g. OWID_WRL)
        df = df.loc[~df["iso_code"].str.contains("OWID_")]
        # Fill missing names from later rows with the same ISO3 code, then
        # resolve any remaining gaps via country_converter
        df["location"] = df.groupby("iso_code")["location"].bfill()
        df.loc[df["location"] == df["iso_code"], "location"] = None
        df.loc[df["location"].isna(), "location"] = df.loc[df["location"].isna(), "iso_code"].apply(
            lambda x: coco.convert(x, to="name_short", not_found=None))
        # Province granularity is not available in this source; self.PROVINCE
        # / self.UNKNOWN are inherited constants (via Term).
        df[self.PROVINCE] = self.UNKNOWN
        return df
true
1b3e37b90e9ae9f13622b3375374a0755bcded48
Python
myouds/phone_numbers
/phone_numbers.py
UTF-8
9,439
3.203125
3
[]
no_license
#!/usr/bin/env python3
# Phone-bill calculator: classifies UK phone numbers, prices calls (all
# monetary values are in pence) and reports the most expensive number in a
# call-log CSV file.

import datetime
from enum import Enum
import math
import json


class CallParseError(Exception):
    """Raised when a call-log line (or one of its fields) cannot be parsed."""
    pass


class Direction(Enum):
    """Call direction; incoming calls are always free."""
    INCOMING = 'INCOMING'
    OUTGOING = 'OUTGOING'


class PhoneNumber:
    '''
    Phone number superclass.
    This should not be instantiated directly.
    Always use one of the subclasses.
    '''

    # Tariff knobs, overridden per subclass (pence).
    cost_per_minute = None
    connection_charge = None
    # When set, off-peak call cost is floor-divided by this value.
    off_peak_divider = None

    def __init__(self, number):
        if self.cost_per_minute is None or self.connection_charge is None:
            raise NotImplementedError('Cannot instantiate generic PhoneNumber')
        self.number = number

    @classmethod
    def from_string(cls, number):
        '''
        Take a phone number in string form and return one of the
        PhoneNumber subclasses.
        The choice of subclass will depend on the first characters of the
        phone number
        '''
        #
        # Standardise country code syntax by replacing '+' with '00'
        number = number.replace('+', '00')
        #
        # If number starts with '0044', replace that part with '0' because
        # it is not international
        if number.startswith('0044'):
            number = '0' + number[4:]
        #
        # Number is international if it starts with 00
        if number[0:2] == '00':
            return InternationalNumber(number)
        #
        # Landline numbers start with 01 or 02
        if number[0:2] in ['01', '02']:
            return LandlineNumber(number)
        #
        # Freephone numbers start with 080
        if number[0:3] == '080':
            return FreeNumber(number)
        #
        # Mobile numbers start with 07
        if number[0:2] == '07':
            #
            # Unless it is 076 and not 07624
            if number[2] != '6' or number[0:5] == '07624':
                return MobileNumber(number)
        #
        # Everything else is invalid
        return InvalidNumber(number)


#
# Various classes of number have different costs
#
class InternationalNumber(PhoneNumber):
    cost_per_minute = 80
    connection_charge = 50


class LandlineNumber(PhoneNumber):
    cost_per_minute = 15
    connection_charge = 0
    off_peak_divider = 3


class MobileNumber(PhoneNumber):
    cost_per_minute = 30
    connection_charge = 0
    off_peak_divider = 3


class FreeNumber(PhoneNumber):
    cost_per_minute = 0
    connection_charge = 0


class InvalidNumber(PhoneNumber):
    cost_per_minute = 0
    connection_charge = 0


#
# Tarriff includes free minutes for some number classes
# NOTE: these module-level globals are consumed (mutated) as PhoneCall
# objects are constructed, so pricing depends on construction order.
#
international_allowance = 10
landline_mobile_allowance = 100


class PhoneCall:
    """One parsed call-log entry, with its share of the free allowances."""

    # Calls strictly before 08:00 or strictly after 20:00 are off-peak.
    peak_time_start = datetime.time(8, 0)
    peak_time_end = datetime.time(20, 0)

    def __init__(self, number, start_time, duration, direction):
        """Parse one call.

        Args:
            number: phone number string (any supported format).
            start_time: ISO-8601 timestamp; a trailing 'Z' is accepted.
            duration: 'MM:SS' string; billed per started minute.
            direction: 'INCOMING' or 'OUTGOING'.

        Raises:
            CallParseError: on a malformed start time or duration.
            ValueError: on an unknown direction (from the Enum lookup).
        """
        global international_allowance
        global landline_mobile_allowance
        #
        # Phone number class will depend on its first characters
        self.number = PhoneNumber.from_string(number)
        #
        # Start time is in ISO 8601 format but may end with 'Z' instead of
        # the +00:00 expected by datetime
        try:
            self.start_time = datetime.datetime.fromisoformat(
                start_time.replace('Z', '+00:00')
            )
        except Exception as e:
            raise CallParseError(
                f'Error parsing start time {start_time}'
            ) from e
        #
        # Duration is in the format 'MM:SS'.
        # We need the number of started minutes
        try:
            minutes, seconds = duration.split(':')
            minutes = int(minutes)
            seconds = int(seconds)
            if seconds:
                minutes += 1
            self.duration = minutes
        except Exception as e:
            raise CallParseError(
                f'Error parsing duration {duration}'
            ) from e
        #
        # Direction should be either INCOMING or OUTGOING
        self.direction = Direction(direction)
        #
        # Work out how much of the tarriff allowance this call has used
        # and reduce the remaining allowance accordingly.
        # NOTE: incoming calls never define self.free_minutes (they never
        # reach the allowance branch in cost() either).
        if self.direction is Direction.OUTGOING:
            if type(self.number) is InternationalNumber:
                self.free_minutes = min(
                    self.duration, international_allowance
                )
                international_allowance -= self.free_minutes
            elif type(self.number) in [LandlineNumber, MobileNumber]:
                self.free_minutes = min(
                    self.duration, landline_mobile_allowance
                )
                landline_mobile_allowance -= self.free_minutes
            else:
                self.free_minutes = 0

    def cost(self, apply_allowance=True):
        """Return the cost of this call in pence (0 for incoming calls).

        Args:
            apply_allowance: when False, free minutes are ignored
                (used by the tests to check the raw tariff).
        """
        if self.direction == Direction.INCOMING:
            return 0
        #
        # Charge is per started minute with an additional connection charge
        # Subtract the free minutes from the started minutes
        if apply_allowance:
            chargeable_minutes = self.duration - self.free_minutes
        else:
            #
            # Free minutes can be optionally ignored for testing purposes
            chargeable_minutes = self.duration
        cost = self.number.connection_charge + \
            (chargeable_minutes * self.number.cost_per_minute)
        if self.number.off_peak_divider is not None:
            start_time = self.start_time.time()
            if start_time < self.peak_time_start \
                    or start_time > self.peak_time_end:
                #
                # Off peak start time - divide cost by divider
                cost = cost // self.number.off_peak_divider
        return cost

    @classmethod
    def from_csv(cls, csv_line):
        """Build a PhoneCall from one CSV line.

        Raises CallParseError if the line does not have exactly 4 fields.
        """
        #
        # CSV line should be in the format:
        # PhoneNumber,CallStartTime,CallDuration,CallDirection
        try:
            num, start, duration, direction = csv_line.rstrip('\n').split(',')
        except Exception as e:
            raise CallParseError(
                f'Error spliting CSV line - line is {csv_line}'
            ) from e
        return cls(num, start, duration, direction)

    @classmethod
    def from_csv_file(cls, file):
        """Yield a PhoneCall for every non-blank line of a call-log file."""
        with open(file, 'r') as f:
            while True:
                line = f.readline()
                if not line:
                    break
                # Skip blank lines (a lone newline has length 1).
                if len(line) > 1:
                    yield cls.from_csv(line)


def findMostExpensiveNumber(callLogFilepath):
    """Return a JSON report for the single most expensive number.

    Returns None when nothing cost anything, or when the top two numbers
    are tied. TotalAmount is formatted in pounds with %f (6 decimals).
    """
    calls = PhoneCall.from_csv_file(callLogFilepath)
    #
    # Build up a mapping table of phone numbers and total call cost.
    # The table will be in the format
    # {
    #     number: [number, total_cost],
    #     number: [number, total_cost],
    #     ...
    # }
    # The number is the dictionary key to simplify insertions, and it is
    # duplicated in the value to allow us to easily sort the values according
    # to total cost and then grab the number associated with the greatest cost
    number_table = dict()
    for call in calls:
        number_table.setdefault(
            call.number.number, [call.number.number, 0]
        )[1] += call.cost()
    two_most_expensive = sorted(
        number_table.values(), key=lambda x: x[1], reverse=True
    )[0:2]
    #
    # Return None if there has been no cost
    if two_most_expensive[0][1] == 0:
        return None
    #
    # Return None if it is a tie
    if len(two_most_expensive) == 2 and \
            two_most_expensive[0][1] == two_most_expensive[1][1]:
        return None
    most_expensive = two_most_expensive[0]
    return_data = dict(
        PhoneNumber=most_expensive[0],
        TotalAmount='£%f' % (most_expensive[1] / 100)
    )
    #
    # Set ensure_ascii=False to allow the £ sign to be printed properly
    return json.dumps(return_data, indent=4, ensure_ascii=False)


#
# Tests to be run by pytest
#
def test_phone_number():
    """Classification cases for PhoneNumber.from_string."""
    numbers = [
        ('07777777777', MobileNumber),
        ('07655555555', InvalidNumber),
        ('07624777777', MobileNumber),
        ('01858585858', LandlineNumber),
        ('02934567890', LandlineNumber),
        ('+441234565567', LandlineNumber),
        ('00441234565567', LandlineNumber),
        ('00011234565567', InternationalNumber),
        ('+011234565567', InternationalNumber),
        ('+11234565567', InternationalNumber),
        ('05678765432', InvalidNumber)
    ]
    for num in numbers:
        assert type(PhoneNumber.from_string(num[0])) is num[1]


def test_csv():
    """Raw-tariff (allowance-free) pricing for representative CSV lines."""
    csv = '07882456789,2019-08-29T11:28:05.666Z,12:36,OUTGOING'
    assert PhoneCall.from_csv(csv).cost(apply_allowance=False) == 390
    csv = '07882456789,2019-08-29T20:28:05.666Z,12:36,OUTGOING'
    assert PhoneCall.from_csv(csv).cost(apply_allowance=False) == 130
    csv = '07882456789,2019-08-29T20:28:05.666Z,12:36,INCOMING'
    assert PhoneCall.from_csv(csv).cost(apply_allowance=False) == 0
    csv = '08082456789,2019-08-29T20:28:05.666Z,12:36,OUTGOING'
    assert PhoneCall.from_csv(csv).cost(apply_allowance=False) == 0
    csv = '+017654765234,2019-08-29T15:28:05.666Z,1:0,OUTGOING'
    assert PhoneCall.from_csv(csv).cost(apply_allowance=False) == 130


if __name__ == '__main__':
    import sys
    if len(sys.argv) != 2:
        print(f'Usage: {sys.argv[0]} <Call log file>')
        sys.exit(1)
    print(findMostExpensiveNumber(sys.argv[1]))
    sys.exit(0)
true
77dbb8cb0f8729cd4df706fee0a8bb1a4e6a64bc
Python
young8179/pythonClass
/python102/15-caesar.py
UTF-8
603
3.875
4
[]
no_license
'''
Caesar Cipher
Use your solution to decipher the following text:
"lbh zhfg hayrnea jung lbh unir yrnearq"'''


def caesar_translate(sentence, shift):
    """Encode *sentence* with a Caesar cipher shifted by *shift* places.

    Lowercase letters a-z are rotated; any other character (spaces,
    punctuation, uppercase) is passed through unchanged — the original
    script crashed with ValueError on anything outside a-z and space.

    Args:
        sentence: text to encode/decode.
        shift: number of places to rotate (any integer; reduced mod 26 —
            the original silently became a no-op for shift >= 26).

    Returns:
        The translated string.
    """
    alpha = "abcdefghijklmnopqrstuvwxyz"
    shift %= len(alpha)
    # Build the shifted alphabet once; letter at index i maps to caesar[i].
    caesar = alpha[shift:] + alpha[:shift]
    translation = ""
    for letter in sentence:
        if letter in alpha:
            translation += caesar[alpha.index(letter)]
        else:
            # Pass through anything that is not a lowercase letter.
            translation += letter
    return translation


if __name__ == "__main__":
    sentence = input("input sentence: ")
    shift = int(input("shift: "))
    print(caesar_translate(sentence, shift))

## answer = you must unlearn what you have learned (shift 13)
true
fbc90cdf9bf4d3ce8dddd2886d761eea221aa519
Python
Aasthaengg/IBMdataset
/Python_codes/p02767/s774588648.py
UTF-8
233
2.90625
3
[]
no_license
from decimal import Decimal, ROUND_HALF_UP n = int(input()) x = list(map(int,input().split())) a = Decimal(sum(x) / n).quantize(Decimal('0'), rounding=ROUND_HALF_UP) ans = 0 for i in range(n): ans += (x[i] - a) ** 2 print(ans)
true
57422771b71d4e1be3e07636c548535607889ee6
Python
jusesyang/untitled1
/day3/unittestDemo.py
UTF-8
1,539
3.8125
4
[]
no_license
# 测试框架是干什么用的? # 最主要的用途是组织和执行测试用例 # 1.导入unittest框架 import unittest # java中的类和文件名的关系, public的类名和文件名一样 # python中的可以一样, 但是推荐: 文件名首字母小写,类名首字母大写,剩下一样 # 2.继承unittest中的父类 # python中的继承直接用小括号表示 # TestCase是测试用例的意思, 我们就在UnittestDemo中编写测试用例 class UnittestDemo(unittest.TestCase): # 3.重写父类中的方法setUp和tearDown # def是方法的关键字 # setUp 是创建的意思, # 类似于手动测试中的预置条件 def setUp(self): print("这个方法将会在测试用例执行前先执行") def tearDown(self): print("这个方法将会在测试用例方法之后执行") #4. 编写测试用例方法 # 只有以test开头命名的方法才是测试用例方法 # 测试用例方法,可以直接被运行 # 普通方法不能直接运行, 只有被调用才能执行 def test_log_in(self): print("登录测试用例") self.zhu_ce() def zhu_ce(self): print("注册测试用例") def test_a_search(self): print("搜索测试用例") # 如果你直接执行这个文件, 那么 就会执行下面的语句 # 否则.你执行其他文件, import这个文件的时候, 下面的代码就不会被执行 if __name__ == '__main__': # 执行当前文件中所有的unittest的测试用例 unittest.main()
true
fc2140852f04630def7f70b729721ec3b2d3b079
Python
nsmith0310/Programming-Challenges
/Python 3/LeetCode/lc1447.py
UTF-8
583
2.75
3
[]
no_license
from math import gcd class Solution: def simplifiedFractions(self, n: int) -> List[str]: f = [] nums = [i for i in range(1,n+1)] i = 0 while i<len(nums): j = 0 while j<i: num = nums[j] num2 = nums[i] r = gcd(num,num2) num//=r num2//=r if num2<=n: f.append(str(num)+"/"+str(num2)) j+=1 i+=1 return list(set(f))
true
725414201eff2d2869b94388f5042e21aadb4833
Python
leminhtr/kaggle
/Titanic/main_linreg-logreg.py
UTF-8
4,162
3.328125
3
[]
no_license
import numpy as np import pandas as pd import scipy from sklearn.linear_model import LinearRegression as linreg from sklearn.linear_model import LogisticRegression as logreg from sklearn.cross_validation import KFold from sklearn.cross_validation import * from sklearn import cross_validation titanic=pd.read_csv("train.csv") #print(titanic.describe()) #print(titanic.head(5)) # ------------------- DATA CORRECTION -------------------------------- # 1) Fill missing Age data with median titanic["Age"]=titanic["Age"].fillna(titanic["Age"].median()) # 2) Convert Sex string with 0 or 1 titanic.loc[titanic["Sex"] == "male", "Sex"] = 0 #convert 0 for men titanic.loc[titanic["Sex"] =="female", "Sex"]=1 #convert 1 for women # 3) Fill missing Embarked data with most common char print(pd.value_counts(titanic["Embarked"].values, sort=False)) # "S" is most common char -> chosen as default for missing values titanic["Embarked"]=titanic["Embarked"].fillna("S") #4) Replace Embarked char with numeric code #titanic.loc[titanic["Embarked"]=="S", "Embarked"]=0 # 'S' -> 0 #titanic.loc[titanic["Embarked"]=="C", "Embarked"]=1 # 'C' -> 1 titanic.loc[titanic["Embarked"]=="S", "Embarked"]=0 titanic.loc[titanic["Embarked"]=="C", "Embarked"]=1 titanic.loc[titanic["Embarked"]=="Q", "Embarked"]=2 # 'Q' -> 2 # input column used for predictions : predictors=["Pclass", "Sex", "Age", "SibSp", "Parch", "Fare", "Embarked"] # Initialize the algorithm algo_linreg = linreg() # Generate cross-validation folds with random splits # return rows indices for corresponding train and set kf =KFold(titanic.shape[0], n_folds=3, random_state=1) # Make the predictions predictions =[] for train, test in kf: # Which predictors used on train fold train_predictors = (titanic[predictors].iloc[train,:]) # Target/goal used to train the algo train_target= titanic["Survived"].iloc[train] # Train the algo with the predictors and target # .fit(x input, y output) algo_linreg.fit(train_predictors, train_target) # Make predictions 
with the trained algo on test fold test_predictions = algo_linreg.predict(titanic[predictors].iloc[test,:]) predictions.append(test_predictions) # The predictions are in 3 Numpy arrays # So we concatenate the arrays on axis 0 (bc only 1 axis) predictions=np.concatenate(predictions, axis=0) predictions[predictions> .5]=1 predictions[predictions<= .5]=0 print(predictions) print(sum(predictions==titanic["Survived"])) accuracy= sum(predictions==titanic["Survived"])/len(predictions) print(accuracy) # = 0.783 #------------------- Logistic Regression method --------------------- # Initialize the algo algo_logreg = logreg(random_state=1) # Compute accuracy score for all cross-V folds; # cross_val_score(algo, predictors, target, cross-validation fold) scores = cross_validation.cross_val_score(algo_logreg, titanic[predictors], titanic["Survived"], cv=3) # Mean of the scores for each folds (3 folds) print(scores.mean()) #----------------------------------- Log Reg. with test set --------------------- titanic_test = pd.read_csv("test.csv") # I) Clean data titanic_test["Age"] = titanic_test["Age"].fillna(titanic["Age"].median()) titanic_test["Fare"] = titanic_test["Fare"].fillna(titanic_test["Fare"].median()) titanic_test.loc[titanic_test["Sex"] == "male", "Sex"] = 0 titanic_test.loc[titanic_test["Sex"] == "female", "Sex"] = 1 titanic_test["Embarked"] = titanic_test["Embarked"].fillna("S") titanic_test.loc[titanic_test["Embarked"] == "S", "Embarked"] = 0 titanic_test.loc[titanic_test["Embarked"] == "C", "Embarked"] = 1 titanic_test.loc[titanic_test["Embarked"] == "Q", "Embarked"] = 2 # II) Test algo on data # Initialize the algo algo_logreg_test=logreg(random_state=1) # Train algo on using all training data algo_logreg_test.fit(titanic[predictors], titanic["Survived"]) # Make predictions with algo on data predictions=algo_logreg_test.predict(titanic_test[predictors]) # Generate new dataset for kaggle submission submission= pd.DataFrame({ "PassengerId" : 
titanic_test["PassengerId"], "Survived": predictions }) submission.to_csv("kaggle.csv", index=False)
true
0499c41e1e1fcedcdf9b6b481bc1396682e1ad10
Python
tammoj/upd-json-plotter
/src/udp_json_plotter/JsonParser.py
UTF-8
3,547
3.34375
3
[]
no_license
import collections import sys from collections import defaultdict import json class JsonParser: _recent_timestamp = None _recent_packet_counter = None def parse_into_x_y(self, json_string): obj_list = json.loads(json_string) if not isinstance(obj_list, list): print(f'The received message must be a json array! But the message is: "{obj_list}"', file=sys.stderr) exit(-1) x_values = defaultdict(list) y_values = {} for obj_dict in obj_list: if not isinstance(obj_dict, dict): print(f'The top level json array may only contains json objects.' f'The element "{obj_dict}" is not an json object!', file=sys.stderr) continue keys = obj_dict.keys() if 'PacketCounter' in keys: if len(keys) > 1: print(f'The json object contains the key "Paketcounter".' f' This object may in principle not contain other keys! But there are also this keys:' f' {keys}', file=sys.stderr) new_packet_counter = int(obj_dict['PacketCounter']) if not self._recent_packet_counter: self._recent_packet_counter = new_packet_counter continue diff_packet_counter = self._recent_packet_counter - new_packet_counter if diff_packet_counter > 1: print(str(diff_packet_counter) + " packets lost!") self._recent_packet_counter = new_packet_counter continue if 'Timestamp' in keys: if len(keys) > 1: print(f'The json object contains the key "Timestamp".' f' This object may in principle not contain other keys! 
But there are also this keys:' f' {keys}', file=sys.stderr) self._recent_timestamp = int(obj_dict['Timestamp']) continue if not self._recent_timestamp: print(f'No recent timestamp specified and therefore this keys {keys} are skipped!', file=sys.stderr) continue for key in keys: x_values[key].append(self._recent_timestamp) value = obj_dict[key] # primitive types if not isinstance(value, collections.Iterable): if key not in y_values: y_values[key] = list() y_values[key].append(value) continue # array type if isinstance(value, collections.Sequence): for index, value_value in enumerate(value): if key not in y_values: y_values[key] = defaultdict(list) y_values[key][index].append(value_value) continue # mapping types if isinstance(value, collections.Mapping): for value_key, value_value in value.items(): if key not in y_values: y_values[key] = defaultdict(list) y_values[key][value_key].append(value_value) continue print(f'Value "{value}" is not supported!', file=sys.stderr) x_values[key].pop() # undo `x_values[key].append(self._recent_timestamp)` return x_values, y_values
true
8fb5ac18a574efc0c9b2f874a3f6c7df34e3e6f6
Python
gloc-mike/100daysofcode-with-python-course
/days/55-57-uplink/demo/program.py
UTF-8
1,367
3.625
4
[ "MIT" ]
permissive
from blog_client import BlogClient def main(): val = 'RUN' while val: print("What would you like to do next?") val = input('[w]rite a post or [r]ead them?') if val == 'w': write_post() elif val == 'r': read_posts() def read_posts(): svc = BlogClient() response = svc.all_entries() posts = response.json() print() for idx, p in enumerate(posts, 1): print(" {}. [{:,} views] {}".format( idx, p.get('view_count'), p.get('title') )) print() selected = int(input('Which number to view? ')) selected_id = posts[selected - 1].get('id') response = svc.entry_by_id(selected_id) selected_post = response.json() print("Details for selected_post: {}".format(selected_post.get('id'))) print("Title: " + selected_post.get('title')) print("Written: " + selected_post.get('published')) print("Content: " + selected_post.get('content')) print() print() def write_post(): svc = BlogClient() title = input("Title: ") content = input("Body contents: ") view_count = int(input("view count (int): ")) resp = svc.create_new_entry(title, content, view_count) print() print("Created new post successfully: {}".format(resp.json().get('id'))) print() if __name__ == '__main__': main()
true
3e5ef510fdbfb9eb454197c390d30cb0e69645f9
Python
sergfreeman/my_progs
/tele_bot1/bot.py
UTF-8
1,137
2.703125
3
[]
no_license
#coding:utf-8 import telebot, config bot = telebot.TeleBot(config.token) @bot.message_handler(commands=["start"]) def start(message): bot.send_message(message.chat.id, "Привіт, я Telegram бот") @bot.message_handler(content_types=['text']) def handle_text_messages(message): if message.text == "Привет": bot.send_message(message.from_user.id, "Привет") elif message.text == "Кто ты?": bot.send_message(message.from_user.id, "Я тестовый чатбот для учебного примера.") elif message.text == "Как тебя зовут?": bot.send_message(message.from_user.id, "Меня зовут MyFirstTestBot.") elif message.text == "Что ты умеешь?": bot.send_message(message.from_user.id, "Я умею отвечать на несколько простых вопросов - кто я, как меня зовут и что я умею делать.") else: bot.send_message(message.from_user.id, "Я тебя не понимаю. Напиши что-то другое.") bot.polling()
true
07ed4cf86124cb8b848db77c7c7268fd0857494b
Python
EstherLacan/jiangfw
/python/testmysql.py
UTF-8
1,412
2.6875
3
[ "Apache-2.0" ]
permissive
# -*- coding: UTF-8 -*- from my_sql_conn import DbFunctions """ 权限:66 预置权限表数据 """ white_list = 'admin' white_list_array = white_list.split(",") white_list_array = sorted(set(white_list_array), key=white_list_array.index) # 去重 print(white_list_array) print 'white_list_array size is ' + str(len(white_list_array)) def initAllAppKey(): """ 预置所有appkey权限 """ appkeys = queryAppKey() appkey_list = [] for white_user in white_list_array: for appkey in appkeys: appkey_row = [] appkey_row.append(white_user) appkey_row.append(appkey[0]) appkey_row.append("20180320000000") appkey_list.append(appkey_row) insertPermissionList(appkey_list) print 3 def queryAppKey(): """ 查询appkey """ db = DbFunctions("localhost", "root", "Root@123", "db") result = db.mysql_qry("select appkey from table", 1) appkey_list = [] for app_key in result: if app_key not in appkey_list: appkey_list.append(app_key) print(appkey_list) print 'result size is ' + str(len(appkey_list)) return appkey_list def insertPermissionList(rows): """ 插入权限 """ db = DbFunctions("localhost", "root", "Root@123", "db") db.insert_by_many('table', rows) # print(result) if __name__ == '__main__': initAllAppKey()
true
6edf3d48361d34e343ff7f5e8b49a1ebc2749c66
Python
babshik/stepik-auto-tests-course
/get_method.py
UTF-8
182
2.578125
3
[]
no_license
#url = "https://prnt.sc/1npf96" from bs4 import BeautifulSoup import requests url = requests.get("https://prnt.sc/1npf96") soup = BeautifulSoup(url.text, "html.parser") print(soup)
true
e62a373438a0b8d3285a6236d606dd2ab83b807e
Python
ldunekac/Pandemic
/src/Level/level.py
UTF-8
4,186
3.53125
4
[]
no_license
from Level.City.city import City from Level.Deck.infection_deck import InfectionDeck from Level.Deck.player_deck import PlayerDeck from Level.Deck.Card.city_card import CityCard from Level.Disease.disease import Disease from Level.Player.player import Player class Level: """ Represents a single Level of the Expanded Pandemic Game """ def __init__(self): """ Initialize the Level """ self.diseases = [] self.cities = [] self.infectionDeck = None self.playerDeck = None self.players = [] self.setup() def setup(self): """ Setup the level for the start of the game """ self.setupCities() self.setupInfectionDeck() self.setupPlayerDeck() self.setupPlayers() self.addEpidemics() def setupCities(self): """ Setup the cities """ disease = Disease() self.diseases.append(disease) sanFrancisco = City("San Francisco", disease) chicago = City("Chicago", disease) atlanta = City("Atlanta", disease) montreal = City("Montreal", disease) newYork = City("New York", disease) washington = City("Washington", disease) london = City("London", disease) madrid = City("Madrid", disease) essen = City("Essen", disease) paris = City("Paris", disease) stPetersburg = City("St. 
Petersburg", disease) milan = City("Milan", disease) self.startingCity = atlanta self.cities.append(chicago) self.cities.append(sanFrancisco) self.cities.append(atlanta) self.cities.append(montreal) self.cities.append(newYork) self.cities.append(washington) self.cities.append(london) self.cities.append(madrid) self.cities.append(essen) self.cities.append(paris) self.cities.append(stPetersburg) self.cities.append(milan) self.makeCitiesAdjacent(chicago, sanFrancisco) self.makeCitiesAdjacent(chicago, atlanta) self.makeCitiesAdjacent(chicago, montreal) self.makeCitiesAdjacent(atlanta, washington) self.makeCitiesAdjacent(montreal, newYork) self.makeCitiesAdjacent(montreal, washington) self.makeCitiesAdjacent(newYork, washington) self.makeCitiesAdjacent(newYork, london) self.makeCitiesAdjacent(newYork, madrid) self.makeCitiesAdjacent(london, essen) self.makeCitiesAdjacent(london, paris) self.makeCitiesAdjacent(madrid, paris) self.makeCitiesAdjacent(essen, paris) self.makeCitiesAdjacent(essen, stPetersburg) self.makeCitiesAdjacent(essen, milan) self.makeCitiesAdjacent(paris, milan) def setupInfectionDeck(self): """ Setup the Infection Deck """ self.infectionDeck = InfectionDeck(self.cities) self.infectionDeck.shuffle() for infectionAmount in [3,2,1]: for i in range(3): city = self.infectionDeck.draw() city.infect(infectionAmount) def setupPlayerDeck(self): """ Setup the player Deck """ cityCards = [] for city in self.cities: cityCards.append(CityCard(city)) self.playerDeck = PlayerDeck(cityCards) self.playerDeck.shuffle() def setupPlayers(self): """ Setup Players """ # Choose roles # Add Players to list player = Player(self.startingCity) self.players.append(player) # Give each 4 cards in their hand for i in range(4): card = self.playerDeck.draw() player.addCardToHand(card) def addEpidemics(self): """ Setup Players """ # Add X Epidemics to the Player Deck def makeCitiesAdjacent(self, city1, city2): """ Make the two cities given adjacent """ city1.addAdjacentCity(city2) 
city2.addAdjacentCity(city1)
true
203abcdb9418f0170babe925bf4f3b7bd7e4d176
Python
wantwantwant/tutorial
/L2基础类型控制语句/作业二第八题.py
UTF-8
532
3.109375
3
[]
no_license
# 第八题 total = 0 for x in range(1,10): for y in range(0,10): for z in range(0,10): print(x,y,z,y,x) hui = x*10000 + y*1000 + z*100 + y*10 + x total += hui print(total) total = 0 for i in range(10000,99999+1): b5 =i // 10000 b4 =(i - b5*10000) // 1000 b3 =(i - b5*10000 - b4*1000) // 100 b2 =(i - b5*10000 - b4*1000 - b3*100) // 10 b1 =(i - b5*10000 - b4*1000 - b3*100 - b2*10) if b5 == b1 and b4 == b2: total += 1 print(i) print(total)
true
ad4a2069d8784ca1ea3d95fbf4f0ce9f7aa4c800
Python
Anthonina/lesson1
/training_numbers_and_strings.py
UTF-8
387
3.8125
4
[]
no_license
a = 2 b = 4.5 print(a + b) number_1 = int(input('Введите число от 1 до 10: ')) number_2 = float(input('Введите число от 1 до 10: ')) print(number_1 + 10) print(number_2 + 10) print(number_1//number_2) name = input('Введите Ваше имя: ') print('Привет, ' + name + '!' + ' Как дела?') float('1') int('2') bool(1) bool('') bool('0')
true
e0f9794252ae7d4d3c8230b4c5385307c52a3061
Python
elaynelemos/artificial-intelligence
/blind_search/breadth_search/breadth_search.py
UTF-8
777
2.984375
3
[ "MIT" ]
permissive
from models.node import Node from models.edge import Edge def width_search(problem, action): first_node = Node(state=problem.initial_state, action=action, path_cost=0) if problem.objective(first_node.state): return solution(first_node) search_edge = Edge(first_node) visited_nodes = [] while True: if search_edge.is_empty(): return 'failed' nd = search_edge.pop() visited_nodes.append(nd) for action in problem.actions(nd.state): son = nd.son_node(problem, action) if not(son in visited_nodes) or search_edge.nodes: if problem.objective(son.state): return solution(son) search_edge.insert(son) def solution(node): pass
true
718f3a473bbf36151369f12f5bbe606bbf2b40a5
Python
michaelforrest/grimonium
/resources/APIMidi/APIMidi/Utils.py
UTF-8
3,427
2.78125
3
[]
no_license
import Live from consts import * #from struct import * def getSong(): """Gets a the current Song instance""" return Live.Application.get_application().get_document() def getScene(num): scenes = getSong().scenes if num < len(scenes): return scenes[num] else: return None def getTrack(num): """Returns track number (num) (starting at 0)""" tracks = getSong().tracks if num < len(tracks): return tracks[num] else: return None def getClip(track,scene): track = getTrack(track) if track and scene < len(track.clip_slots): clipSlot = track.clip_slots[scene] if clipSlot.has_clip: return clipSlot.clip return None def getClipSlot(track,scene): track = getTrack(track) if track and scene < len(track.clip_slots): return track.clip_slots[scene] return None def translateString(text): """ Convert a string into a sysex safe string """ result = () length = len(text) for i in range(0,length): charCode = ord(text[i]) if charCode < 32: charCode = 32 elif charCode > 127: charCode = 127 result = (result + (charCode,)) return result def mapAllCCs(script_handle, midi_map_handle, channel): for cc in range(127): Live.MidiMap.forward_midi_cc(script_handle,midi_map_handle,channel,cc) def mapAllNotes(script_handle, midi_map_handle, channel): for note in range(127): Live.MidiMap.forward_midi_note(script_handle,midi_map_handle,channel,note) def intToMidi(int): """ Convert an unsigned int to a tuple containing two 7-bit bytes. Max int value is 14 bits i.e. 16384 """ msb = (int >> 7) & 0x7F lsb = int & 0x7F return (msb,lsb) #def midiToFloat(midi): # """ # Convert a 5 byte list of midi data (i.e. 
7 bit bytes) to a float # """ # #m1 = midi[0] # m2 = midi[1] # m3 = midi[2] # m4 = midi[3] # #m5 = midi[4] # # # bottom 4 bits of m1 and top 4 bits of m2 # o1 = ((midi[0] & 0x0F) << 4) + ((m2 & 0x78) >> 3) # # bottom 3 bits of m2 and top 5 bits of m3 # o2 = ((m2 & 0x07) << 5) + ((m3 & 0x7C) >> 2) # # bottom 2 bits of m3 and top 6 bits of m4 # o3 = ((m3 & 0x03) << 6) + ((m4 & 0x7E) >> 1) # # bottom bit of m4 and m5 # o4 = ((m4 & 0x01) << 7) + midi[4] # # return unpack("f",chr(o1) + chr(o2) + chr(o3) + chr(o4)) # #def floatToMidi(x): # """ # Convert a float to midi, a list of 5 7-bit bytes # """ # s = pack("f",x) # b1 = ord(s[0]) # b2 = ord(s[1]) # b3 = ord(s[2]) # b4 = ord(s[3]) # # #print(hex(b1) + " " + hex(b2) + " " + hex(b3) + " " + hex(b4)) # #print(tobin(b1) + tobin(b2) + tobin(b3) + tobin(b4)) # # # Create 5 7-bit bytes. # # Top 4 bits of b1 # o1 = (b1 & 0xF0) >> 4; # # bottom 4 bits of b1 and top 3 of b2 # o2 = ((b1 & 0x0F) << 3) + ((b2 & 0xE0) >> 5) # # bottom 5 of b2 and top 2 of b3 # o3 = ((b2 & 0x1F) << 2) + ((b3 & 0xC0) >> 6) # # bottom 6 of b3 and top bit of b4 # o4 = ((b3 & 0x3F) << 1) + ((b4 & 0x80) >> 7) # # bottom 7 of b4 # o5 = b4 & 0x7F; # # #print(tobin(o1,4) + tobin(o2,7) + tobin(o3,7) + tobin(o4,7) + tobin(o5,7)) # # return [o1,o2,o3,o4,o5]
true
6e02367a562caeafc3daeede6971b1de086dae89
Python
stellakaniaru/practice_solutions
/3rd_person_singular_function.py
UTF-8
999
4.21875
4
[]
no_license
''' The third persom singular verb form in English can be distinguished using the following rules: 1. If the verb ends with y, remove it and add ies. 2. If the verb ends in o, ch, s, sh, x or z, add es. 3. By default just add s. Create a function make_3sg_form() that given a verb in infinitive form returns its third person singular form. Test your function with words like: try, brush, run and fix. Note:Regard the rules as heuristic,ie don't expect them to work for all cases. Look at the string method endswith() ''' def make_3sg_form(verb): #checks if the verb ends with y, removes it and replaces it with ies if it evaluates to true if verb.endswith('y') == True: return verb.replace('y', 'ies') #checks if the verb ends with any of the strings in the tuple and adds es if it evaluates to true elif verb.endswith(('o', 'ch', 's', 'sh', 'x', 'z')) == True: return verb + 'es' #adds an s to the verb if it doesnt meet any of the criteria above else: return verb + 's'
true
acab8331fa1e2917c645f52b4417b8d4d5b93f71
Python
Juboo/xbotpp
/xbotpp/protocol/irc.py
UTF-8
7,497
2.625
3
[]
no_license
import xbotpp import irc.client as irclib_client import irc.dict as irclib_dict import irc.bot as irclib_bot import irc.buffer as irclib_buffer import irc.modes as irclib_modes from xbotpp import debug from xbotpp import handler class ServerSpec: '''\ An IRC server specification. >>> ServerSpec('irc.stormbit.net') <ServerSpec host='irc.stormbit.net', port=6667, password=None> >>> ServerSpec('irc.stormbit.net', 6665) <ServerSpec host='irc.stormbit.net', port=6665, password=None> >>> ServerSpec('my.znc.instance', 6666, 'username:password') <ServerSpec host='my.znc.instance', port=6666, password='username:password'> Coercing a ServerSpec object to a string will give you the host and port, but not the password: >>> str(ServerSpec('my.znc.instance', 6666, 'username:password')) 'my.znc.instance:6666' ''' def __init__(self, host, port=6667, password=None): self.host = host self.port = port self.password = password def __str__(self): return '%s:%d' % (self.host, self.port) def __repr__(self): s = "<ServerSpec host={host}, port={port}, password={password}>" return s.format(host=repr(self.host), port=repr(self.port), password=repr(self.password)) class irc(irclib_client.SimpleIRCClient): '''\ Our IRC client class. 
''' def __init__(self): super(irc, self).__init__() debug.write('Initialized IRC protocol library.', debug.levels.Info) self.network = xbotpp.config['networks'][xbotpp.state.network] self.channels = irclib_dict.IRCDict() self._nickname = self.network['nick'] self._realname = xbotpp.config['bot']['owner'] debug.write('Nickname: %s' % self._nickname, debug.levels.Info) debug.write('Realname: %s' % self._realname, debug.levels.Info) # Get hosts from the config and transform them into ServerSpec objects self.hosts = [] serverpass = self.network['server_password'] if 'server_password' in self.network else None for host in [s.strip() for s in self.network['servers']]: host = host.split(":") self.hosts.append(ServerSpec(host[0], int(host[1]), serverpass)) # add events _on_events = [ 'disconnect', 'join', 'kick', 'mode', 'namreply', 'nick', 'part', 'quit', 'nicknameinuse', 'welcome', ] for event in _on_events: self.connection.add_global_handler(event, getattr(self, '_on_' + event, None), -20) for event in ['privmsg', 'pubmsg', 'privnotice', 'pubnotice']: self.connection.add_global_handler(event, self.generic_message, -20) LineBuffer = irclib_buffer.DecodingLineBuffer LineBuffer.errors = 'replace' self.connection.buffer_class = LineBuffer def _connect(self): server = self.hosts[0] try: debug.write('Connecting to %s...' 
% server, debug.levels.Info) self.connect(server.host, server.port, self._nickname, server.password, ircname=self._realname) except irclib_client.ServerConnectionError: debug.write('Error connecting to %s' % server, debug.levels.Info) def _on_disconnect(self, client, event): debug.write('Disconnected.', debug.levels.Info) self.channels = irclib_dict.IRCDict() def _on_join(self, client, event): channel = event.target nick = event.source.nick if nick == client.get_nickname(): self.channels[channel] = irclib_bot.Channel() self.channels[channel].add_user(nick) handler.handlers.on_user_join(handler.event.user_join(nick)) def _on_kick(self, client, event): nick = event.arguments[0] channel = event.target if nick == client.get_nickname(): del self.channels[channel] else: self.channels[channel].remove_user(nick) def _on_mode(self, client, event): modes = irclib_modes.parse_channel_modes(" ".join(event.arguments)) target = event.target if irclib_client.is_channel(target): channel = self.channels[target] for mode in modes: if mode[0] == "+": f = channel.set_mode else: f = channel.clear_mode f(mode[1], mode[2]) else: pass def _on_namreply(self, client, event): channel = event.arguments[1] for nick in event.arguments[2].split(): nick_modes = [] if nick[0] in self.connection.features.prefix: nick_modes.append(self.connection.features.prefix[nick[0]]) nick = nick[1:] for mode in nick_modes: self.channels[channel].set_mode(mode, nick) self.channels[channel].add_user(nick) def _on_nick(self, client, event): before = event.source.nick after = event.target for channel in self.channels.values(): if channel.has_user(before): channel.change_nick(before, after) handler.handlers.on_user_change_nick(handler.event.user_change_nick(before, after)) def _on_part(self, client, event): nick = event.source.nick channel = event.target if nick == client.get_nickname(): del self.channels[channel] else: self.channels[channel].remove_user(nick) 
handler.handlers.on_user_part(handler.event.user_part(nick)) def _on_quit(self, client, event): nick = event.source.nick for channel in self.channels.values(): if channel.has_user(nick): channel.remove_user(nick) handler.handlers.on_user_part(handler.event.user_part(nick)) def _on_nicknameinuse(self, client, event): debug.write('Nickname in use, appending an underscore.', debug.levels.Info) client.nick(client.get_nickname() + "_") def _on_welcome(self, client, event): debug.write('Connected, joining channels.', debug.levels.Info) for channel in [s.strip() for s in self.network['channels']]: client.join(channel) def get_nickname(self): '''\ Returns the current bot nickname. ''' return self.connection.get_nickname() def send_message(self, target, message): '''\ Sends `message` to `target` on the server. ''' self.connection.privmsg(target, message) def generic_message(self, client, event): '''\ Generic IRC message handler. Dispatches message events with the correct type when they are recieved from the server. This function is called by the underlying IRC library, and should not be called by the user. ''' h = handler.event.message(event.source.nick, event.target, event.arguments[0], event.type) handler.handlers.on_message(h) def disconnect(self, message="See ya~"): '''\ Disconnect from the server, with the quit message `message`. ''' debug.write('Disconnecting: %s' % message, debug.levels.Info) self.connection.disconnect(message) def get_version(self): return 'xbot++ %s' % xbotpp.__version__ def start(self): '''\ Connect to the server, and wait for events to process. ''' self._connect() self.ircobj.process_forever()
true
b7e988800d0fbd3704bbf30366c29e08364f0243
Python
tabris233/csdnspider
/studyscrapy/spiders/studyscrapy.py
UTF-8
2,071
2.515625
3
[]
no_license
from scrapy.http import Request from scrapy.selector import Selector from scrapy.spiders import CrawlSpider from studyscrapy.items import StudyscrapyItem class studyscrapy(CrawlSpider): name = 'studyscrapy' url = 'https://blog.csdn.net/nav/game' start_urls = [ url, # 'http://ip.chinaz.com/getip.aspx', ] def start_requests(self): # urls = [self.url for i in range(1, 20)] # for url in urls: while True: for _ in range(10): yield Request(self.url, callback=self.parse_detail, dont_filter=True) def parse_detail(self, response): self.log(' >>>>>>>>>>>>>>>>>> 点开<首页>的URL了 <<<<<<<<<<<<<<<<<< ') selector = Selector(response) infos = selector.xpath('//*[@id="feedlist_id"]/li') # print(len(infos)) # print(infos,' ---------------------------------------- ') cnt = 0 for info in infos: cnt += 1 if cnt == len(infos): break # print(info, ' <<<<<< ',cnt) # title = info.xpath('div/div[2]/h2/a/text()').extract()[0].strip() _url = info.xpath('div/div[2]/h2/a/@href').extract()[0] yield Request(url=_url, callback=self.parse) def parse(self, response): # 将我们需要的数据都解析出来 并交给CsdnspiderPipeline管道处理 self.log(' >>>>>>>>>>>>>>>>>> 点开文章的URL了 <<<<<<<<<<<<<<<<<< ') item = StudyscrapyItem() selector = Selector(response) # article_title = selector.css('.title-articlel') # created_time = selector.css('.time') article_title = selector.xpath('//h6[@class="title-article"]/text()').extract()[0] created_time = selector.xpath('//span[@class="time"]/text()').extract()[0] # print(type(article_title)) # print(article_title) # print(type(created_time)) # print(created_time) item['title'] = article_title item['time'] = created_time yield item
true
8c3831e133b1bbca57e3d360f96fb02f67ca5a42
Python
sleepinghungry/wwif
/students/shane/shanemichaelaaronagee.py
UTF-8
14,569
2.6875
3
[ "MIT" ]
permissive
import os import sys sys.path.insert(1, os.path.join(sys.path[0], '../..')) sys.path.insert(1, os.path.join(sys.path[0], '../../bwx_adventure')) from bwx_adventure.advent import * from bwx_adventure.advent_devtools import * game = Game("Willow Of Death") playername = input("GUARDIAN ANGEL: What is your name???") print("GUARDIAN ANGEL: Well", playername, "it's nice to see someone is alive, you are one of few survivers of the zombie apacolypse." " But you are blind, don't worry I am your guardian angel, I will explain everything as you go... good luck...") porch = game.new_location( "Porch", "You are on a porch. It is raining. To the North is an office door to the west is a ramp.") player = game.new_player(porch) vestibule = game.new_location( "Vestibule", "This is a small area at the bottom of a flight of stairs. There is an office door to the west. You have an exit from where you came.") upstairs = game.new_location( "Upstairs Hallway", "This is a hallway with a door to the east, And stairs going down.") office = game.new_location( "Office", """A nicely organized office. There is a door to the east.""") family = game.new_location( "Family Room", "This is a large room with a fridge and an open window to the north. There is also a door to the west.") yard = game.new_location( "Yard", "This is a small yard with barbed wire all around. There is only an exit from were you came.") step = game.new_location( "Stepway", "This is a stepway with a door to the north and west.") computer = game.new_location( "Computer Lab", "This is a small room with tables spread out in the room, and a door to the south.") lindas_room = game.new_location( "Room Four", "This is a skinny but tall room. 
There is a whitebourd here, and a door to the west.") storage_room1 = game.new_location( "Storage Room 1", "This is a large room with boxes of school suplies.") storage_room2 = game.new_location( "Storage Room 2", "More boxes and more boxes...") key = office.new_object( "key", "this is a bronze small key") linda_porch = game.new_location( "Small Blacktop Porch", "This is a small porch outside. Broken, crashed cars, all around the blacktop") blacktop = game.new_location( "Blacktop Pathway", "There is one small pathway to a room leading North.") split_RLS = game.new_location( "Three way fork", "There are three rooms here. One to the West saying SUE, another to the North saying ROBIN, and the final to the East saying LACY.") robins_room = game.new_location( "Robins room", "This is an all around large room, with a nice opening to the East.") sues_room = game.new_location( "Sues room", "A small well organized room.") lacys_room = game.new_location( "Lacys room", "It's to dark to see anything in here.") secretrd = game.new_location( "Robins room prt2", "This is a very tight space, with tons of book shelfs. 
There is a trap door under a wooden chair.") mazelol = game.new_location( "Maze room one LOL", "This is a room going in four different directions no ends to be seen.") secret_maze = game.new_location( "Secret Room In Maze", "This is a room with very nice decorations.") apple_sword = secret_maze.new_object( "ninja sword", "ancient ninja sword") bathroom1 = game.new_location( "Bathroom 1", "Small bathroom with a toilet and a sink.") cellar_passage = game.new_location( "Cellar Passage", "This is a dark cold hallway that leeds north.") cellar_passage1 = game.new_location( "Cellar Passage", "small empty corner room with passage west, and from where you came.") cellar_passage2 = game.new_location( "Cellar Passage", "cold empty hallway leading west.") cellar_barn = game.new_location( "Cellar Passage", "This is a small area with a ladder leading to a trap door.") barn = game.new_location( "Barn", "This is a lardge barn with a stage to the north, and a door to the south.") bkey = Object("skull key", "Small barn shaped key.") stage = game.new_location( "Stage", "Large stage with a wordrobe, and a passage to the west.") barn_water = game.new_location( "Barn House", "Small barn area, there is a water fountain here.") barn_house_on_shore = game.new_location( "Barn House", "Dirty old cabin. There is only one exit to the north.") lake_shore = game.new_location( "Lake Shore", "This is the shore of Emigrant Lake. Emigrant Lake has been turned into acid. There is a hut to the west, and a pathway to the east.") old_hut = game.new_location( "Old Hut", "This is an old hut on the shore.") lake_shore1 = game.new_location( "lake Shore", "This is another part of the lake shore. There is a pathway to the east and south. !ON'TD OG OUHST! says a sighn in mysterios letters.") terrorrized = game.new_location( "You Die Here", "The END!!!:)") game_over_location = game.new_location( "You Are Dead", "Sorry, you have died. 
There's nowhere to go except to exit the program.") lake_shorewall = game.new_location( "Lake Shore", "This is a lake shore with a wall in front of you. There is no way around.") lake_shore2 = game.new_location( "Lake Shore", "There is now a wall behind you that is non climable. But there is a passage to the north.") barn_porch = game.new_location( "Barn Entree", "Small porch with a pathway leading south.") game.new_connection("barn_entree", stage, barn_porch, [IN, WEST], [OUT]) game.new_connection("Game Over", terrorrized, game_over_location, [IN, OUT, NORTH, EAST, WEST, SOUTH, NORTH_EAST, NORTH_WEST, SOUTH_EAST, SOUTH_WEST], [NOT_DIRECTION]) game.new_connection("What3", lake_shore1, terrorrized, [IN, SOUTH], [OUT, NORTH]) game.new_connection("Wall", lake_shore1, lake_shorewall, [IN, EAST], [OUT, WEST]) game.new_connection("What2", lake_shore, lake_shore1, [IN, EAST], [OUT, WEST]) game.player.health = 30 game.new_connection("What1", lake_shore, old_hut, [IN, WEST], [OUT, EAST]) game.new_connection("What", barn_house_on_shore, lake_shore, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Mini Barn", stage, barn_water, [IN, WEST], [OUT, EAST]) game.new_connection("Stage Way", barn, stage, [IN, NORTH], [OUT, SOUTH]) araya = game.new_connection("Trap Barn Door", cellar_barn, barn, [IN, UP], [OUT, DOWN]) araya.make_requirement(bkey) game.new_connection("Cellar to Cellar Barn", cellar_passage2, cellar_barn, [IN, WEST], [OUT, EAST]) game.new_connection("Secret Cellar1", cellar_passage1, cellar_passage2, [IN, WEST], [OUT, EAST]) game.new_connection("Secret Sellar", cellar_passage, cellar_passage1, [IN,NORTH], [OUT, SOUTH]) game.new_connection("Bathroom Door", family, bathroom1, [IN, WEST], [OUT, EAST]) game.new_connection("Secret lol", mazelol, secret_maze, [IN, DOWN], [OUT, UP]) game.new_connection("secret robin", robins_room, secretrd, [IN, EAST], [OUT, WEST]) game.new_connection("Porch Top", linda_porch, blacktop, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Robins 
door", split_RLS, robins_room, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Sues door", split_RLS, sues_room, [IN, WEST], [OUT, EAST]) game.new_connection("Lacys door", split_RLS, lacys_room, [IN, EAST], [OUT, WEST]) game.new_connection("Blacktop to RLS.", blacktop, split_RLS, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Lindas porch door", lindas_room, linda_porch, [IN, WEST], [OUT, EAST]) game.new_connection("Storage2door", upstairs, storage_room2, [IN, WEST], [OUT, EAST]) game.new_connection("Storage1door", upstairs, storage_room1, [IN, EAST], [OUT, WEST]) andre = game.new_connection("Linda Lab", computer, lindas_room, [IN, SOUTH], [OUT, NORTH]) andre.make_requirement(key) game.new_connection("Lab Door", step, computer, [IN, WEST], [OUT, EAST]) game.new_connection("Vestibule Door Outside", step, family, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Ramp", porch, step, [IN, WEST], [OUT, EAST]) game.new_connection("Window", family, yard, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Stairs", vestibule, upstairs, [IN, UP], [OUT, DOWN]) game.new_connection("Vestibule Door", porch, vestibule, [IN, NORTH], [OUT, SOUTH]) game.new_connection("Office Door", vestibule, office, [IN, WEST], [OUT, EAST]) ropecoil = Object("rope", "long coil of rope") pickaxe = Object("pickaxe", "smallish iron pickaxe") open_wardrobe = False cellar_barn.add_object(pickaxe) def open_wardrobe(game,thing): global stage open_wardrobe = True game.output("You open the wardrobe revealing a coil of rope") stage.add_object(ropecoil) stage.add_phrase("open wardrobe", open_wardrobe) dog = Pet("Dog") dog.set_location(porch) dog.set_allowed_locations([porch]) yard.add_object(Drink("vial", "a small vial of bright green glowing liquid", Die("choking violently and collapsing onto the floor..."), Object("empty vial", "an empty vial with an acrid odor"))) bathroom1.new_object("pile of toilet paper", "soggy pile of toilet paper on floor.") whiteboard_markedup = False marker = Object("marker", "small 
red marker") keys = Object("pair of keys", "small pair of keys") def draw_on_whiteboard(game,thing): global whiteboard_markedup whiteboard_markedup = True game.output("You write on the board without thinking, it seems you wrote some sort of spell. !ERAD TI!") marker.add_phrase(["draw on whiteboard", "draw on board"], draw_on_whiteboard) def climb_wall(game,thing): global lake_shorewall if not "ropecoil" and "pickaxe" in game.player.inventory: game.output("You cannot climb this without something sharp that hooks to a rope, and a rope of course.") else: game.output("Uising the pickaxe and the rope you climb the wall.") player.set_location(lake_shore2) lake_shorewall.add_phrase("climb wall", climb_wall) storage_room1.add_object(marker) armor = vestibule.new_object("armor", "a shiny pair of armor") def put_on_armor(game,thing): game.player.health += 20 game.output("You put the armor and become more protected.") armor.add_phrase(["wear armor", "put on armor", "equip armor"], put_on_armor) talk_to_man = False old_fisherman = Actor("Old skinny fisherman") old_fisherman.set_location(old_hut) def talk_to_man(game,thing): global old_hut talk_to_man = True game.output("The old fisherman says he will trade you a fish, for a boat that will " "lead you back to the school. There is a fishing pole in the cellar of " "the cabin on the northern side of the island. Here are the keys, he sets down a pair of keys.""") old_hut.add_object(keys) old_fisherman.add_phrase(["talk to man", "talk to old man", "talk to old fisherman"], talk_to_man) talk_to_ft = False fortune_teller = Actor("Fortune teller") fortune_teller.set_location(terrorrized) def talk_to_ft(game,thing): global terrorrized global playername talk_to_ft = True playerftanswer = input("Would you like me to tell you your fortune...?") if playerftanswer == "yes": game.output(playername, "Your future is that you are stuck in this room forever..." "you will either quit... 
or restart...(: Good Bye", playername) else: game.output("Yor loss... tell me when you want to know...") fortune_teller.add_phrase(["talk to fortune teller", "talk to ft"], talk_to_ft) morokunda = None def read_board(game,thing): global whiteboard_markedup global morokunda if whiteboard_markedup: morokunda = Actor("Huge Three Headed Monster.") morokunda.set_location(thing) morokunda.add_phrase("fight monster", fight_morokunda) game.add_actor(morokunda) game.output("A giant 3 headed monster has appeared in the room!") else: game.output("There is nothing to read.") lindas_room.add_phrase(["read board", "read whiteboard"], read_board) zombie = Animal("zombie") zombie.set_location(yard) zombie.set_allowed_locations([yard]) game.add_actor(zombie) zombie.add_phrase("fight zombie", Say("you kill the zombie with a massive stab through the heart.")) miniz = Actor("tiny zombie") miniz.set_location(family) game.add_actor(miniz) knife = office.new_object("knife", "a rusty old knife") def fight_miniz(game, thing): if not "knife" in game.player.inventory: game.output("You try to stab the zombie with the knife, but it bites you.") game.output("You turn to the undead.") player.terminate() else: game.output("You stab the zombie and as you do he shreeks and dies.") miniz.terminate() miniz.add_phrase("fight tiny zombie", fight_miniz) def fight_morokunda(game, thing): if "ninja sword" in game.player.inventory: game.output("Using the ninja sword to avoid it's giant tenticles, and stab the giant beast.") game.output("the monster drops a skull key") lindas_room.add_object(bkey) morokunda.terminate() else: game.output("You have nothing strong enouph to peirce it's skin.") game.output("You have died.") player.terminate() drink_water = False def drink_water(game,thing): global barn_water global barn_house_on_shore global drink_water drink_water = True player.set_location(barn_house_on_shore) game.output("When drinking the water you telaport to a new place. 
Have a look around.") barn_water.add_phrase("drink water", drink_water) open_fridge = False def open_fridge(game,thing): global family open_fridge = True print("You open the fridge, revealing a bottle of soda.") family.add_phrase("open fridge", open_fridge) pull_soda = False def pull_soda(game,thing): global family global cellar_passage pull_soda = True print("The Dr.Pepper was a lever and you pulled it. A secret passage has apeared from inside the fridge.") game.new_connection("Secret Fridge Passage", family, cellar_passage, [IN, EAST], [OUT, WEST]) family.add_phrase(["take soda", "take dr.pepper", "take Dr.Pepper"], pull_soda) def move_chair(game, thing): global secretrd global mazelol game.new_connection("Trap door RR", secretrd, mazelol, [IN, DOWN], [OUT, UP]) game.output("The chair has been moved.") secretrd.add_phrase(["move chair", "pull chair"], move_chair) game.add_actor(player) game.add_actor(dog) game.run()
true
a5fec7f2cfa9c279621838a883a1a94f932f9fdd
Python
RyanPoy/sweet_orm
/tests/unit/test_mysql_recordset_update.py
UTF-8
2,181
2.515625
3
[]
no_license
#coding: utf8 import unittest from sweet_orm.db import MySQL from sweet_orm.db.recordset import MySQLRecordset from unittest import mock class TestMySQLRecordsetUpdate(unittest.TestCase): def get_db(self): class FakeDB(mock.MagicMock): qutotation = '`' paramstyle = '%s' FakeDB.aqm = MySQL.aqm return FakeDB() def test_update(self): db = self.get_db() db.execute_rowcount = mock.MagicMock(return_value=3) tb = MySQLRecordset(db=db, tablename='users').where(age__gte=40).or_where(name="Ryan") tb.update(age=20, name='nothing') db.execute_rowcount.assert_called_once_with('UPDATE `users` SET `age` = %s, `name` = %s WHERE `age` >= %s OR `name` = %s', *[20, 'nothing', 40, 'Ryan']) def test_update_with_join(self): db = self.get_db() db.execute_rowcount = mock.MagicMock(return_value=3) tb = MySQLRecordset(db=db, tablename='users').where(id=[1,2,3]).or_where(name="Ryan").join('cars', "users.id=cars.user_id") tb.update(name='nothing') db.execute_rowcount.assert_called_once_with('UPDATE `users` INNER JOIN `cars` ON `users`.`id` = `cars`.`user_id` SET `name` = %s WHERE `id` IN (%s, %s, %s) OR `name` = %s', *['nothing', 1, 2, 3, 'Ryan']) def test_increase(self): db = self.get_db() db.execute_rowcount = mock.MagicMock(return_value=3) tb = MySQLRecordset(db=db, tablename='users').where(age__gte=40).or_where(name="Ryan") tb.increase(age=10, score=20) db.execute_rowcount.assert_called_once_with('UPDATE `users` SET `age` = `age` + %s, `score` = `score` + %s WHERE `age` >= %s OR `name` = %s', *[10, 20, 40, 'Ryan']) def test_decrease(self): db = self.get_db() db.execute_rowcount = mock.MagicMock(return_value=3) tb = MySQLRecordset(db=db, tablename='users').where(age__gte=40).or_where(name="Ryan") tb.decrease(age=10, score=20) db.execute_rowcount.assert_called_once_with('UPDATE `users` SET `age` = `age` - %s, `score` = `score` - %s WHERE `age` >= %s OR `name` = %s', *[10, 20, 40, 'Ryan']) if __name__ == '__main__': unittest.main()
true
f70c51c73a3c3b874cb9412e7b69b33319ec2852
Python
Onthesly/Bitcamp212
/MiniProject(OpenCV)/bill/bill_candidate.py
UTF-8
2,116
2.890625
3
[]
no_license
import cv2 import numpy as np def color_candidate_image(image, center): h, w = image.shape[:2] fill = np.zeros((h+2, w+2), np.uint8) # 채움 행렬 dif1, dif2 = (25,25,25), (25,25,25) # 채움 색상 범위 flags = 0xff00 + 4 + cv2.FLOODFILL_FIXED_RANGE # 채움 방향 및 방법 flags += cv2.FLOODFILL_MASK_ONLY # 결과 영상만 채움 ## 후보 영역을 유사 컬러로 채우기 pts = np.random.randint(-15, 15, (20,2)) # 임의 좌표 20개 생성 pts = pts + center # 중심 좌표로 평행이동 for x, y in pts: # 임의 좌표 순회 if 0 <= x < w and 0 <= y < h: # 후보 영역 내부이면 _, _, fill, _ = cv2.floodFill(image, fill, (x, y), 255, dif1, dif2, flags) # 채움 누적 return cv2.threshold(fill, 120, 255, cv2.THRESH_BINARY)[1] def move_num(image, rect): center, (w, h), angle = rect # 중심 좌표, 크기, 회전 각도 if w < h : # 세로가 긴 영역이면 w, h = h, w # 가로와 세로 맞바꿈 angle += 90 # 회전 각도 조정 size = image.shape[1::-1] # 행태와 크기는 역순 mov_mat = np.float32([[1,0,8], [0,1,0]]) # x,y 이미지 이동 계산 mov_img = cv2.warpAffine(image, mov_mat, size) # 이동 변환 crop_img = cv2.getRectSubPix(mov_img, (w, h), center) # 후보 영역 가져오기 # crop_img = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY) # 명암도 영상 return crop_img
true
f3eb955ba2ad6133766b641d850961f4a27aaead
Python
ashxo/pyprojects
/updated-course-work-7.py
UTF-8
1,969
2.609375
3
[]
no_license
from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as ec from selenium.webdriver.support.wait import WebDriverWait def test_google(): driver = webdriver.Chrome(); wait = WebDriverWait(driver, 5) driver.get("http://localhost/litecart/public_html") #goods = int(driver.find_element_by_xpath("//span[@class='quantity']").text) cart_quantity_css = 'span[class="quantity"]' cart_quantity = driver.find_element_by_css_selector(cart_quantity_css) for i in range(3): goods = cart_quantity.text products_css = 'div[id="box-campaign-products"]' first_product_css = products_css + ' div[class^="col-xs"]:nth-child(1)' first_product = driver.find_element_by_css_selector(first_product_css) first_product.click() #driver.find_element_by_xpath("//img[@alt='Yellow Duck']").click() try: size_presence = wait.until( ec.presence_of_element_located((By.CSS_SELECTOR, 'select[class ="form-control"] option[value="Small"]'))) except: size_presence = False if size_presence: driver.find_element_by_css_selector('select[class ="form-control"] option[value="Small"]').click() #driver.find_element_by_css_selector('select[class ="form-control"] option[value="Small"]').click() driver.find_element_by_xpath("//button[@name='add_cart_product']").click() driver.find_element_by_xpath("//a[@href='/litecart/public_html/']").click() plus_one = str(int(goods) + 1) wait.until(ec.text_to_be_present_in_element((By.CSS_SELECTOR, cart_quantity_css), plus_one)) driver.find_element_by_id('cart').click() WebDriverWait(driver, 7).until(ec.element_to_be_clickable((By.XPATH, "//button[@name='remove_cart_item']"))) driver.find_element_by_xpath("//button[@name='remove_cart_item']").click() driver.quit()
true
8641187e375d69d677682a7bc51c01113c0082f0
Python
Deep-Karmokar/Python_Practice_Problems
/char_using_num.py
UTF-8
418
3.8125
4
[]
no_license
dictionary = {2: ["a", "b", "c"], 3: ["d", "e", "f"], 4: ["g", "h", "i"], 5: ["j", "k", "l"], 6: ["m", "n", "o"], 7: ["p", "q", "r", "s"], 8: ["t", "u", "v"], 9: ["w", "x", "y", "z"]} string = input("Enter series of number: ") # 9999335533 output = "" for i in range(len(string)): for key in string: value = string.count(key) - 1 output += dictionary[int(key)][value] break print(output)
true
33821c327c9e56d27fcbb2659f4b68318efc5985
Python
ajaycharan/vehicles
/src/vehicles/library/worlds/utils.py
UTF-8
1,367
2.71875
3
[]
no_license
from . import PolyLine, contract, np @contract(cell_width='>0') def random_checkerboard(cell_width): ''' Utility function to obtain a random checker board. ''' texture = ['vehicles.library.textures.RandomCheckerboard', dict(cell_width=cell_width, seed=np.random.randint(100000))] return texture @contract(cell_width='>0', sigma='>0') def random_checkerboard_smooth(cell_width, sigma): ''' Utility function to obtain a smoothed random checker board. ''' texture = ['vehicles.library.textures.RandomCheckerboard', dict(cell_width=cell_width, seed=np.random.randint(100000))] return ['vehicles.library.textures.Smoothed', dict(sigma=sigma, texture=texture)] def blackwhite_checkerboard(cell_width): texture = ['vehicles.library.textures.BWCheckerboard', dict(cell_width=cell_width)] return texture def box(id_object, texture, width, length): ''' Returns a box. ''' points = [[-1, -1], [-1, 1], [1, 1], [1, -1], [-1, -1]] points = [(np.array(p) * np.array([width, length])).tolist() for p in points] return PolyLine(id_object=id_object, tags=[], texture=texture, points=points) class Counter: def __init__(self, start=0): self.k = start - 1 def __call__(self): self.k += 1 return self.k
true
005857005d600df88a4159f921ac6c18b0be1a17
Python
Darkhyp/python-games-for-kids
/snake_pygame/cube.py
UTF-8
1,905
3.265625
3
[]
no_license
from .CONFIGS import * import pygame class Cube: def __init__(self, surface, pos, color=SNAKE_COLOR, dir=(1, 0)): self.surface = surface self.pos = pos self.color = color self.dir = dir def move(self, dir): self.dir = dir # is reached boundaries isMove = True if dir[0] == -1 and self.pos[0] <= 0: isMove = False self.pos = (N_ROWS - 1, self.pos[1]) if dir[0] == 1 and self.pos[0] >= N_ROWS - 1: isMove = False self.pos = (0, self.pos[1]) if dir[1] == 1 and self.pos[1] >= N_COLS - 1: isMove = False self.pos = (self.pos[0], 0) if dir[1] == -1 and self.pos[1] <= 0: isMove = False self.pos = (self.pos[0], N_COLS - 1) if isMove: self.pos = (self.pos[0]+dir[0],self.pos[1]+dir[1]) def draw(self, isEyes=False): pygame.draw.rect(self.surface, self.color, (self.pos[0] * GRID_DX + 1, self.pos[1] * GRID_DY + 1, GRID_DX - 1, GRID_DY - 1)) if isEyes: centrex = GRID_DX // 2 centrey = GRID_DY // 2 radius = 3 x0 = self.pos[0] * GRID_DX + centrex y0 = self.pos[1] * GRID_DY + centrey if self.dir[0]!=0: circle1 = (x0 + centrex/2*self.dir[0], y0 - centrey/2) circle2 = (x0 + centrex/2*self.dir[0], y0 + centrey/2) else: circle1 = (x0 - centrex/2, y0 + centrey/2*self.dir[1]) circle2 = (x0 + centrex/2, y0 + centrey/2*self.dir[1]) pygame.draw.circle(self.surface, EYE_COLOR, circle1, radius) pygame.draw.circle(self.surface, EYE_COLOR, circle2, radius)
true
d6303c0ba37434e6c91673165b54148f03fba6fc
Python
nrj127/mnistHelper
/myimplementation.py
UTF-8
643
2.71875
3
[]
no_license
# -*- coding: utf-8 -*- """ Created on Sun Apr 19 18:15:03 2015 @author: neeraj """ import matplotlib.pyplot as plt from sklearn import svm,metrics from matplotlib.image import cm from mnist_database import * db=mnist_database() (images,labels)=db.get_training_data() (images1,labels1) = db.get_testing_data() classifier = svm.SVC(gamma = 0.001) classifier.fit(images,labels) expected = labels1 predicted = classifier.predict(images1) print("Classification report for classifier %s:\n%s\n" % (classifier,metrics.classification_report(expected,predicted))) print ("Confusion matrix : \n%s" % metrics.confusion_matrix(expected,predicted))
true
9cfe00b10396a268545ac529adcdafc064afc4fe
Python
nejo/python_samples
/bolsa.py
UTF-8
1,019
3.1875
3
[]
no_license
# -*- coding: utf-8 -*- from reqresp import * import re a=Request( ) # Creamos la peticion a.setUrl("http://www.bolsamadrid.es/esp/mercados/acciones/accind1_1.htm") #Url de la bolsa española a.perform() HTML=a.response.getContent() #Obtenemos el HTML results=re.findall("<TR align=right.*<TD ID=R>.*</TD></TR>",HTML,re.M) #Buscamos las lineas que contienen los datos # re.M (es un flag para las regexp, significa # que la entrada es multilines, contiene retornos de carro acciones={} # Creamos diccionario que contendra los datos for i in results: a=re.findall("([A-Za-z0-9\. ]+)</A></TD><TD>([0-9,]+)",i) # Extraemos el nombre de la empresa y el valor en bolsa empresa=a[0][0].strip() # Limpiamos el nombre valor=float(a[0][1].replace(",",".")) # Convertimos el valor de bolsa a float (sustituyendo antes las , por . acciones[empresa]=valor # Rellenamos el diccionario for i,j in acciones.items(): print i,"==>",j
true
27ebf303ae59dbed35ad63ccb9bc27fd2c643187
Python
GongkunJiang/Github
/Python/NIST_06.py
UTF-8
1,858
2.90625
3
[]
no_license
""" Author: Dell Time: 2019/11/24 16:50 """ import random import numpy as np import math as mt import os def xor(s1, s2): result = '' for i in range(len(s1)): if s1[i] == s2[i]: result += '0' else: result += '1' return result def And(s1, s2): result = '' for i in range(len(s1)): if s1[i] == s2[i] == '1': result += '1' else: result += '0' return result def Or(s1, s2): result = '' for i in range(len(s1)): if s1[i] == s2[i] == '0': result += '0' else: result += '1' return result def inverse(s): result = '' for i in range(len(s)): if s[i] == '0': result += '1' elif s[i] == '1': result += '0' if len(result) < precision: result = '1' * (precision - len(result)) + result return result def m(m1, m2): product = m1 * m2 binary = bin(product)[2:] if len(binary) < 2 * precision: binary = '0' * (2 * precision - len(binary)) + binary retain = binary[-precision:] abandon = binary[:-precision] result = xor(retain, abandon) return int(result, 2) def func(x): return m(x, domain - x) if __name__ == '__main__': precision = 64 domain = 2 ** precision x = 15125645981407076156 piece = 56 wrote = 0 length = 10 ** 9 if os.path.exists('./data_06.txt'): os.remove('./data_06.txt') for i in range(100): x = func(x) with open(r'./data_06.txt', 'a') as f: while wrote < length: if wrote % 1000000 == 0: print(int((length - wrote) / 1000000)) x = func(x) string = '0' * (precision - len(bin(x)[2:])) + bin(x)[2:] f.write(string[-piece:]) wrote += piece
true
0266bd5a41f4b9ed000f3d0bc4a3890c73ca0427
Python
slyeel/python2.7-proteomics-lib
/tests/mascot_test.py
UTF-8
8,240
2.96875
3
[ "Artistic-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/env python # # -*- coding: utf-8 -*- # # Copyright (C) University of Manchester 2011 # Julian Selley <j.selley@manchester.ac.uk> ################################################################################ """ Test Mascot Module ****************** This is a test suite for the Mascot module. It tests the elements of the Mascot module, including various file readers / streams. Overview ======== @todo 201112130955 JNS: write the documentation for this test suite """ # Metadata __version__ = '0.1.0' __author__ = 'Julian Selley <j.selley@manchester.ac.uk' __copyright__ = 'Copyright 2011 Julian Selley <j.selley@manchester.ac.uk>' __license__ = 'The Artistic License 2.0 (see the file LICENSE included with the distribution)' # This is a bit of a *hack* # to make this code work as part of the package. from os.path import join as pjoin # check ../.. for the mascot module to test import sys sys.path.append(pjoin('..', '..')) # Imports import proteomics.mascot import unittest # There is no test for the struct classes as these simply store data, without # any methods attached to test. class GroupXMLInputFileReaderTestCase(unittest.TestCase): """Test the Mascot Group XML File reader. This set of tests check the functionality of the Mascot Group XML File reader. The reader works as a reader, rather than streaming the file: this is because it is an XML file. """ def setUp(self): """Sets up the group of tests by creating a Group File Reader object, and reading the file. It stores the groups in a variable contained in the object, for testing in the various tests. """ # create a reference to the file reader. # use the Mascot Group XML File in the 'test_data' folder. self.grp_reader = proteomics.mascot.GroupXMLInputFileReader(pjoin('test_data', 'group.xml')) # read the file and store the list of groups in the test object self.groups = self.grp_reader.read_file() def test_number_groups(self): """Check the number of groups loaded from the test_data Group XML File. 
There should be 5 groups contained in the 'test_data' Mascot Group XML File. """ self.assertEqual(len(self.groups), 5, 'check the number of groups returned') def test_interrogate_groups(self): """Check the groups contain the Guest group, and then interrogate that group. Check that the 'Guests' group is defined. """ self.assertIn('Guests', [group.name for group in self.groups], 'check Guest group exists') """Identify the Guests group and make sure it's ID is 1. Check that the Group ID is also defined as an integer and not as a str. """ for group in self.groups: if (group.name != 'Guests'): continue self.assertNotEqual(group.id, '1', 'group id of guest') self.assertEqual(group.id, 1, 'group id of guest') class LogInputFileReaderTestCase(unittest.TestCase): """Test the Mascot log file reader. This set of tests check the functionality of the Mascot Log File reader. The reader works as a stream reader, rather than reading the data all in one go. It does however, open the file and read it's entire contents into memory. It only streams in terms of the way that data is passed back through the API. """ def setUp(self): """Sets up the group of tests by creating a log file reader object, and reading the file. """ # sets up a list of logs self.logs = [] # create a reference to the file reader. # use the Mascot Log File in the 'test_data' folder. self.log_reader = proteomics.mascot.LogInputFileReader(pjoin('test_data', 'searches.log')) # store the logs in a list of LogEntry for log_entry in self.log_reader: try: self.logs.append(log_entry) except StopIteration: pass def test_number_log_entries(self): """Check the number of log entries loaded from the test_data search log file. There should be 10 log entries contained in the 'test_data' Mascot Log File. """ self.assertEqual(len(self.logs), 10, 'number of log entries') def test_search_ids(self): """Check the ID's in the search log file. The search log file has been setup with sequential log ID's, from 1 to 10. 
Check this is the case. """ self.assertEqual([log.searchid for log in self.logs], range(1, 11), 'search ids') def test_ipaddr(self): """Check the IP addresses. For security purposes, the IP addresses have been changed to the wider B-class domain. Check that the IP addresses are correct, and that (because one entry is missing an IP address because it is actually a server submitted search to test the database), that there is one short of the 10 search log entries. """ self.assertEqual(''.join([log.ipaddr for log in self.logs]), '130.88.0.0' * (len(self.logs) - 1), # -1 because one of the log entries is missing an ipaddr 'IP address is 130.88.0.0') """ @todo: 20111220 JNS: test read_file @todo: 20111220 JNS: test reset """ class UserXMLInputFileReaderTestCase(unittest.TestCase): """Test the Mascot User XML File reader. This set of tests check the functionality of the Mascot User XML File reader. The reader works as a reader, rather than streaming the file: this is because it is an XML file. """ def setUp(self): """Sets up the group of tests by creating a User File Reader object, and reading the file. It stores the users in a variable contained in the object, for testing in the various tests. """ # create a reference to the file reader. # use the Mascot User XML File in the 'test_data' folder. self.usr_reader = proteomics.mascot.UserXMLInputFileReader(pjoin('test_data', 'user.xml')) # read the file and store the list of groups in the test object self.users = self.usr_reader.read_file() def test_number_users(self): """Check the number of groups loaded from the test_data User XML File. There should be 5 groups contained in the 'test_data' Mascot User XML File. """ self.assertEqual(len(self.users), 4, 'check the number of users returned') def test_interrogate_users(self): """Check the users contain the guest user, and then interrogate that user. Check that the 'guest' user is defined. 
""" self.assertIn('guest', [user.username for user in self.users], 'check guest user exists') """Identify the guest user and make sure it's ID is 1. Check that the guest ID is also defined as an integer and not as a str. Check the fullname of the guest user, and the e-mail address. """ for user in self.users: if (user.username != 'guest'): continue self.assertNotEqual(user.id, '1', 'user id of guest') self.assertEqual(user.id, 1, 'user id of guest') self.assertEqual(user.fullname, 'Guest user', 'full name of guest') self.assertEqual(user.email, 'guest@localhost', 'email address of guest') # if this test is being run from the command line, generate the relevant suites, # combine them together and then run them. if __name__ == '__main__': groupSuite = unittest.TestLoader().loadTestsFromTestCase(GroupXMLInputFileReaderTestCase) logSuite = unittest.TestLoader().loadTestsFromTestCase(LogInputFileReaderTestCase) userSuite = unittest.TestLoader().loadTestsFromTestCase(UserXMLInputFileReaderTestCase) suite = unittest.TestSuite([groupSuite, logSuite, userSuite]) #suite = unittest.TestLoader().loadTestsFromModule('mascot_test.py') unittest.TextTestRunner(verbosity=2).run(suite)
true
06fe46b1fb2028278202c804e1f848391c5908b0
Python
jrcomey/EAE127JRC
/Project 5/EAE127Project5JackComey.py
UTF-8
11,000
2.796875
3
[]
no_license
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sat Nov 21 12:32:38 2020 @author: jack """ # Imports import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt import time import copy import pandas as pd import ViscousInviscidInteraction as vivi import pyxfoil as pyx import mses plt.style.use('default') plt.style.use("seaborn-bright") params={#FONT SIZES # 'axes.labelsize':30,#Axis Labels 'axes.titlesize':30,#Title # 'font.size':20,#Textbox # 'xtick.labelsize':22,#Axis tick labels # 'ytick.labelsize':22,#Axis tick labels # 'legend.fontsize':24,#Legend font size # 'font.family':'sans-serif', # 'font.fantasy':'xkcd', # 'font.sans-serif':'Helvetica', # 'font.monospace':'Courier', # #AXIS PROPERTIES # 'axes.titlepad':2*6.0,#title spacing from axis # 'axes.grid':True,#grid on plot 'figure.figsize':(12,12),#square plots # 'savefig.bbox':'tight',#reduce whitespace in saved figures#LEGEND PROPERTIES # 'legend.framealpha':0.5, # 'legend.fancybox':True, # 'legend.frameon':True, # 'legend.numpoints':1, # 'legend.scatterpoints':1, # 'legend.borderpad':0.1, # 'legend.borderaxespad':0.1, # 'legend.handletextpad':0.2, # 'legend.handlelength':1.0, 'legend.labelspacing':0,} mpl.rcParams.update(params) #%########################### # Objects #%########################### # Functions def FindReynolds(rho, V, L, mu): Re = rho * V * L / mu return Re def CreateCamberLine(df): x = np.linspace(0, 1, 1000) up, lo = mses.MsesInterp(x, df.x, df.z) avg = up + lo avg *= 0.5 return x, avg def plothusly(ax, x, y, *, xtitle='', ytitle='', datalabel='', title='', linestyle='-', marker=''): """ A little function to make graphing less of a pain. Creates a plot with titles and axis labels. Adds a new line to a blank figure and labels it. Parameters ---------- ax : The graph object x : X axis data y : Y axis data xtitle : Optional x axis data title. The default is ''. ytitle : Optional y axis data title. The default is ''. datalabel : Optional label for data. 
The default is ''. title : Graph Title. The default is ''. Returns ------- out : Resultant graph. """ ax.set_xlabel(xtitle) ax.set_ylabel(ytitle) ax.set_title(title) out = ax.plot(x, y, zorder=1, label=datalabel, linestyle = linestyle, marker = marker) ax.grid(True) ax.legend(loc='best') return out def plothus(ax, x, y, *, datalabel='', linestyle = '-', marker = ''): """ A little function to make graphing less of a pain Adds a new line to a blank figure and labels it """ out = ax.plot(x, y, zorder=1, label=datalabel, linestyle = linestyle, marker = marker) ax.legend(loc='best') return out #%########################### # Main # Problem 1.1 # Initialize nonvariable properties mu = 3.737E-7 # slug/ft/s L = 0.5 # ft # Unit conversion: kts2fps = lambda v: (1.68781 * v) # Condition 1 rho_1 = 0.00237717 # slug/ft**3 Reynolds_1 = FindReynolds(rho_1, kts2fps(12), L, mu) print(f"Reynold's # at condition 1: {Reynolds_1:.2e}") # Condition 2 rho_2 = 0.000825628 # slug/ft**3 Reynolds_2 = FindReynolds(rho_2, kts2fps(575), L, mu) print(f"Reynold's # at condition 2: {Reynolds_2:.2e}") #%%########################### # Problem 1.2 # Laminar boundary layer laminar_boundary_u_nondim = lambda y_delta: ( 2 * y_delta - (y_delta**2)) # Turbulent Boundary Layer turbulent_boundary_u_nondim = lambda y_delta: (y_delta**(1/7)) # Data calculation y_non_dim = np.linspace(0, 1, 1000) laminar_u_non_dim = laminar_boundary_u_nondim(y_non_dim) turbulent_u_non_dim = turbulent_boundary_u_nondim(y_non_dim) fig, non_dim_boundary_layer = plt.subplots() plothusly(non_dim_boundary_layer, laminar_u_non_dim, y_non_dim, title="Non-Dimensional Boundary Layer Comparison", xtitle=r'$\frac{u}{u_e}$', ytitle=r"$\frac{y}{\delta}$", datalabel="Taxi") plothus(non_dim_boundary_layer, turbulent_u_non_dim, y_non_dim, datalabel="Cruise") # Shade in boundary layer vline = y_non_dim*0 plothus(non_dim_boundary_layer, laminar_u_non_dim, vline, linestyle='') plt.fill_betweenx(y_non_dim, vline, laminar_u_non_dim, facecolor='b', 
alpha=0.1) plt.axis('equal') # Make Arrows arrowwidth, arrowlength = 0.02, 0.02 for i in range(0, len(y_non_dim), 50): if abs(laminar_u_non_dim[i]) < arrowlength: plt.plot([0, laminar_u_non_dim[i]], [y_non_dim[i], y_non_dim[i]], color='b') else: plt.arrow(0, y_non_dim[i], laminar_u_non_dim[i]-arrowlength, 0, head_width=arrowwidth, head_length=arrowlength, color='b', linewidth=2, alpha=0.2) plothus(non_dim_boundary_layer, turbulent_u_non_dim, vline, linestyle='') plt.fill_betweenx(y_non_dim, vline, turbulent_u_non_dim, facecolor='g', alpha=0.1) plt.axis('equal') # Make Arrows arrowwidth, arrowlength = 0.02, 0.02 for i in range(0, len(y_non_dim), 50): if abs(turbulent_u_non_dim[i]) < arrowlength: plt.plot([0, turbulent_u_non_dim[i]], [y_non_dim[i], y_non_dim[i]], color='g') else: plt.arrow(0, y_non_dim[i], turbulent_u_non_dim[i]-arrowlength, 0, head_width=arrowwidth, head_length=arrowlength, color='g', linewidth=2, alpha=0.2) # Laminar Dimensional delta_x_lam = lambda Re, x: 5.0 * x * (Re**(-1/2)) laminar_y = y_non_dim * delta_x_lam(Reynolds_1, L) laminar_u = kts2fps(575) * laminar_boundary_u_nondim(laminar_y) # Turbulent Dimensional turbulent_u = turbulent_u_non_dim * kts2fps(575) delta_x_tur = lambda Re, x: 0.16 * x * (Re**(-1/7)) turbulent_y = y_non_dim * delta_x_tur(Reynolds_2, L) fig, boundary_layer_plot = plt.subplots() plothusly(boundary_layer_plot, laminar_u, laminar_y, title="Dimensional Boundary Layer Comparison", ytitle=r"$y$ [ft]", xtitle=r"$u$ [ft/s]", datalabel="Taxi") plothus(boundary_layer_plot, turbulent_u, turbulent_y, datalabel="Cruise") # Shade in boundary layer vline = laminar_y*0 plothus(boundary_layer_plot, laminar_u, vline, linestyle='') plt.fill_betweenx(laminar_y, vline, laminar_u, facecolor='b', alpha=0.1) vline = turbulent_y*0 plothus(boundary_layer_plot, turbulent_u, vline, linestyle='') plt.fill_betweenx(turbulent_y, vline, turbulent_u, facecolor='g', alpha=0.1) #%%########################### # Problem 1.3 Cflam = lambda Re: 1.328 * 
(Re**(-1/2)) C_f_laminar = 2 * Cflam(Reynolds_1) string = f"C_f for condition 1: {C_f_laminar:.4e}" print(string) Cftur = lambda Re: 0.074 * (Re**(-1/5)) C_f_turbulent = 2*Cftur(Reynolds_2) string = f"C_f for condition 2: {C_f_turbulent:.4e}" print(string) cf_lam_long = 2 * Cflam(FindReynolds(rho_1, kts2fps(12), 2*L, mu)) cf_tur_long = 2 * Cftur(FindReynolds(rho_2, kts2fps(575), 2*L, mu)) drag_lam_long = 0.5 * rho_1 * kts2fps(12)**2 * cf_lam_long drag_tur_long = 0.5 * rho_2 * kts2fps(575)**2 * cf_tur_long string = f"Drag force for double-length antenna at condition 1 is : {drag_lam_long:.5f} lbf" print(string) string = f"Drag force for double-length antenna at condition 2 is : {drag_tur_long:.5f} lbf" print(string) #%%########################### # Problem 2.1 airfoil_name = "naca23012" alpha = 0 currentiter = 0 V_inf = 1 # m/s rho = 1.225 # kg/m**3 mu = 1.789E-5 itermax = 4 df_mses = pyx.ReadXfoilAirfoilGeom('Data/naca23012/naca23012.dat') fig, viviplot = plt.subplots(figsize=(12,3)) fig, viviplot2 = plt.subplots(figsize=(12,3)) viviplot.axis('equal') plothusly(viviplot, df_mses["x"], df_mses["z"], datalabel=r"0$^{th}$", xtitle=r"$\frac{x}{c}$", ytitle=r"$\frac{z}{c}$", title="VIvI Iteration Comparison") plothusly(viviplot2, df_mses["x"], df_mses["z"], datalabel=r"", xtitle=r"$\frac{x}{c}$", ytitle=r"$\frac{z}{c}$", title="VIvI Iteration Comparison") x_avg, z_avg = CreateCamberLine(df_mses) plothus(viviplot2, x_avg, z_avg, datalabel=fr"Average Camber for Iteration {currentiter}") plt.axis("equal") fig, camberplot = plt.subplots() plothusly(camberplot, x_avg, z_avg, datalabel=fr"Average Camber for Iteration {currentiter}") for currentiter in range(itermax): theta_up, ue_up, theta_lo, ue_lo = vivi.VIvI(airfoil_name, alpha, currentiter, V_inf, mu, rho) if currentiter is not 0: df_disp = pyx.ReadXfoilAirfoilGeom(f"Data/naca23012/naca23012_{currentiter}.dat") plothus(viviplot, df_disp["x"], df_disp["z"], datalabel=rf"{currentiter}", linestyle='-') plothus(viviplot2, 
df_disp["x"], df_disp["z"], datalabel=rf"", linestyle='--') x, z = CreateCamberLine(df_disp) plothus(viviplot2, x, z, datalabel=f"Average Camber for Iteration {currentiter}") plothus(camberplot, x, z, datalabel=f"Average Camber for Iteration {currentiter}") #%%########################### # Problem 2.2 pyx.GetPolar(foil="Data/naca23012_3/naca23012_3.dat", naca=False, alfs=alpha, Re=0) third_iter_dat = pyx.ReadXfoilPolar("Data/naca23012_3/naca23012_3_polar_Re0.00e+00a0.00.dat") print(third_iter_dat.to_markdown()) theta_lo = theta_lo.to_numpy()[2:len(theta_lo)] theta_up = theta_up.to_numpy()[2:len(theta_up)] ue_up = ue_up.to_numpy()[2:len(ue_up)] ue_lo = ue_lo.to_numpy()[2:len(ue_lo)] # Find tau at every point tau_find = lambda mu, ue, theta: 0.664 * 0.332 * mu * ue / theta tau_lo = tau_find(mu, ue_lo, theta_lo) tau_up = tau_find(mu, ue_up, theta_up) cf = lambda tau: tau * (0.5 * rho * V_inf**2)**-1 cf_lo = cf(tau_lo) cf_up = cf(tau_up) x = df_disp.x.to_numpy() x = np.flip(x[2:len(tau_lo)+2]) Cf_lo = np.trapz(cf_lo, x) Cf_up = np.trapz(cf_up, x) Cf = Cf_lo + Cf_up string = f'Friction Coefficent for 3rd iteration = {Cf}' print(string) Re_3 = FindReynolds(rho, V_inf, 1, mu) string = f"Reynold's number is {Re_3:.2e}" print(string) pyx.GetPolar(foil='23012', naca=True, alfs=alpha, Re=Re_3) pyx.GetPolar(foil='23012', naca=True, alfs=alpha, Re=0) visc = pyx.ReadXfoilPolar("Data/naca23012/naca23012_polar_Re6.85e+04a0.00.dat") invisc = pyx.ReadXfoilPolar("Data/naca23012/naca23012_polar_Re0.00e+00a0.00.dat") #%%########################### # Problem 2.3
true
c85bf650e8bc628abed2ac9883993964191d1242
Python
alessiogambi/AsFault
/src/deepdrive/wrapper.py
UTF-8
9,401
2.53125
3
[ "MIT" ]
permissive
import cv2 import time import math import numpy from beamngpy import BeamNGpy, Scenario, Vehicle, setup_logging from beamngpy.sensors import Camera, GForces, Electrics, Damage from docutils.nodes import transition from shapely.geometry import Point import speed_dreams as sd def preprocess(img, brightness): # Elaborate Frame from BeamNG pil_image = img.convert('RGB') open_cv_image = numpy.array(pil_image) # Convert RGB to BGR. This is important open_cv_image = open_cv_image[:, :, ::-1].copy() # decrease_brightness and resize hsv = cv2.cvtColor(cv2.resize(open_cv_image, (280, 210)), cv2.COLOR_BGR2HSV) hsv[..., 2] = hsv[..., 2] * brightness preprocessed = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR) # Check that we are passing to the network values between 0 and 1 return preprocessed def translate_steering(original_steering_value): # Using a quadratic function might be too much # newValue = -1.0 * (0.4 * pow(original_steering_value, 2) + 0.6 * original_steering_value + 0) # This seems to over shoot. Maybe it's just a matter of speed and not amount of steering newValue = -1.0 * original_steering_value; linear_factor = 0.6 # Dump the controller to compensate oscillations in gentle curve if abs(original_steering_value) < 1: newValue = linear_factor * newValue # print("Steering", original_steering_value, " -> ", newValue) return newValue def main(MAX_SPEED): setup_logging() # Gains to port TORCS actuators to BeamNG # steering_gain = translate_steering() acc_gain = 0.5 # 0.4 brake_gain = 1.0 # BeamNG images are too bright for DeepDrive brightness = 0.4 # Set up first vehicle # ! 
A vehicle with the name you specify here has to exist in the scenario vehicle = Vehicle('egovehicle') # Set up sensors resolution = (280, 210) # Original Settings #pos = (-0.5, 1.8, 0.8) # Left/Right, Front/Back, Above/Below # 0.4 is inside pos = (0, 2.0, 0.5) # Left/Right, Front/Back, Above/Below direction = (0, 1, 0) # direction = (180, 0, 180) # FOV 60, MAX_SPEED 100, 20 (3) Hz fails # FOV 60, MAX_SPEED 80, 20 (3) Hz Ok # FOV 60, MAX_SPEED 80, 12 Hz Ok-ish Oscillations # FOV 60, MAX_SPEED 80, 10 Hz Ok-ish Oscillations # FOV 40, MAX_SPEED 50, 12 Hz Seems to be fine but drives slower # FOV 40, MAX_SPEED 80, 10 Hz Seems to be fine but drives slower fov = 60 # MAX_SPEED = 70 MAX_FPS = 60 SIMULATION_STEP = 6 # Running the controller at 20 hz makes experiments 3 to 4 times slower ! 5 minutes of simulations end up sucking 20 minutes ! # # WORKING SETTINGS: 20 Freq, 90 FOV. front_camera = Camera(pos, direction, fov, resolution, colour=True, depth=True, annotation=True) electrics = Electrics() vehicle.attach_sensor('front_cam', front_camera) vehicle.attach_sensor('electrics', electrics) # Setup the SHM with DeepDrive # Create shared memory object Memory = sd.CSharedMemory(TargetResolution=[280, 210]) # Enable Pause-Mode Memory.setSyncMode(True) Memory.Data.Game.UniqueRaceID = int(time.time()) print("Setting Race ID at ", Memory.Data.Game.UniqueRaceID) # Setting Max_Speed for the Vehicle. # TODO What's this? Maybe some hacky way to pass a parameter which is not supposed to be there... Memory.Data.Game.UniqueTrackID = int(MAX_SPEED) # Speed is KM/H print("Setting speed at ", Memory.Data.Game.UniqueTrackID) # Default for AsFault Memory.Data.Game.Lanes = 1 # By default the AI is in charge Memory.Data.Control.IsControlling = 1 deep_drive_engaged = True STATE = "NORMAL" Memory.waitOnRead() if Memory.Data.Control.Breaking == 3.0 or Memory.Data.Control.Breaking == 2.0: print("\n\n\nState not reset ! 
", Memory.Data.Control.Breaking) Memory.Data.Control.Breaking = 0.0 # Pass the computation to DeepDrive # Not sure this will have any effect Memory.indicateWrite() Memory.waitOnRead() if Memory.Data.Control.Breaking == 3.0 or Memory.Data.Control.Breaking == 2.0: print("\n\n\nState not reset Again! ", Memory.Data.Control.Breaking) Memory.Data.Control.Breaking = 0.0 # Pass the computation to DeepDrive Memory.indicateWrite() # Connect to running beamng beamng = BeamNGpy('localhost', 64256, home='C://Users//Alessio//BeamNG.research_unlimited//trunk') bng = beamng.open(launch=False) try: bng.set_deterministic() # Set simulator to be deterministic bng.set_steps_per_second(MAX_FPS) # With 60hz temporal resolution # Connect to the existing vehicle (identified by the ID set in the vehicle instance) bng.connect_vehicle(vehicle) # Put simulator in pause awaiting further inputs bng.pause() assert vehicle.skt # Road interface is not available in BeamNG.research yet # Get the road map from the level # roads = bng.get_roads() # # find the actual road. Dividers lane markings are all represented as roads # theRoad = None # for road in enumerate(roads): # # ((left, centre, right), (left, centre, right), ...) # # Compute the width of the road # left = Point(road[0][0]) # right = Point(road[0][1]) # distance = left.distance( right ) # if distance < 2.0: # continue # else: # theRoad = road; # break # # if theRoad is None: # print("WARNING Cannot find the main road of the map") while True: # Resume the execution # 6 steps correspond to 10 FPS with a resolution of 60FPS # 5 steps 12 FPS # 3 steps correspond to 20 FPS bng.step(SIMULATION_STEP) # Retrieve sensor data and show the camera data. sensors = bng.poll_sensors(vehicle) # print("vehicle.state", vehicle.state) # # TODO: Is there a way to query for the speed directly ? speed = math.sqrt(vehicle.state['vel'][0] * vehicle.state['vel'][0] + vehicle.state['vel'][1] * vehicle.state['vel'][1]) # Speed is M/S ? 
# print("Speed from BeamNG is: ", speed, speed*3.6) imageData = preprocess(sensors['front_cam']['colour'], brightness) Height, Width = imageData.shape[:2] # print("Image size ", Width, Height) # TODO Size of image should be right since the beginning Memory.write(Width, Height, imageData, speed) # Pass the computation to DeepDrive Memory.indicateWrite() # Wait for the control commands to send to the vehicle # This includes a sleep and will be unlocked by writing data to it Memory.waitOnRead() # TODO Assumption. As long as the car is out of the road for too long this value stays up if Memory.Data.Control.Breaking == 3.0: if STATE != "DISABLED": print("Abnormal situation detected. Disengage DeepDrive and enable BeamNG AI") vehicle.ai_set_mode("manual") vehicle.ai_drive_in_lane(True) vehicle.ai_set_speed(MAX_SPEED) vehicle.ai_set_waypoint("waypoint_goal") deep_drive_engaged = False STATE = "DISABLED" elif Memory.Data.Control.Breaking == 2.0: if STATE != "GRACE": print("Grace period. Deep Driving still disengaged") vehicle.ai_set_mode("manual") vehicle.ai_set_waypoint("waypoint_goal") # vehicle.ai_drive_in_lane(True) STATE = "GRACE" else: if STATE != "NORMAL": print("DeepDrive re-enabled") # Disable BeamNG AI driver vehicle.ai_set_mode("disabled") deep_drive_engaged = True STATE = "NORMAL" # print("State ", STATE, "Memory ",Memory.Data.Control.Breaking ) if STATE == "NORMAL": vehicle.ai_set_mode("disabled") # Get commands from SHM # Apply Control - not sure cutting at 3 digit makes a difference steering = round(translate_steering(Memory.Data.Control.Steering), 3) throttle = round(Memory.Data.Control.Accelerating * acc_gain, 3) brake = round(Memory.Data.Control.Breaking * brake_gain, 3) # Apply commands vehicle.control(throttle=throttle, steering=steering, brake=brake) # # print("Suggested Driving Actions: ") # print(" Steer: ", steering) # print(" Accel: ", throttle) # print(" Brake: ", brake) finally: bng.close() if __name__ == '__main__': import argparse parser = 
argparse.ArgumentParser() parser.add_argument('--max-speed', type=int, default=70, help='Speed Limit in KM/H') args = parser.parse_args() print("Setting max speed to", args.max_speed) main(args.max_speed)
true
a6006871f8396387eb024d2e63aaf436f120dba2
Python
telminov/sw-excel-parser
/sw_excel_parser/fields.py
UTF-8
4,688
2.625
3
[ "MIT" ]
permissive
import uuid import itertools from typing import Any, Type, List, Dict import dateutil.parser from sw_excel_parser import validators class UnboundField: def __init__(self, field_class: Type['Field'], *args, **kwargs): self.field_class = field_class self.args = args self.kwargs = kwargs def bind(self, item, name: str) -> 'Field': return self.field_class(*self.args, **dict(self.kwargs, _item=item, name=name)) def __repr__(self): return '<{cls} ({field_cls} (args={args}, kwargs={kwargs}))>'.format( cls=self.__class__.__name__, field_cls=self.field_class.__name__, args=self.args, kwargs=self.kwargs ) class Field: default_validators = [ validators.RequiredValidator() ] def __new__(cls, *args, **kwargs): if '_item' and 'name' in kwargs: instance = super().__new__(cls) else: instance = UnboundField(cls, *args, **kwargs) return instance def __init__(self, header: str, required: bool = True, validators: List = list(), *args, **kwargs): self.header = header self.required = required self.validators = list(itertools.chain(self.default_validators, validators)) self.value = None self._item = kwargs.get('_item') self.name = kwargs.get('name') def run_validators(self, value: Any) -> Any: for validator in self.validators: value = validator(self, value) return value def to_python(self, value: Any) -> Any: return value def extract_data(self, data: Dict) -> Any: return data.get(self.header.lower()) def clean(self, data: Dict) -> Any: self.value = self.extract_data(data) val = self.to_python(self.value) return self.run_validators(val) class BooleanField(Field): default_validators = [] def __init__(self, *args , **kwargs): super().__init__(*args, **kwargs) self.false_values = kwargs.get('false_values', []) def to_python(self, value: Any): if value in self.false_values: value = False else: value = bool(value) return value class CharField(Field): def to_python(self, value: Any): if value: value = str(value).strip() return value class DateField(Field): def __init__(self, *args, **kwargs): 
super().__init__(*args, **kwargs) self.dayfirst = kwargs.get('dayfirst', False) def to_python(self, value: Any): if value: try: value = dateutil.parser.parse(value, dayfirst=self.dayfirst).date() except (ValueError, OverflowError): raise validators.ValidationError('Некорректное значение.') return value class BaseNumericField(Field): default_validators = [ validators.RequiredValidator(), validators.MinValueValidator(), validators.MaxValueValidator() ] def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.min_value = kwargs.get('min_value', None) self.max_value = kwargs.get('max_value', None) class FloatField(BaseNumericField): def to_python(self, value: Any) -> float: if value and not isinstance(value, float): try: value = float(value) except ValueError: raise validators.ValidationError('Некорректное значение.') return value class IntegerField(BaseNumericField): def to_python(self, value: Any) -> int: if value and not isinstance(value, int): try: value = float(value) if value.is_integer(): value = int(value) else: raise validators.ValidationError('Значение не является целым.') except ValueError: raise validators.ValidationError('Некорректное значение.') return value class EmailField(CharField): default_validators = [ validators.RequiredValidator(), validators.EmailValidator() ] class UUIDField(CharField): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.version = kwargs.get('version', 4) def to_python(self, value: Any): if value: try: value = uuid.UUID(value) except ValueError: raise validators.ValidationError('Некорректное значение.') return value
true
5cd00aae006a6f36d05af5076d1cb81ee004b1f3
Python
janycode/myGitCode
/python/egg×32/12.py
UTF-8
461
3.3125
3
[]
no_license
# -*- coding: utf-8 -*- # @Author: yuan.jiang # @Date: 2019-01-18 14:45:42 # @Last Modified by: yuan.jiang # @Last Modified time: 2019-01-18 14:50:22 num = ["harden","lampard",3,34,45,56,76,87,78,45,3,3,3,87686,98,76] print(num.count(3)) print(num.index(3)) for i in range(num.count(3)): #获取3出现的次数 ele_index = num.index(3) #获取首次3出现的坐标 num[ele_index]="3a" #修改3为3a print(num)
true
185fa43409e02eb6bc2564c543c7d7274e2c9429
Python
cmcm94/Projects-1
/Numbers/next-prime.py
UTF-8
482
3.734375
4
[ "MIT" ]
permissive
count=2 answer= 'y' ##returns yes if number is prime def isprime(num): prime='yes' for x in range (2,num): if num%x == 0: prime='no' x=num else: x+=1 return prime ##start while answer == 'y': answer=input('Find next prime? (y/n): ') if not answer == 'y': quit else: print(count) count+=1 while isprime(count) is 'no': count+=1
true
902500bc80985a44414c0a5f97a4d4ccf6bef6a5
Python
Aasthaengg/IBMdataset
/Python_codes/p03324/s965577264.py
UTF-8
95
2.734375
3
[]
no_license
D, N = map(int, input().split()) D = 100 ** D if N<= 99: print(D*N) else: print(D*101)
true
3d3288caeea19047d2fe3a6286d8d607147f31cd
Python
ChanikyaMerugu/CS5590-Python
/ICE-2/file1.py
UTF-8
592
3.078125
3
[]
no_license
<<<<<<< HEAD filename=input("what file are the numbers in?") infile=open(filename,'r') sum = 0.0 count=0 line=infile.readline() while line != "": for num in line.split(","): sum=sum+eval(num) count=count+1 line=infile.readline() ======= filename=input("what file are the numbers in?") infile=open(filename,'r') sum = 0.0 count=0 line=infile.readline() while line != "": for num in line.split(","): sum=sum+eval(num) count=count+1 line=infile.readline() >>>>>>> 6c926a1c43c3b530d8567d9d7892aaee56d27cc6 print('\n avg of numbers is', sum/count)
true
328886de505b6d06018cbeb45e2e9b735f228c9e
Python
susuminmin/django_crud
/articles/urls.py
UTF-8
993
2.65625
3
[]
no_license
from django.urls import path from . import views app_name = 'articles' # url name space 를 만든 것 # ==> url 들이 articles 에 있는 ___ 페이지가 된다 (/articles/_______) urlpatterns = [ path('', views.index, name='index'), # 데이터를 전달받아서 article 생성 path('create/', views.create, name='create'), path('<int:article_pk>/', views.detail, name='detail'), path('<int:article_pk>/delete/', views.delete, name='delete'), path('<int:article_pk>/update/', views.update, name='update'), # 댓글 예: /article/3/comments/ path('<int:article_pk>/comments/', views.comment_create, name='comment_create'), # comment의 pk 를 variable routing 으로 받을 것 (댓글 데이터에도 다 pk 값 달려 있음) # 댓글 삭제하기(190924) url 추가 # /articles/3/comments/2/delete 형식으로 만들 것 path('<int:article_pk>/comments/<int:comment_pk>/delete/', views.comment_delete, name='comment_delete'), ]
true
39fd0e8f48cc2354bdcf2fc150979fa3047ca47a
Python
xxdunedainxx/Z-Py
/src/core/service/helpers/security/RandomStringGenerator.py
UTF-8
669
3.640625
4
[ "Apache-2.0" ]
permissive
import random import string class RandomStringGenerator(): rand_string_methods = [ string.ascii_letters, string.hexdigits, string.octdigits, string.digits ] def __init__(self): pass @staticmethod def grab_random_character(): return (random.choice( RandomStringGenerator.rand_string_methods[random.randint(0,len(RandomStringGenerator.rand_string_methods) - 1)] ) ) @staticmethod def random_string(strlen = 15): rstring='' for i in range(strlen): rstring+=RandomStringGenerator.grab_random_character() return rstring
true
b18eb7022e36981d63457db8ae57af8b2033276b
Python
kiiikii/ptyhon-learn
/lab-test.py
UTF-8
1,472
4.71875
5
[]
no_license
# Scenario : # Once upon a time in Appleland, John had three apples, Mary had five apples, and Adam had six apples. They were all very happy and lived for a long time. End of story. # My task is # 1 create the variables: john, mary, and adam (v) # 2 assign values to the variables. The values must be equal to the numbers of fruit possessed by John, Mary, and Adam respectively (v) # 3 having stored the numbers in the variables, print the variables on one line, and separate each of them with a comma (v) # 4 now create a new variable named totalApples equal to addition of the three former variables (v) # 5 print the value stored in totalApples to the console (v) # 6 experiment with your code : # create new variables, assign different values to them, and perform various arithmetic operations on them (e.g., +, -, *, /, //, etc.) (v) # Try to print a string and an integer together on one line, e.g., "Total number of apples:" and totalApples () #stored var value john = 3 mary = 5 adam = 6 totalapples = john + mary + adam minus = john - mary - adam times = john * mary * adam divide = john / mary / adam doudiv = john // mary // adam doutim = john ** mary ** adam # driver code print(john, mary, adam, sep=", ", end="\n\n") print(totalapples, end="\n\n") print(minus, end="\n\n") print(times, end="\n\n") print(divide, end="\n\n") print(doudiv, end="\n\n") print(doutim, end="\n\n") print("Total number of apples :" and totalapples)
true
ed7a404a7898b9e07047ab1e2efe1088ad434c2a
Python
zhujingxiu/luffy
/luffy_course/modules/teacher/main.py
UTF-8
2,083
2.625
3
[]
no_license
#!/usr/bin/env python # -*- coding:utf-8 -*- # _AUTHOR_ : zhujingxiu # _DATE_ : 2018/1/22 from conf import settings from core.logger import Logger from .models import Models from .auth import Auth class Main: __log_type = 'teacher' __teacher_menu = { '1': {'title': '查看个人信息', 'action': Models.profile, 'log': False}, '2': {'title': '查看班级', 'action': Models.show_classes, 'log': True}, '3': {'title': '查看学生', 'action': Models.show_students, 'log': False}, '4': {'title': '查看上课记录', 'action': Models.show_duties, 'log': False}, '0': {'title': '退出系统', 'action': Auth.logout, 'log': True}, } @classmethod def run(cls): teacher = None retry_count = 0 while retry_count < settings.LOGIN_ATTEMPTS: username = input("请输入讲师账户:>>").strip() password = input("请输入登录密码:>>").strip() ret = Auth.authenticate(**{'username': username, 'password': password, 'logger': Logger(cls.__log_type)}) if ret: print('\033[92m登录成功\033[0m') teacher = ret break else: print('登录失败,用户名和密码不匹配') retry_count += 1 else: print('输入错误次数太多,已强制退出') exit(0) if teacher: cls.interactive(teacher) @classmethod def interactive(cls, user): option = 0 menu = cls.__teacher_menu while not option: for k in menu: print("%s.%s" % (k, menu[k]['title'])) option = input("请输入操作序号:>>").strip() if option not in menu.keys(): option = 0 print("错误的选项") continue kwargs = {} if menu[option]['log']: kwargs['logger'] = Logger(cls.__log_type) menu[option]['action'](user, **kwargs) option = 0
true
d4ea6e061951845257b9f81220ee7c54ca69cec5
Python
gbrener/dotfiles
/python/.pythonrc.py
UTF-8
3,391
2.625
3
[]
no_license
# Import libraries needed by this file from __future__ import print_function, unicode_literals, division import subprocess import sys print('\nfrom __future__ import print_function, unicode_literals, division' '\nimport sys') def conda_install(name): """Install a library from the REPL.""" subprocess.call('conda install -y {}'.format(name), shell=True) def pip_install(name): """Install a library from the REPL.""" subprocess.call('pip install --upgrade {}'.format(name), shell=True) class LazyImporter(object): def __init__(self, name, fromlib=None, alias=None): self.name = name self.fromlib = fromlib self.alias = alias def _import_module(self): import importlib # Assumes that globals() already contains import alias/name name = object.__getattribute__(self, 'name') fromlib = object.__getattribute__(self, 'fromlib') alias = object.__getattribute__(self, 'alias') print('Importing "{}"...'.format(name)) if (alias is not None and alias in globals()) or name in globals(): del globals()[(alias or name)] try: if fromlib is None: _module = importlib.import_module(name) else: _module = getattr(importlib.import_module(fromlib), name) except ModuleNotFoundError: globals()[(alias or name)] = self response = input('Module "{0}" not found. Install "{0}"? 
([y]/n): '.format((fromlib or name))) if response in ('', 'y', 'Y', 'Yes', 'yes', 'YES'): conda_install(name) return object.__getattribute__(self, '_import_module')() raise globals()[(alias or name)] = _module return _module def __getattribute__(self, attr): _module = object.__getattribute__(self, '_import_module')() return getattr(_module, attr) def __call__(self, *args, **kwargs): _callable = object.__getattribute__(self, '_import_module')() return _callable(*args, **kwargs) __DEFERRED_IMPORTS = [] def defer_import(name, fromlib=None, alias=None): global __DEFERRED_IMPORTS globals()[(alias or name)] = LazyImporter(name, fromlib=fromlib, alias=alias) __DEFERRED_IMPORTS.append((name, fromlib, alias)) # Defer importing libraries, for fast startup time defer_import('re') defer_import('os') defer_import('shutil') defer_import('glob') defer_import('shlex') defer_import('inspect') defer_import('datetime') defer_import('namedtuple', fromlib='collections') defer_import('defaultdict', fromlib='collections') defer_import('OrderedDict', fromlib='collections') defer_import('Counter', fromlib='collections') defer_import('deque', fromlib='collections') defer_import('numpy', alias='np') defer_import('pandas', alias='pd') defer_import('parse', fromlib='dateutil.parser', alias='parse_date') # Display import lines for _name, _fromlib, _alias in __DEFERRED_IMPORTS: _pretty_import = 'import {}'.format(_name) if _fromlib is not None: _pretty_import = 'from {} '.format(_fromlib) + _pretty_import if _alias is not None: _pretty_import += ' as {}'.format(_alias) print(_pretty_import) print() # Print python version and startup time __ver = sys.version_info print('Python v{}.{}.{}'.format(__ver.major, __ver.minor, __ver.micro))
true
de02c440f4230790ad510f1c2bdbc4ddbe6d8da0
Python
diegoms21/DiegoRaspy4
/Clases_Udemy/4_GPIO/11_PantallaLCD16x2.py
UTF-8
675
3.59375
4
[]
no_license
#Hay varias modificaciones que hacerle a la libería antes de usar este código #Revisar el video del curso udemy import lcddriver import time lcd = lcddriver.lcd() '''HACE QUE EL MENSAJE AVANCE''' texto = "" maximo = 12 while maximo > 0: lcd.lcd_display_string(texto+"Hola",2) time.sleep(1) texto = texto + " " lcd.lcd_clear() maximo = maximo - 1 print("fin de programa") ''' CODIGO PARA HACER QUE PARPADE EL MENSAJE HOLA try: while True: #Para que el mensaje parpadee lcd.lcd_display_string("hola",1) #mensaje hola en la fila 1 time.sleep(1) lcd.lcd_clear() time.sleep(1) except: print("fin de programa")'''
true
3e949b48673e7c1b4ccfc800b0b74901d368006b
Python
featherko/epythopam
/homework/hw1/task2_fib.py
UTF-8
739
4.3125
4
[]
no_license
"""Task 1. Given a cell with "it's a fib sequence" from slideshow, please write function "check_fib", which accepts a Sequence of integers, and returns if the given sequence is a Fibonacci sequence We guarantee, that the given sequence contain >= 0 integers inside. """ from typing import Sequence def check_fibonacci(data: Sequence[int]) -> bool: """Check fibonacci. Checks if given sequence is fibonacci sequence :param data: given sequence :return: Returns true if given sequence is fibonacci """ if len(data) <= 2: return data == [0, 1] else: for i in range(2, len(data)): if not data[i] == data[i - 1] + data[i - 2]: return False return True
true
b8af7d0ab073110e0d2abb00f54b6b5c76ab7f86
Python
yscholty/yannic_robot
/src/yannic/yannic_robot/scripts/track_blob.py
UTF-8
1,872
2.671875
3
[]
no_license
#!/usr/bin/env python import rospy from std_msgs.msg import Int32 # Messages used in the node must be imported. from geometry_msgs.msg import Pose import sys import cv2 import numpy as np rospy.init_node("track_blob") cap=cv2.VideoCapture(1) pub = rospy.Publisher('follow_blob', Pose, queue_size=10) target_pose=Pose() # declaring a message variable of type Int32 x_d=0.0 y_d=0.0 x_d_p=0.0 y_d_p=0.0 while(1): _, img = cap.read() #converting frame(img i.e BGR) to HSV (hue-saturation-value) hsv=cv2.cvtColor(img,cv2.COLOR_BGR2HSV) blue_lower=np.array([94,123,46],np.uint8) blue_upper=np.array([125,255,255],np.uint8) blue=cv2.inRange(hsv,blue_lower,blue_upper) #Morphological transformation, Dilation kernal = np.ones((5 ,5), "uint8") blue=cv2.dilate(blue,kernal) img=cv2.circle(img,(260,68),5,(255,0,0),-1) #Tracking the Blue Color (_,contours,hierarchy)=cv2.findContours(blue,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) if len(contours)>0: contour= max(contours,key=cv2.contourArea) area = cv2.contourArea(contour) if area>800: x,y,w,h = cv2.boundingRect(contour) img = cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2) img=cv2.circle(img,((2*x+w)/2,(2*y+h)/2),5,(255,0,0),-1) img=cv2.line(img,(260,68),((2*x+w)/2,(2*y+h)/2),(0,255,0),2) x_d= (((2*y+h)/2)-68) * 0.06 y_d= (((2*x+w)/2)-260) * 0.075 s= 'x_d:'+ str(x_d)+ 'y_d:'+str(y_d) cv2.putText(img,s,(x-20,y-5),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,255),1,cv2.LINE_AA) if (abs(x_d-x_d_p)> 1 or abs(y_d-y_d_p)>1): target_pose.position.x=x_d*0.01 target_pose.position.y=y_d*0.01 target_pose.position.z=0.0 pub.publish(target_pose) x_d_p=x_d y_d_p=y_d cv2.imshow("Mask",blue) cv2.imshow("Color Tracking",img) if cv2.waitKey(1)== ord('q'): break cap.release() cv2.destroyAllWindows()
true
6f738d06a85da779e4053bdf9e64a6e384866a56
Python
ssubramanian90/Algorithm-design-manual
/algorithm design manual/1. Introduction to Algorithm Design/1- 32 Australian Voting.py
UTF-8
5,244
3.09375
3
[]
no_license
#Australian ballots require that voters rank all the candidates in order of #choice. Initially only the first choices are counted, and if one candidate #receives more than 50% of the vote then that candidate is elected. However, if #no candidate receives more than 50%, all candidates tied for the lowest number #of votes are eliminated. Ballots ranking these candidates first are recounted #in favor of their highest-ranked non-eliminated candidate. This process of #eliminating the weakest candidates and counting their ballots in favor of the #preferred non-eliminated candidate continues until one candidate #receives more than 50% of the vote, or until all remaining candidates are tied. import sys names=[] def main(): #get the number of elections num_elections= input() print(num_elections +"\n") num_elections= int(num_elections) with open(file, "r") as f: for line in f: names.append(line.rstrip()) while num_elections>0: #get the number of candidatwes for that election numcandidates= input() candidates=[0]*int(numcandidates) #randomly generate a set of candidates s= Set() while len(s)<int(numcandidates): s.add(random.randint(1, len(names)) count=0 for i in s: candidates[count]=names[i-1] count+=1 #generate the ballot results w=input() candidateweights=int(w)*int(numcandidates) ballots=[] while(candidateweights > 0): s=Set() while len(s)<int(numcandidates): s.add(random.randint(1, numcandidates) s=random.sample(s, len(s)) ballots.append(s) candidateweights-=1 maxvotes=evaluate(ballots,candidates) getwinner(maxvotes,ballots, candidates) def evaluate (ballots,candidates): ''' Determines the winner of the election. ''' maxvotes=[] for ballot in ballots: topvote = ballot.split('\n')[1] topcandidate = candidates[topvote] maxvotes.add(topcandidate) return maxvotes def getwinner(maxvotes, ballots, candidates) ''' Determines the winner of the election. 
''' while (True): # Check for a clear winner majority = len(maxvotes) / 2 votes =winners=leastVotes= [] for maxvote in maxvotes: if maxvotes.count(maxvote) > majority: return maxvote break else: votes.append(maxvotes.count(maxvote))# Here, votes counts the number of times the candidate appears in the ballot winners list sortedvotes=sorted(votes, key=int, reverse=True) if len(sortedvotes) > 0: # Check for an all-way tie winners.append(s for sortedvotes in s if s=sortedvotes[0] ) return winners break # Find the least number of votes leastVotes.append(s for sortedvotes in s if s=sortedvotes[-1] ) # Remove all candidates with the least number of votes for maxvote in maxvotes: if leastVotes == maxvotes.count(maxvote): removed.append(maxvote) return(getwinner(replace(removed, maxvotes, ballots, candidates), ballots, candidates)) def replace(removed, maxvotes, ballots, candidates): newmaxvotes=[] # Redistribute ballots from removed candidate for ballot in ballots: total=len(ballot.split('\n')) if candidates[ballot.split('\n')[1]]in removed: for i in range(2,total): if candidates[ballot.split('\n')[i]] not in removed: newmaxvotes.add(candidates[candidates[ballot.split('\n')[i]]) break else: i+=1 else: newmaxvotes.append(candidates[ballot.split('\n')[1]]) return newmaxvotes
true
06ed9ab10e67f61f30a78882a2ad48b08a4a2472
Python
jesdin/OST
/Exp 4/exp4.py
UTF-8
666
4.25
4
[]
no_license
print("") print("*Set Operations*") print("---------------------------------") print("") setA = {1, 3, 5, 7} setB = {2, 4, 6, 7} print("setA: {}".format(setA)) print("setB: {}".format(setB)) print("") print("*union*") print(setA | setB) print("") print("*Intersection*") print(setA & setB) print("") print("*Difference*") print("A-B: +", end="") print(setA - setB) print("") print("*Symmetric Difference*") print(setA ^ setB) print("") print("*A subset of B*") print(setA.issubset(setB)) print("") print("*A superset of B*") print(setA.issuperset(setB)) print("") print("*Update*") setA.update(setB) print(setA) print("") print("*length*") print(len(setA))
true
df02662e96c8dc1e7c2e033b3bb7c7fc4e6bd08c
Python
syskall/pyglfw
/test.py
UTF-8
2,815
2.515625
3
[ "Zlib" ]
permissive
#!/usr/bin/env python import time import os import glfw import ctypes GL_COLOR_BUFFER_BIT = 0x00004000 glClear = glfw.ext.OpenGLWrapper("glClear", None, ctypes.c_uint) # create icon (simple GLFW logo) icon = """ ................ ................ ...0000..0...... ...0.....0...... ...0.00..0...... ...0..0..0...... ...0000..0000... ................ ................ ...000..0...0... ...0....0...0... ...000..0.0.0... ...0....0.0.0... ...0....00000... ................ ................ """ icon = [s.strip() for s in icon.split("\n") if s.strip()] icon_width = len(icon[0]) icon_height = len(icon) icon_data = "".join([s.replace("0", "\x3f\x60\x60\xff").replace(".", "\x00\x00\x00\x00") for s in icon[::-1]]) def log(msg): # print("%06d %s" % (log.eventid, msg)) log.eventid += 1 log.eventid = 0 # callback functions def on_resize(w, h): log("Window resize: %d, %d" % (w, h)) def on_key(key, pressed): if pressed: log("Key press: %s" % str(key)) else: log("Key release: %s" % str(key)) def on_char(char, pressed): if pressed: log("Char press: %s" % char) else: log("Char release: %s" % char) def on_button(button, pressed): if pressed: log("Button press: %d" % button) else: log("Button release: %d" % button) def on_pos(x, y): log("Mouse pos: %d %d" % (x, y)) def on_scroll(pos): log("Scroll: %d" % pos) def on_close(): log("Close (press escape to exit)") return False def on_refresh(): log("Refresh") glClear(GL_COLOR_BUFFER_BIT) glfw.SwapBuffers() glfw.Init() print("Available video modes:\n%s\n" % "\n".join(map(str, glfw.GetVideoModes()))) print("Desktop video mode:\n%s\n" % glfw.GetDesktopMode()) print("GLFW Version: %d.%d.%d" % glfw.GetVersion()) glfw.OpenWindow(800, 600, 0, 0, 0, 8, 0, 0, glfw.WINDOW) print("OpenGL version: %d.%d.%d\n" % glfw.GetGLVersion()) glfw.ext.set_icons([(icon_data, icon_width, icon_height)]) glfw.SetWindowTitle("pyglfw test") glfw.Disable(glfw.AUTO_POLL_EVENTS) glfw.Enable(glfw.KEY_REPEAT) center_x = glfw.GetDesktopMode().Width / 2 - glfw.GetWindowSize()[0] / 
2 center_y = glfw.GetDesktopMode().Height / 2 - glfw.GetWindowSize()[1] / 2 glfw.SetWindowPos(center_x, center_y) glfw.SetWindowSizeCallback(on_resize) glfw.SetWindowCloseCallback(on_close) glfw.SetWindowRefreshCallback(on_refresh) glfw.SetKeyCallback(on_key) glfw.SetCharCallback(on_char) glfw.SetMouseButtonCallback(on_button) glfw.SetMousePosCallback(on_pos) glfw.SetMouseWheelCallback(on_scroll) while glfw.GetWindowParam(glfw.OPENED): glfw.PollEvents() if glfw.GetKey(glfw.KEY_ESC): break glClear(GL_COLOR_BUFFER_BIT) glfw.SwapBuffers() glfw.CloseWindow() glfw.Terminate()
true
b6240403611fea23d5d363a646bd654b26768b16
Python
ppg003/Data-Structure
/Paixu/5-1.Quick.py
UTF-8
1,669
3.140625
3
[]
no_license
from tools.array import random_array_unique from tools.test import is_in_order from tools.test import same_element import time def quick_sort(A): length = len(A) index_pivot = 0 index_right = length if length in [0, 1]: return A pivot = A[index_pivot] print("pivot : %s" % pivot) if length == 2: if A[0] > A[1]: A[0], A[1] = A[1], A[0] return A for i in range(length): if A[i] > pivot: index_right = i # print("index right : %s" % index_right) break for i in range(index_right + 1, length): if A[i] < pivot: A[index_right], A[i] = A[i], A[index_right] index_right += 1 if index_right == 0: A[index_pivot], A[length - 1] = A[length - 1], A[index_pivot] index_right = length else: A[index_pivot], A[index_right - 1] = A[index_right - 1], A[index_pivot] print("nums : %s" % A) print("index right : %s" % index_right) nums_left = A[index_pivot:index_right - 1] nums_right = A[index_right:len(A) + 1] print("left : %s" % nums_left) print("right : %s" % nums_right) A = quick_sort(nums_left) + [pivot] + quick_sort(nums_right) return A # Scenario x = random_array_unique(20) y = [-1892, 5656, -1854, -8386, 2474, -6352, 3410, 7288, 8285, -3635, 5192, -8577, -4019, -9392, 2091, -3512, -9157, -4537, -9512, -3879] z = [5, 4, 3, 2] z2 = [5, 7, 9, 2, 6, 1, 4] print(z) z_sort = quick_sort(z) print(z_sort) print(same_element(z, z_sort)) start = time.clock() print(is_in_order(quick_sort(z))) end = time.clock() print("Time : %f" % (end - start))
true
c2115780eebbc11597200e04ca37bec39d7abb2b
Python
c29r3/get_back_eth
/main.py
UTF-8
7,202
2.640625
3
[]
no_license
from web3 import Web3, HTTPProvider import requests import yaml from time import sleep import json from multiprocessing.dummy import Pool as ThreadPool import itertools from tqdm import tqdm with open("config.yml", 'r') as config: cfg = yaml.load(config, Loader=yaml.FullLoader) eth_provider = str(cfg["eth_provider_url"]) w3 = Web3(HTTPProvider(eth_provider)) file_name = cfg["file_name"] csv_delimiter = str(cfg["csv_delimiter"]) csv_file_name = file_name + ".csv" recipient_address = str(cfg["recipient_address"]) eth_gas_limit = int(cfg["eth_gas_limit"]) eth_gas_price = int(cfg["manual_gas_price"] * 1e9) wait_for_gasprice_value = int(cfg["wait_for_gasprice_value"] * 1e9) wait_for_gasprice = str(cfg["wait_for_gasprice"]) sleep_before_tx = cfg["sleep_before_tx"] threads_count = int(cfg["threads_count"]) contract_address = Web3.toChecksumAddress("0x1cc4426e36faeff09963d6b8b2da3b45f2f1deeb") with open("utils/balance_checker_ABI.json") as f: info_json = json.load(f) abi = info_json contract = w3.eth.contract(contract_address, abi=abi) def wait_until_fee_less(current_fee: int) -> int: """ Do not start sending ETH until the fee is less than 'current_fee' :param current_fee: int :return: value less than current_fee """ if current_fee > wait_for_gasprice_value: print(f"Gas price is too high {current_fee} wei\n" f"Waiting until the price of the commission decreases. Check period - 1 minute") while current_fee > wait_for_gasprice_value: sleep(60) current_fee = eth_price_gasstation() print(f'Current gas price {current_fee / 1e9} wei') return current_fee def list_split(keypairs_list: list, list_size: int = 1000): """ Divides large lists of pairs of addresses and private keys into a list of lists of a thousand items :param list_size: number of items in a sublist :param keypairs_list: ["address1;private_key1", "address2;private_key2" ...] 
:return: Object <class 'generator'> """ keypairs_len = len(keypairs_list) if keypairs_len == 0: raise Exception("split_list(): list is empty --> exit") elif keypairs_len < list_size: return keypairs_list elif keypairs_len > list_size: for i in range(0, keypairs_len, list_size): yield keypairs_list[i:i + list_size] def contract_check(keypair_list: list, contract_addr=None) -> list: """ Checks the balance of thousands of addresses through a smart contract Returns addresses with a balance greater then zero :param keypair_list: ["address1;private_key1", "address2;private_key2" ...] :param contract_addr: ["0x0000000000000000000000000000000000000000"] :return: ["address;private_key;balance_value", ...] """ non_empty_addreses = [] if contract_addr is None: contract_addr = ['0x0000000000000000000000000000000000000000'] addresses = [] privs = [] for i in range(len(keypair_list)): addresses.append(Web3.toChecksumAddress(keypair_list[i].split(";")[0])) privs.append(keypair_list[i].split(";")[1]) raw_balance = contract.functions.balances(addresses, contract_addr).call() if sum(raw_balance) > 0: for i, amount in enumerate(raw_balance): if amount > 0: # print(f'https://etherscan.io/address/{addresses[i]} {privs[i]} {str(amount)}') non_empty_addreses.append([addresses[i], privs[i], str(amount)]) pbar.update(1) return non_empty_addreses def eth_price_gasstation(): # getting safelow gas price for ETH from https://ethgasstation.info/ try: req = requests.get("https://ethgasstation.info/api/ethgasAPI.json") if req.status_code == 200 and "safeLow" in str(req.content): safe_low_price = int(int(req.json()["safeLow"]) / 10 * 1e9) return safe_low_price except Exception as gas_price_err: print("Can't get current gas price --> getting web3 default value") return w3.eth.gasPrice def write_log(string_to_write: str): with open(f"{file_name}_log.txt", 'a') as log: log.write(string_to_write + '\n') def read_csv() -> list: print(f'Reading file {csv_file_name}...') filtered_incorrect = [] with 
open(csv_file_name, 'r') as csv_file: csv_reader = csv_file.read() data_lst = csv_reader.split("\n") for line in data_lst: if line == "": continue line = line.split(";") addr = line[0] priv = line[1] if len(addr) != 42 or addr[:2] != "0x" or len(priv) != 64: print(f"Incorrect address or private key format {addr}") continue filtered_incorrect.append(f'{addr};{priv}') print(f'Found {len(data_lst)} lines in file {csv_file_name}') return filtered_incorrect def get_actual_nonce(address: str) -> int: return w3.eth.getTransactionCount(Web3.toChecksumAddress(address)) def get_eth_balance(address: str) -> int: return w3.eth.getBalance(Web3.toChecksumAddress(address)) def get_eth_signed_tx(sender_nonce: int, private_key: str, amount: int) -> str: eth_signed_tx = w3.eth.account.signTransaction(dict( nonce=sender_nonce, gasPrice=eth_gas_price, gas=eth_gas_limit, to=Web3.toChecksumAddress(recipient_address), value=amount, data=b'', ), private_key ) return eth_signed_tx # read file csv_data = read_csv() split_by = 1000 splitted_lst = list(list_split(csv_data, split_by)) pool = ThreadPool(threads_count) pbar = tqdm(total=len(splitted_lst)) non_empty_lst = pool.map(contract_check, splitted_lst) # Merge sublists to one entire list non_empty_lst = list(itertools.chain.from_iterable(non_empty_lst)) print(f'Found {len(non_empty_lst)} non empty addresses') # Auto detect safelow fee if it NOT manually setup if eth_gas_price == 0: eth_gas_price = eth_price_gasstation() print(f'Current transaction price in wei: {Web3.fromWei(eth_gas_price, "gwei")}') # Waiting for low commission to send if it "on" in config if wait_for_gasprice == "on": eth_gas_price = wait_until_fee_less(eth_gas_price) print(eth_gas_price) print("Start sending process") for i in range(0, len(non_empty_lst)): addr = non_empty_lst[i][0] priv = non_empty_lst[i][1] actual_balance = int(non_empty_lst[i][2]) nonce = get_actual_nonce(addr) amount_to_send = actual_balance - (eth_gas_price * eth_gas_limit) if amount_to_send <= 0: 
print(f'{i+1} | https://etherscan.io/address/{addr} Insufficient funds {amount_to_send} --> SKIP') continue print(f'{i+1} | {addr} eth to send: {str(amount_to_send / 1e18)}') eth_signed_tx = get_eth_signed_tx(sender_nonce=nonce, private_key=priv, amount=amount_to_send) tx_id_bin = w3.eth.sendRawTransaction(eth_signed_tx.rawTransaction) tx_id_hex = Web3.toHex(tx_id_bin) tx_id = f'TX_ID: https://etherscan.io/tx/{tx_id_hex}' print(tx_id) write_log(f'{str(i+1)} {tx_id}') sleep(sleep_before_tx) print('end of script')
true
06b6579bb7cb033e9196f1299c5434da659a74c1
Python
ivymorenomt/HackerrankPython
/Introduction/PrintFunction.py
UTF-8
224
4.28125
4
[]
no_license
#Read an integer N. #Without using any string methods, try to print the following: #123...N Note that "N" represents the values in between. print("Enter an integer") n = int(input()) for i in range(n): print(i+1, end=' ')
true
7c4574747d625c7a5e6eac1a7c14da34bb2ef999
Python
SesameMing/Python51CTONetwork
/day11/多进程之间的数据共享3.py
UTF-8
546
2.5625
3
[]
no_license
#!/usr/bin/env python # -*-coding:utf-8-*- # Author:SemaseMing <blog.v-api.cn> # Email: admin@v-api.cn # Time: 2016-10-18 16:41 # 在windows下不支持多进程,在linux下支持, # windows下只能在 测试模式下使用 from multiprocessing import Process from multiprocessing import Array def foo(i, arg): arg[i] = i + 100 for item in arg: print(item) print("================") if __name__ == '__main__': li = Array('i', 10) for i in range(10): p = Process(target=foo, args=(i, li, )) p.start()
true
19c9656bb585a9471481ef61dbacc338ccec9ab7
Python
huiqinwang/ReviewProject
/BigData/Spark/SparkStreaming/streamingKafka.py
UTF-8
1,579
2.5625
3
[]
no_license
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 17-9-2 下午5:11 # @Author : huiqin # @File : streamingKafka.py # @Description : Class is for from pyspark import SparkContext,SparkConf from pyspark.streaming import StreamingContext from pyspark.streaming.kafka import KafkaUtils def start(): sconf=SparkConf() sconf.set('spark.cores.max' , 8) sc=SparkContext(appName='KafkaDirectWordCount',conf=sconf) ssc=StreamingContext(sc,2) brokers="server1-2-5-24-138:9092,server1-3-5-24-139:9092,server1-4-5-24-140:9092" topic='spark_streaming_test_topic' kafkaStreams = KafkaUtils.createDirectStream(ssc,[topic],kafkaParams={"metadata.broker.list": brokers}) #统计生成的随机数的分布情况 result=kafkaStreams.map(lambda x:(x[0],1)).reduceByKey(lambda x, y: x + y) #打印offset的情况,此处也可以写到Zookeeper中 #You can use transform() instead of foreachRDD() as your # first method call in order to access offsets, then call further Spark methods. kafkaStreams.transform(storeOffsetRanges).foreachRDD(printOffsetRanges) result.pprint() ssc.start() # Start the computation ssc.awaitTermination() # Wait for the computation to terminate offsetRanges = [] def storeOffsetRanges(rdd): global offsetRanges offsetRanges = rdd.offsetRanges() return rdd def printOffsetRanges(rdd): for o in offsetRanges: print("%s %s %s %s %s" % (o.topic, o.partition, o.fromOffset, o.untilOffset,o.untilOffset-o.fromOffset)) if __name__ == '__main__': start()
true
f3f3557c1e39c6c3669addca5766426b816ff0e1
Python
EsJuan-az/EjerciciosAlgoritmia-Python
/A- Feedback/Feedback9.py
UTF-8
1,114
4.40625
4
[]
no_license
""" Escribir una función que simule una calculadora científica que permita calcular el seno, coseno, tangente, exponencial y logaritmo neperiano. La función preguntará al usuario el valor y la función a aplicar, y mostrará por pantalla una tabla con los enteros de 1 al valor introducido y el resultado de aplicar la función a esos enteros. """ from math import sin,cos,tan,log,e,radians,degrees,exp def Calculadora(op,dato1): if op == "sin": return f"sin({dato1}) = {degrees(sin(radians(dato1)))}" elif op == "cos": return f"cos({dato1}) = {degrees(cos(radians(dato1)))}" elif op == "tan": return f"tan({dato1}) = {degrees(tan(radians(dato1)))}" elif op == "ln": return f"ln({dato1}) = {log(dato1,e)}" elif op == "exp": return f"exp({dato1}) = {exp(dato1)}" opPosibles = ["sin","ln","cos","tan","exp"] op = input("Que operacion desea realizar?\n") numero = int(input("Hasta que numero desea realizarla?\n")) if op.lower() in opPosibles: for i in range(1,numero + 1): print(Calculadora(op,i)) else: print("Operacion no disponible")
true
f941d78681fd426358cc8ec7d48feffebe0cdac8
Python
ultraman-agul/python_demos
/实验报告/初始python01/p1_1.py
UTF-8
481
3.921875
4
[]
no_license
# -*- coding: utf-8 -*- # @Author : agul # @Date : 2020/10/28 10:56 # Software : PyCharm # version: Python 3.7 # @File : p1_1.py # description : 输入三角形的三条边,用海伦公式计算三角形的面积s import math a = eval(input("请输入a边长:")) b = eval(input("请输入b边长:")) c = eval(input("请输入c边长:")) p = (a + b + c) / 2 s = math.sqrt(p * (p - a) * (p - b) * (p - c)) print("三角形的面积是:{:.2f}".format(s))
true
c7a0a79ac4130101b234d281daef0bca2c908c76
Python
mikolajmale/SmartVape
/scripts/serial_live_plot.py
UTF-8
4,720
3.03125
3
[]
no_license
import sys, serial, argparse from collections import deque from enum import Enum import csv from typing import List, Callable import os import matplotlib.pyplot as plt import matplotlib.animation as animation class SerialLineParser: def __init__(self, delimeter=',', arg_num: int = 2, msg_min_val: List[float] = None, msg_max_val: List[float] = None): self.__delimeter = delimeter self.__arg_num = arg_num self.__last_valid = False self.__last_value = None self.__msg_min_val = msg_min_val self.__msg_max_val = msg_max_val if msg_min_val is not None: assert len(msg_min_val) == arg_num, 'Min param list does not have proper dimensions' if msg_max_val is not None: assert len(msg_max_val) == arg_num, 'Max param list does not have proper dimensions' def __call__(self, msg: str): self.__last_valid = self.__check_if_valid_msg(msg) if self.__last_valid: self.__last_value = [float(x) for x in msg.split(self.__delimeter)] self.__truncate_msg_data(self.__last_value) return self.__last_value else: return self.__last_value if self.__last_value else [0.0] * self.__arg_num def __truncate_msg_data(self, data: List[float]) -> List[float]: if self.__msg_min_val is not None: for i, min in enumerate(self.__msg_min_val): if data[i] < min: data[i] = min if self.__msg_max_val is not None: for i, max in enumerate(self.__msg_max_val): if data[i] > max: data[i] = max return data def __check_if_valid_msg(self, msg: str) -> bool: data = msg.split(self.__delimeter) if not isinstance(data, list): return False if len(data) != self.__arg_num: return False bool_l = [d.isdigit() for d in data] return all(bool_l) @property def valid(self): return self.__last_valid @property def param_num(self): return self.__arg_num class AnalogPlot: def __init__(self, ax, parser=SerialLineParser(), str_port='/dev/ttyACM0', baud=115200, timeout=10, max_buf_len=500): # open serial port self.ser = serial.Serial(str_port, baud, timeout=timeout) self.parser = parser self.signals = [deque([0] * max_buf_len) for i in 
range(parser.param_num)] self.ax = ax self.maxLen = max_buf_len self.__new_data: bool = False self.axes = [] for i in range(parser.param_num): a, = ax.plot([], []) self.axes.append(a) def __add_to_buf(self, buf, val): if len(buf) < self.maxLen: buf.append(val) else: buf.pop() buf.appendleft(val) def add(self, data): self.__new_data = True for i, s in enumerate(self.signals): self.__add_to_buf(s, data[i]) # update plot def update(self, frameNum, external_functions: List[Callable] = None): try: line = self.ser.readline().decode('utf-8').rstrip() if not line: return data = self.parser(line) self.add(data) if external_functions: for func in external_functions: func(data) for i, ax in enumerate(self.axes): ax.set_data(range(self.maxLen), self.signals[i]) self.ax.relim() self.ax.autoscale_view(True, True, True) except KeyboardInterrupt: print('exiting') def __del__(self): print("serial closed") self.ser.flush() self.ser.close() class CsvWriter: def __init__(self, fn: str, header: List[str] = None): self.__fn = fn self.__file = open(fn, 'w+') self.__writer = csv.writer(self.__file) if header is not None: self.__writer.writerow(header) def __del__(self): print(f"file [{self.__fn}] closed") self.__file.close() def write(self, data: List[float]): self.__writer.writerow(data) def main(): # set up animation fig = plt.figure() ax = plt.axes() parser = SerialLineParser(delimeter=',', arg_num=2, msg_min_val=[0.0, 0.0], msg_max_val=[75000, 75000]) analog_plot = AnalogPlot(ax=ax, parser=parser, max_buf_len=100) wd = os.path.dirname(os.path.realpath(__file__)) csv_writer = CsvWriter(os.path.join(wd, 'ppg.csv'), header=['IR', 'RED']) callables = [csv_writer.write] anim = animation.FuncAnimation(fig, analog_plot.update, fargs=(callables,), interval=1) # show plot plt.show() # call main if __name__ == '__main__': main()
true
d650c4afa97c57556cd0bb8644e347ba7bc52480
Python
fabiourias/520
/funcoes.py
UTF-8
1,821
3.59375
4
[]
no_license
#!/usr/bin/python3 #encoding: utf-8 from def boas_vindas(nome='Daniel', idade=24): nome = input('Digite o seu nome:') idade = int(input('Digite sua idade:')) dic = {'nome': nome, 'idade': idade} return dic def boas_vindas02(*args): for x in args: print('Seja bem vindo: {}!'.format(x)) def boas_vindas03(**kwargs): ''' funcao de boas vindas! ''' for x,y in kwargs.items(): #.values .keys .items print (x,y, sep=':') def calculo_total(**produto): ''' funcao calculo soma o total de cada item! ''' a = produto['qtde'] b = produto['valor'] c = produto['nome'] result = 'Produto: {}, Total: {}'.format(c, a * b) return result def boas_vindas04(): print ('Seja bem vindo!') def ola(nome): print('Ola {}!'.format(nome)) boas_vindas04() #ola('Daniel') #for nome in nomes: # boas_vindas(nome) def gravar_log(log): with open('python.log', 'a') as arq: arq.write(log) def soma(x ,y): return (x + y) def open_file(nome): try: with open(nome, 'r') as arquivo: return arquivo.readlines() except Exception as e: return 'Falha ao ler o arquivo informado: {}'.format(e) def format_file(nome, modo, conteudo=None): if modo.lower() == 'r': try: with open(nome, modo) as arquivo: return arquivo.readlines() except Exception as e: result = 'Falha ao ler o arquivo informado: {} [error] - [{}]'.format(e) gravar_log(result) return result elif modo == 'a': try: with open(nome, modo) as arquivo: arquivo.write(conteudo + '\n') return True except Exception as e: return 'Falha ao escrever no arquivo: {} !'.format(e)
true
2a96946ba686a7a1d5903a949c583e63a1ad9946
Python
hpcaitech/ColossalAI
/applications/Chat/evaluate/unieval/scorer.py
UTF-8
4,578
2.75
3
[ "BSD-3-Clause", "LicenseRef-scancode-warranty-disclaimer", "Apache-2.0", "BSD-2-Clause", "MIT" ]
permissive
# MIT License # Copyright (c) 2022 Ming Zhong # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import torch import torch.nn as nn from tqdm import tqdm from transformers import AutoConfig, AutoModelForSeq2SeqLM, AutoTokenizer class UniEvaluator: def __init__(self, model_name_or_path, max_length=1024, device='cuda:0', cache_dir=None): """ Set up model """ self.device = device self.max_length = max_length self.config = AutoConfig.from_pretrained(model_name_or_path, cache_dir=cache_dir) self.tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, cache_dir=cache_dir) self.model = AutoModelForSeq2SeqLM.from_pretrained(model_name_or_path, config=self.config, cache_dir=cache_dir) self.model.eval() self.model.to(device) self.softmax = nn.Softmax(dim=1) self.pos_id = self.tokenizer("Yes")["input_ids"][0] self.neg_id = self.tokenizer("No")["input_ids"][0] def score(self, inputs, task, category, dim, batch_size=8): """ Get scores for the given samples. 
final_score = postive_score / (postive_score + negative_score) """ # The implementation of "forward" in T5 still requires decoder_input_ids. # Therefore, we construct a random one-word target sequence. # The content of the target has no effect on the final scores. tgts = ["No" for _ in range(len(inputs))] pos_score_list, neg_score_list = [], [] for i in tqdm(range(0, len(inputs), batch_size), desc=f"{category}-({dim}-{task}): "): src_list = inputs[i:i + batch_size] tgt_list = tgts[i:i + batch_size] try: with torch.no_grad(): encoded_src = self.tokenizer(src_list, max_length=self.max_length, truncation=True, padding=True, return_tensors='pt') encoded_tgt = self.tokenizer(tgt_list, max_length=self.max_length, truncation=True, padding=True, return_tensors='pt') src_tokens = encoded_src['input_ids'].to(self.device) src_mask = encoded_src['attention_mask'].to(self.device) tgt_tokens = encoded_tgt['input_ids'].to(self.device)[:, 0].unsqueeze(-1) output = self.model(input_ids=src_tokens, attention_mask=src_mask, labels=tgt_tokens) logits = output.logits.view(-1, self.model.config.vocab_size) pos_score = self.softmax(logits)[:, self.pos_id] # Yes neg_score = self.softmax(logits)[:, self.neg_id] # No cur_pos_score = [x.item() for x in pos_score] cur_neg_score = [x.item() for x in neg_score] pos_score_list += cur_pos_score neg_score_list += cur_neg_score except RuntimeError: print(f'source: {src_list}') print(f'target: {tgt_list}') exit(0) score_list = [] for i in range(len(pos_score_list)): score_list.append(pos_score_list[i] / (pos_score_list[i] + neg_score_list[i])) return score_list
true
b9955772c18cbbbea7666a68a4eb425128ce5d1e
Python
amirkashi/CSCI_5622
/HW_3/CNN.py
UTF-8
2,533
2.875
3
[]
no_license
import argparse
import pickle
import gzip
from collections import Counter, defaultdict

import keras
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import MaxPooling2D
from keras.layers import Dropout
from keras.layers import Flatten
from keras.layers import Activation
from keras.layers.core import Reshape
from keras.utils import to_categorical
from keras import backend as K


class Numbers:
    """Loads the gzipped, pickled MNIST dataset from `location`.

    Exposes train_x/train_y (training split) and test_x/test_y, which are
    actually taken from the *validation* split of the pickle's
    (train, valid, test) triple.
    """

    def __init__(self, location):
        with gzip.open(location, 'rb') as f:
            # NOTE(review): under Python 3, unpickling the classic MNIST
            # pickle typically needs pickle.load(f, encoding='latin1') —
            # confirm against the target Python version.
            train_set, valid_set, test_set = pickle.load(f)
        self.train_x, self.train_y = train_set
        # Validation split is used as the test set here; test_set is unused.
        self.test_x, self.test_y = valid_set


class CNN:
    """A small convolutional classifier for 28x28 grayscale digit images.

    Builds: two 5x5 conv layers -> max-pool -> dropout -> dense(128) ->
    dropout -> softmax over 10 classes, compiled with Adam and
    categorical cross-entropy.
    """

    def __init__(self, train_x, train_y, test_x, test_y, epoches=10, batch_size=128):
        self.batch_size = batch_size
        self.epoches = epoches
        # Reshape flat 784-vectors to (N, 28, 28, 1) channels-last images.
        n = 1
        width = 28
        height = 28
        self.train_x = train_x.reshape(train_x.shape[0], width, height, n)
        self.test_x = test_x.reshape(test_x.shape[0], width, height, n)
        self.train_x = self.train_x.astype('float32')
        self.test_x = self.test_x.astype('float32')
        # One-hot encode integer labels.
        self.train_y = to_categorical(train_y)
        self.test_y = to_categorical(test_y)
        self.model = Sequential()
        # NOTE(review): Conv2D(32, 5, 5, ...) is the Keras 1.x positional
        # form (filters, rows, cols); Keras 2 expects Conv2D(32, (5, 5)).
        # Confirm the installed Keras version before changing.
        self.model.add(Conv2D(32, 5, 5, activation='relu', input_shape=(28, 28, 1)))
        self.model.add(Conv2D(32, 5, 5, activation='relu'))
        self.model.add(MaxPooling2D(pool_size=(2, 2)))
        self.model.add(Dropout(0.25))
        self.model.add(Flatten())
        self.model.add(Dense(128, activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(Dense(10, activation='softmax'))
        self.model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    def train(self):
        # NOTE(review): nb_epoch is the deprecated Keras 1.x name; Keras 2
        # renamed it to epochs — confirm against the installed version.
        self.model.fit(self.train_x, self.train_y, batch_size=self.batch_size, nb_epoch=self.epoches, verbose=1)

    def evaluate(self):
        # Returns whatever model.evaluate yields (loss and tracked metrics).
        acc = self.model.evaluate(self.test_x, self.test_y)
        return acc


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='CNN classifier options')
    parser.add_argument('--limit', type=int, default=-1, help='Restrict training to this many examples')
    args = parser.parse_args()

    data = Numbers("../data/mnist.pkl.gz")
    # With the default limit of -1, the slice drops the last training example.
    cnn = CNN(data.train_x[:args.limit], data.train_y[:args.limit], data.test_x, data.test_y)
    cnn.train()
    acc = cnn.evaluate()
    print(acc)
true
3abe00792d99dd79b6587f9a9a23a71a790b0971
Python
danisuar3z/ejercicios-python
/tabla_informe_viejo.py
UTF-8
3,021
3.953125
4
[]
no_license
# tabla_informe.py
# Daniel T. Suarez
# Exercise 2.33: A formatting challenge
# Exercise 6.2: main() function

import csv


def leer_camion(nombre_archivo):
    """
    Receives the path of a .csv file containing the cargo data of a
    fruit truck and returns a list of dictionaries with the full
    information for each fruit.
    """
    camion = []
    with open(nombre_archivo, 'rt') as f:
        # First line is the header row; strip the trailing newline from the
        # last column name before zipping headers with each data row.
        headers = next(f).split(',')
        headers[-1] = headers[-1].replace('\n','')
        rows = csv.reader(f)
        for i, row in enumerate(rows, 1):
            try:
                fruta = dict(zip(headers, row))
                fruta['cajones'] = int(fruta['cajones'])
                fruta['precio'] = float(fruta['precio'])
                camion.append(fruta)
            except ValueError:
                # Row whose 'cajones'/'precio' fields are not numeric:
                # report it (message intentionally in Spanish) and skip.
                print(f'Error en linea {i}: No se puede interpretar '
                      f'{row}')
    return camion


def leer_precios(nombre_archivo):
    """
    Receives the path of a .csv file containing the sale price of each
    fruit and returns a dictionary mapping fruit names to prices.
    """
    precios = {}
    with open(nombre_archivo, 'rt') as f:
        rows = csv.reader(f)
        for row in rows:
            try:
                precios[row[0]] = float(row[1])
            except IndexError:
                # Blank / malformed lines are silently ignored.
                pass
    return precios


def hacer_informe(camion, precios):
    """
    Takes the list of fruit dictionaries loaded onto the truck and a
    price dictionary, and returns a list of tuples including the change
    between the sale price and the cost of each crate.
    """
    lista_total = []
    for fruta in camion:
        tupla_fruta = (fruta['nombre'], fruta['cajones'], fruta['precio'],
                       (precios[fruta['nombre']] - fruta['precio']))
        lista_total.append(tupla_fruta)
    return lista_total


def imprimir_informe(informe):
    '''
    Receives a report (list of (name, crates, price, change) tuples) and
    prints it as an aligned table for easier reading.
    '''
    print(f' {"Fruta":^9} | {"Cajones":^8} | {"Precio":^8} | {"Cambio":^8}')
    print('-'*44)
    for nombre, cajones, precio, cambio in informe:
        print(f' {nombre:<9s} | {cajones:>8d} | '
              f'{"$" + str(f"{precio:.2f}"):>8s} | {cambio:>8.2f} ')


def informe_camion(archivo_camion='Data/camion.csv', archivo_precios='Data/precios.csv'):
    '''
    Calls the functions needed to print the final report from the given
    file names. Both file arguments have default values.
    '''
    camion = leer_camion(archivo_camion)
    precios = leer_precios(archivo_precios)
    informe = hacer_informe(camion, precios)
    return imprimir_informe(informe)


def main(argv):
    '''
    Command-line entry point: expects the truck CSV path and the prices
    CSV path as the first two positional arguments (raises IndexError if
    they are missing).
    '''
    return informe_camion(argv[1], argv[2])


if __name__ == '__main__':
    import sys
    main(sys.argv)

'''
informe_camion()
print()
informe_camion(archivo_camion='Data/missing.csv')
'''
true
db2e632007be201d20b50b5c6149efc32d125e5e
Python
garryjable/TelloDroneMk2
/unit_tests.py
UTF-8
8,324
2.53125
3
[]
no_license
# import test_server
from models import (
    MissionFactory,
    MissionLibrary,
    DroneStatusStore,
    DroneDispatcher,
    SlowMission,
    FastMission,
    VerboseFastMission)
from simulator import DroneSimulator, DroneMonitor
import unittest
import socket
import time
import mission_data


class DroneDispatcherTests(unittest.TestCase):
    """Exercises DroneDispatcher host/port accessors and mission dispatch
    against the local DroneSimulator."""

    def setUp(self):
        self.host = "127.0.0.1"
        self.drone_port = 8891
        mission_factory = MissionFactory()
        self.missions = mission_factory.create_missions(mission_data)
        self.dispatcher = DroneDispatcher(self.host, self.drone_port)

    def test_get_set_info(self):
        # NOTE(review): the "new" host/port values are intentionally swapped
        # types (int host, string port) — presumably only to prove the
        # setters store whatever they are given; confirm intent.
        new_host = 8888
        new_port = "192.168.10.1"
        host = self.dispatcher.get_host()
        port = self.dispatcher.get_port()
        self.assertEqual(host, self.host)
        self.assertEqual(port, self.drone_port)
        self.dispatcher.set_port(new_port)
        self.dispatcher.set_host(new_host)
        host = self.dispatcher.get_host()
        port = self.dispatcher.get_port()
        self.assertEqual(host, new_host)
        self.assertEqual(port, new_port)

    def test_send_drone_on_mission(self):
        self.drone_simulator = DroneSimulator()
        self.drone_simulator.start_listening()
        response = self.dispatcher.send_drone_on_mission(self.missions["1"])
        self.assertEqual(response, "you flew mission 1")
        # Countdown gives the simulator time to time out before teardown.
        print("waiting for drone simulator to time out")
        for i in range(15, 0, -1):
            print(i)
            time.sleep(1)
        self.drone_simulator.stop_reporting()
        self.drone_simulator.stop_listening()
        self.drone_simulator.close_socket()

    def tearDown(self):
        self.dispatcher.close_socket()


class MissionsTests(unittest.TestCase):
    """Smoke-tests that each mission type's execute() runs with a stubbed
    send_command callable (no assertions on the responses)."""

    def setUp(self):
        mission_factory = MissionFactory()
        self.missions = mission_factory.create_missions(mission_data)

        # Stub transport: echoes the command back instead of hitting a drone.
        def dummy_send_command(message):
            return message
        self.send_command = lambda message: dummy_send_command(message)

    def test_fast_mission(self):
        response = self.missions["1"].execute(self.send_command)

    def test_slow_mission(self):
        response = self.missions["2"].execute(self.send_command)

    def test_verbose_fast_mission(self):
        response = self.missions["3"].execute(self.send_command)


class MissionFactoryTests(unittest.TestCase):
    """Checks that missions built from the .py module, a JSON file and a CSV
    file all carry the expected names and concrete mission classes."""

    def test_create_missions_from_py(self):
        mission_factory = MissionFactory()
        missions = mission_factory.create_missions(mission_data)
        index = 0
        for mission_name, mission in missions.items():
            self.assertEqual(mission_name, mission_data.data[index]['name'])
            if isinstance(mission, SlowMission):
                mission_type = 'slow'
            elif isinstance(mission, FastMission):
                mission_type = 'fast'
            elif isinstance(mission, VerboseFastMission):
                mission_type = 'verbosefast'
            self.assertEqual(mission_type, mission_data.data[index]['type'])
            index += 1

    def test_create_missions_from_json(self):
        import json
        # NOTE(review): file handle is never closed; harmless in a short test
        # run but a `with` block would be tidier.
        f = open("mission_data.json", 'r')
        json_data = json.load(f)
        mission_factory = MissionFactory()
        missions = mission_factory.create_missions_from_file("mission_data.json")
        index = 0
        for mission_name, mission in missions.items():
            self.assertEqual(mission_name, json_data['data'][index]['name'])
            if isinstance(mission, SlowMission):
                mission_type = 'slow'
            elif isinstance(mission, FastMission):
                mission_type = 'fast'
            elif isinstance(mission, VerboseFastMission):
                mission_type = 'verbosefast'
            self.assertEqual(mission_type, json_data['data'][index]['type'])
            index += 1

    def test_create_missions_from_csv(self):
        import csv
        rows = []
        with open("mission_data.csv", "r") as csvfile:
            mission_data_reader = csv.reader(csvfile, delimiter=",")
            for row in mission_data_reader:
                rows.append(row)
        # Drop the header row so rows[index] lines up with missions.
        rows.pop(0)
        mission_factory = MissionFactory()
        missions = mission_factory.create_missions_from_file("mission_data.csv")
        index = 0
        for mission_name, mission in missions.items():
            self.assertEqual(mission_name, rows[index][0])
            if isinstance(mission, SlowMission):
                mission_type = 'slow'
            elif isinstance(mission, FastMission):
                mission_type = 'fast'
            elif isinstance(mission, VerboseFastMission):
                mission_type = 'verbosefast'
            self.assertEqual(mission_type, rows[index][2])
            index += 1


class MissionFlyerTests(unittest.TestCase):
    """Placeholder suite — no real assertions yet."""

    def test_fly_mission(self):
        print("stuff")


class MissionLibraryTests(unittest.TestCase):
    """Exercises MissionLibrary lookup and extension."""

    def setUp(self):
        mission_factory = MissionFactory()
        self.missions = mission_factory.create_missions(mission_data)
        self.library = MissionLibrary(self.missions)

    def test_get_mission_names(self):
        mission_names = self.library.get_mission_names()
        self.assertEqual(mission_names, list(self.missions.keys()))

    def test_get_missions(self):
        name = "1"
        mission = self.library.get_mission(name)
        self.assertEqual(mission, self.missions[name])

    def test_add_new_missions(self):
        # Only checks that add_missions accepts another batch; no assertion.
        mission_factory = MissionFactory()
        more_missions = mission_factory.create_missions(mission_data)
        self.library.add_missions(more_missions)


class DroneStatusStoreTests(unittest.TestCase):
    """Exercises DroneStatusStore's raw-string and dict-based accessors."""

    def setUp(self):
        # Telemetry string in the Tello-style "key:value;" wire format.
        self.initial_status = "pitch:0;roll:0;yaw:0;vgx:0;vgy:0;vgz:0;templ:0;temph:0;tof:0;h:0;bat:100;baro:0.00;time:0;agx:0.00;agy:0.00;agz:0.00;\r\n"
        self.initial_status_dict = {
            "pitch": 0,
            "roll": 0,
            "yaw": 0,
            "vgx": 0,
            "vgy": 0,
            "vgz": 0,
            "templ": 0,
            "temph": 0,
            "tof": 0,
            "h": 0,
            "bat": 100,
            "baro": 0.00,
            "time": 0,
            "agx": 0.00,
            "agy": 0.00,
            "agz": 0.00,
        }
        self.status_store = DroneStatusStore()

    def test_get_latest_status(self):
        latest_status = self.status_store.get_latest_status()
        self.assertEqual(latest_status, self.initial_status)

    def test_update_status(self):
        new_status = "pitch:0;roll:0;yaw:0;vgx:0;vgy:0;vgz:0;templ:0;temph:0;tof:0;h:0;bat:100;baro:1.00;time:3;agx:0.00;agy:0.00;agz:0.00;\r\n"
        self.status_store.update_latest_status(new_status)
        latest_status = self.status_store.get_latest_status()
        self.assertEqual(new_status, latest_status)

    def test_get_status_dict(self):
        status_dict = self.status_store.get_latest_status_dict()
        self.assertEqual(status_dict, self.initial_status_dict)

    def test_update_status_with_dict(self):
        status_dict = self.status_store.get_latest_status_dict()
        status_dict["h"] = 100
        self.status_store.update_latest_status_with_dict(status_dict)
        latest_status_dict = self.status_store.get_latest_status_dict()
        self.assertEqual(latest_status_dict,
                         status_dict)


class DroneMonitorTests(unittest.TestCase):
    """End-to-end check that the monitor records telemetry while a mission is
    dispatched to the simulator (asserts the 'time' field advanced)."""

    def test_listen_for_status(self):
        self.host = "127.0.0.1"
        self.drone_port = 8891
        mission_factory = MissionFactory()
        self.missions = mission_factory.create_missions(mission_data)
        self.drone_simulator = DroneSimulator()
        self.drone_simulator.start_listening()
        self.drone_monitor = DroneMonitor()
        self.dispatcher = DroneDispatcher(self.host, self.drone_port)
        response = self.dispatcher.send_drone_on_mission(self.missions["1"])
        self.drone_simulator.stop_reporting()
        self.drone_simulator.stop_listening()
        self.drone_monitor.stop_listening()
        latest_status_dict = self.drone_monitor.status_store.get_latest_status_dict()
        self.assertNotEqual(0, latest_status_dict['time'])
        self.dispatcher.close_socket()
        self.drone_monitor.close_socket()
        self.drone_simulator.close_socket()
true
bad0e565ce46a8040096b78324916cf97abee586
Python
imrehg/epc-data-import
/processing/app/processor.py
UTF-8
9,862
3.1875
3
[ "MIT" ]
permissive
"""EPC data importer / processor

This module implements a proof-of-concept EPC data importer and processor.
It is tasked with taking a zip file, extracting records, processing them,
and saving them into a database

Attributes (module level):
    LIMIT_DB_CONNECT_RETRIES: number of retries when trying to connect to the database
"""

import csv
import io
import logging
import multiprocessing as mp
import os
import queue
import sys
import time
import zipfile

import psycopg2

# Basic logger setup
logging.basicConfig()
logger = logging.getLogger()
logger.setLevel("INFO")

# Maximum number of times to retry connecting to the database on startup
LIMIT_DB_CONNECT_RETRIES = 10


###
# Record processing functions
###
def parseRow(row):
    """
    Parse a single row of records, extracting the relevant fields

    Args:
      row: a single row (dict keyed by the CSV header names)

    Return:
      A dict with the extracted record, or None if any expected
      column is missing
    """
    try:
        record = {
            "lmk_key": row['LMK_KEY'],
            "lodgement_date": row['LODGEMENT_DATE'],
            "transaction_type": row['TRANSACTION_TYPE'],
            "total_floor_area": row['TOTAL_FLOOR_AREA'],
            # Fix: key renamed from the original typo 'addtess'; its only
            # consumer (create_records below) is updated to match.
            "address": row['ADDRESS'],
            "postcode": row['POSTCODE'],
        }
    except KeyError:
        record = None
    return record


def process_row(input_queue, record_queue):
    """
    Process a single row of input, and pass it on to record handling queue

    Args:
      input_queue: a queue with lines of records to process (joinable)
      record_queue: a queue to submit processed record (dicts) to

    Return:
      None (runs forever as a worker loop)
    """
    while True:
        try:
            row = input_queue.get(True, 10)
            record = parseRow(row)
            if record:  # If there's a successfully extracted record continue
                # Simulate long-ish API call
                time.sleep(0.250)
                # Dummy values
                record['latitude'] = 0.0
                record['longitude'] = 0.0
                # Put the record onto the record queue to be added to the database
                record_queue.put(record)
            # Feed back to the input queue for job counting
            input_queue.task_done()
        except queue.Empty:
            # Keep waiting for data to process
            pass


###
# Database functions
###
def connect_db():
    """
    Connect to the external database

    Args:
      None

    Environment variables:
      POSTGRES_USER: username to connect with
      POSTGRES_PASSWORD: password to connect with
      POSTGRES_DB: database name to connect to

    Globals:
      LIMIT_DB_CONNECT_RETRIES: maxium count to retry (with delay)

    Return:
      An establised connection
    """
    conn = None
    retry_counter = 0
    while not conn:
        try:
            conn = psycopg2.connect(user=os.getenv("POSTGRES_USER"),
                                    password=os.getenv("POSTGRES_PASSWORD"),
                                    host="db",
                                    database=os.getenv("POSTGRES_DB"))
        except psycopg2.OperationalError:
            # Give the database container time to come up, then retry.
            if retry_counter >= LIMIT_DB_CONNECT_RETRIES:
                raise
            retry_counter += 1
            time.sleep(5)
    return conn


def create_tables(conn):
    """
    Create tables for the data import database if the do not exist

    Args:
      conn: an established connection

    Return:
      None
    """
    # NOTE(review): 'total_foor_area' is a typo baked into the live schema;
    # renaming it would break existing databases, so it is kept as-is.
    command = """
        CREATE TABLE epc (
            lmk_key VARCHAR(255) PRIMARY KEY,
            lodgement_date DATE,
            transaction_type VARCHAR(255),
            total_foor_area FLOAT,
            address VARCHAR(255),
            postcode VARCHAR(255),
            latitude FLOAT,
            longitude FLOAT
        )
        """
    try:
        cur = conn.cursor()
        # create table
        cur.execute(command)
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        # A pre-existing table is expected on re-runs; anything else is fatal.
        if error.__class__.__name__ == "DuplicateTable":
            logger.debug("Database table already exists.")
        else:
            raise error


def create_records(record_queue, conn):
    """
    Worker process to add pre-processed records to the database

    Args:
      record_queue: a queue to pop off items to put into the database (joinable)
      conn: an established database connection

    Return:
      None (runs forever as a worker loop)
    """
    # The import SQL, if there's an item with the given 'lmk_key', ignore
    sql = """
        INSERT INTO epc (lmk_key, lodgement_date, transaction_type, total_foor_area, address, postcode, latitude, longitude)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        ON CONFLICT (lmk_key)
        DO NOTHING;
        """
    # # An alternate version, to update any record that is matched with the given key
    # sql = """
    #     INSERT INTO epc (lmk_key, lodgement_date, transaction_type, total_foor_area, address, postcode, latitude, longitude)
    #     VALUES (%s, %s, %s, %s, %s, %s)
    #     ON CONFLICT (lmk_key)
    #     DO
    #       UPDATE
    #         SET lodgement_date = EXCLUDED.lodgement_date,
    #             transaction_type = EXCLUDED.transaction_type,
    #             total_foor_area = EXCLUDED.total_foor_area,
    #             address = EXCLUDED.address,
    #             postcode = EXCLUDED.postcode;
    #             latitude = EXCLUDED.latitude;
    #             longitude = EXCLUDED.longitude;
    #     """
    # Start the worker process
    while True:
        try:
            # pop off a record from the queue if there's one
            record = record_queue.get(True, 10)
            try:
                # Add item to the database
                cur = conn.cursor()
                cur.execute(sql, (record['lmk_key'],
                                  record['lodgement_date'],
                                  record['transaction_type'],
                                  record['total_floor_area'],
                                  record['address'],
                                  record['postcode'],
                                  record['latitude'],
                                  record['longitude'],
                                  )
                            )
                cur.close()
            except (Exception, psycopg2.OperationalError) as error:
                raise error
            finally:
                # Feed back to the queue for job counting
                record_queue.task_done()
        except queue.Empty:
            # Just wait for new tasks
            pass


###
# Archive handling functions
###
def archive_enqueue(import_file, processing_queue):
    """
    Read the input CSV from a zip file and push lines onto the processing queue

    Args:
      import_file: file to load (zip)
      processing_queue: where to enqueue the read lines for further processing

    Environment variables:
      MAXRECORDS: number of records to process, if 0 then all (default 0)

    Return:
      The number of records enqueued
    """
    # For this exercise, potentially limit the number of records to process
    # This is to set a manageable limit, specific for this case
    max_records = int(os.getenv("MAXRECORDS", 0))
    max_records_reached = False

    counter = 0
    logger.info(f'Import file: {import_file}')
    # Fix: use a context manager so the archive is always closed (the
    # original leaked the ZipFile handle).
    with zipfile.ZipFile(import_file) as input_archive:
        for filename in input_archive.namelist():
            # Fix: the original logged the constant '(unknown)' inside an
            # f-string with no placeholder; log the member actually examined.
            logger.info(f'Assessing file: {filename}')
            if os.path.basename(filename) == "certificates.csv":
                with input_archive.open(filename) as csvfile:
                    items_file = io.TextIOWrapper(csvfile)
                    reader = csv.DictReader(items_file, delimiter=',', quotechar='"')
                    for row in reader:
                        counter += 1
                        processing_queue.put(row)
                        if max_records > 0 and counter >= max_records:
                            max_records_reached = True
                            break
            if max_records_reached:
                break
    return counter


def main(import_file):
    """
    The main process of data processing task

    Args:
      import_file: file to load items from

    Environment variables:
      THREADS: number of workers in the processing worker pool (default 100)

    Return
      None
    """
    # Set up database connection
    conn = connect_db()
    # Enable AUTOCOMMIT for direct writes
    conn.set_session(autocommit=True)
    # Create the requited database tables if they don't exists yet
    create_tables(conn)

    ## Task management setup
    # Database queue and worker setup
    db_queue = mp.JoinableQueue()
    db_process = mp.Process(target=create_records, args=(db_queue, conn))
    db_process.start()
    # Imput processor queue and worker pool setup
    processing_queue = mp.JoinableQueue()
    processing_pool = mp.Pool(int(os.getenv("THREADS", 100)), process_row, (processing_queue, db_queue))

    total_records = archive_enqueue(import_file, processing_queue)

    logger.info("Processing enqueued tasks")
    while True:
        # Check queue sizes, and display them in the logs
        qsize = processing_queue.qsize()
        qsize2 = db_queue.qsize()
        logger.info(f'Input queue: {qsize} | database queue: {qsize2}')
        if qsize == 0 and qsize2 == 0:
            # When queues finished, stop looping
            break
        time.sleep(1)

    # Finish and clean up queues, workers, and connections
    processing_queue.join()
    processing_pool.close()
    db_queue.join()
    db_process.terminate()
    conn.close()

    # Job results
    logger.info(f'Processed {total_records} records')


if __name__ == "__main__":
    try:
        import_file_arg = sys.argv[1]
    except IndexError:
        print("Input file argument not passed", file=sys.stderr)
        raise
    main(import_file_arg)
true
701f594d5044a196f22c002e1aa4db66c1b34386
Python
bazz2/c-language
/python/robot.py
UTF-8
6,889
3.484375
3
[]
no_license
#coding=utf8 import random import itchat from itchat.content import * all_jokes = [ '小白兔蹦蹦跳跳到面包房,问:“老板,你们有没有一百个小面包啊?” 老板:“啊,真抱歉,没有那么多” “这样啊。。。”小白兔垂头丧气地走了。\ 第二天,小白兔蹦蹦跳跳到面包房,“老板,有没有一百个小面包啊?” 老板:“对不起,还是没有啊” “这样啊。。。”小白兔又垂头丧气地走了。\ 第三天,小白兔蹦蹦跳跳到面包房,“老板,有没有一百个小面包 啊?” 老板高兴的说:“有了,有了,今天我们有一百个小面包了!!” 小白兔掏出钱:“太好了,我买两个!”', '第一天,小白兔去河边钓鱼,什么也没钓到,回家了。\ 第二天,小白兔又去河边钓鱼,还是什么也没钓到,回家了。\ 第三天,小白兔刚到河边,一条大鱼从河里跳出来,冲着小白兔大叫:你他妈的要是再敢用胡箩卜当鱼饵,我就扁死你!', '熊和小白兔在森林里便便,完了熊问小白兔“你掉毛吗?” 小白兔说“不掉~” 于是熊就拿起小白兔擦屁股。', '有一只小白兔强奸了一只大灰狼,然后就跑了,大灰狼愤而追之, 小白兔眼看大灰狼快要追上了, 便在一棵树下坐下来, 戴起墨镜,拿张报纸看, 假装什么事也没有发生过, 这时大灰狼跑来了,看见坐在树下的小白兔, 问道:"有没有看见一只跑过去的小白兔!" 小白兔答道:"是不是一只非礼了大灰狼的小白兔?" 大灰狼大呼:"不会吧!这么快就上报纸了!!!" ', '有一天小白兔快乐地奔跑在森林中, 在路上它碰到一只正在卷大麻的长颈鹿, 小白兔对长颈鹿说: "长颈鹿长颈鹿,你为什么要做伤害自己的事呢? 看看这片森林多么美好,让我们一起在大自然中奔跑吧!" 长颈鹿看看大麻烟,看看小白兔,于是把大麻烟向身后一扔, 跟着小白兔在森林中奔跑. 后来它们遇到一只正在准备吸古柯碱的大象, 小白兔对大象说: "大象大象,你为什么要做伤害自己的事呢? 看看这片森林多么美好,让我们一起在大自然中奔跑吧!" 大象看看古柯碱,看看小白兔,于是把古柯碱向身后一扔, 跟着小白兔和长颈鹿在森林中奔跑. 后来它们遇到一只正在准备打海洛因的狮子, 小白兔对狮子说: "狮子狮子,你为什么要做伤害自己的事呢? 看看这片森林多么美好,让我们一起在大自然中奔跑吧!" 狮子看看针筒,看看小白兔,于是把针筒向身后一扔, 冲过去把小白兔狠揍了一顿. 大象和长颈鹿吓得直发抖:"你为什么要打小白兔呢? 它这么好心,关心我们的健康又叫我们接近大自然." 狮子生气地说:"这个混蛋小白兔,每次嗑了摇头丸就拉着我像白痴一样在森林里乱跑."', '三个小白兔采到一个蘑菇。两个大的让小的去弄一些野菜一起来吃, 小的说:我不去!我走了,你们就吃了我的蘑菇了……两个大的: 不会的,放心去把。于是小白兔就去了~~~半年过去了,小白兔还没回来。一个大的说:它不回来了,我门吃吧……另一个大的说:再等等吧~~~ 一年过去了小白兔还没回来……两个大的商量:不必等了,我们吃了吧……就在这时那个小的白兔突然从旁边丛林中跳出来,生气的说:看!我就知道你们要吃我的蘑菇!', '小白兔和大狗熊走在森林里,不小心踢翻一只壶。壶里出来一精灵,说可以满足它们各三个愿望。狗熊说,把它变成世界上最强壮的狗熊。它的愿望实现了。小白兔说,给它一顶小头盔。它的愿望也实现了。狗熊说,把它变成世界上最漂亮的狗熊。它的愿望又实现了。小白兔说,给它一辆自行车。它的愿望又实现了。狗熊说,把世界上其它的狗熊全变成母狗熊! 
小白兔骑上自行车,一边跑一边说,把这只狗熊变成同性恋。', '小灰狼喜欢素食,小灰狼爸妈很苦恼。一日,见小灰狼狂追小白兔,甚喜,最终小灰狼摁住小白兔,恶狠狠地说:兔崽子,把胡萝卜交出来!', '小白兔:快问我。快问我。“你是小白兔么?”\ 大灰狼:“你是小白兔么?”\ 小白兔:“对啊对啊。我就是小白兔”\ 小白兔:快问我。快问我。“你是长颈鹿么?”\ 大灰狼:“你是长颈鹿么?”\ 小白兔:“你TM傻啊。我不告诉你我是小白兔了么?!”\ 大灰狼:“。。。”', '一天,有一只非常可爱的小白兔跑在大森林里,结果迷路了。这时它看到一只小黑兔,便跑去问:"小黑兔哥哥,小黑兔哥哥,我在大森林里迷路了,怎样才能走出大森林呀?"小黑兔问:"你想知道吗?"小白兔说:"想。"小黑兔说:"你想知道的话,就得先让我舒服舒服。"小白兔没法子,只好让小黑兔舒服舒服。小黑兔于是就告诉小白兔怎么走,小白兔知道了,就继续蹦蹦跳跳地往前跑。\ 跑着跑着,小白兔又迷路了,结果碰上一只小灰兔。小白兔便跑去问:"小灰兔哥哥,小灰兔哥哥,我在大森林里迷路了,怎样才能走出大森林呀?"小灰兔问:"你想知道吗?"小白兔说:"想。"小灰兔说:"你想知道的话,就得先让我舒服舒服。"小白兔没法子,只好让小灰兔也舒服舒服。小灰兔于是就告诉小白兔怎么走,小白兔知道了,就又继续蹦蹦跳跳地往前跑。\ 结果又迷路了,这时,它碰上一只小花兔,这回小白兔可学乖了,跑过去说:"小花兔哥哥,小花兔哥哥,你要是告诉我怎样才能走出大森林,我就让你舒服舒服。"\ 小花兔一听,登时抡圆了给小白兔一个大嘴巴,说:"我靠,你丫是问路呐,还是找办呐?" ', '小白兔约大灰狼去喝啤酒,大灰狼醉了,小白兔趁机把大灰狼给强奸了,过了几天小白兔又叫大灰狼去喝酒,大灰狼说:‘哎!不去了!不去了!,喝完啤酒屁股疼’', '话说有三只兔子拉便便。第一只拉出来的是长条的,第二只拉出来的是圆形的,第三只拉出的居然是六芒星型。为什么?另外两只兔子也问了,第三只兔子说,我用手捏的。', ] @itchat.msg_register([TEXT]) def text_reply(msg): if msg['Text'] == '讲个笑话': i = random.randrange(0, len(all_jokes)) #print(all_jokes[i], '@482f24ba78542b322e073d17293df80c') #itchat.send(all_jokes[i], '@482f24ba78542b322e073d17293df80c') print(all_jokes[i], msg['FromUserName']) itchat.send(all_jokes[i], msg['FromUserName']) return #return all_jokes[i] itchat.auto_login(hotReload=True) itchat.run()
true
5f6f4d66829ec1c5867328e16b65b9430a5debb0
Python
zahan-ict/Uncertainty-Calculation
/ucom.py
UTF-8
16,886
2.90625
3
[]
no_license
#!/usr/bin/env python 3.6.4 # -*- coding: utf-8 -*- """ Complex Uncertainty Calculation @developer: Md Sarwar Zahan Matrix: 01461419 University of Klagenfurt """ import inspect from unc import Unc import numpy as np import math import cmath import sympy class Ucom(Unc): comp_value = 0 id_check = [0] comp_unc = 0 name = '' dep = '' # Constructor definition ############################################## def __init__(self, comp_value, comp_unc=None, name=None, dep=None): self.comp_value = comp_value self.comp_unc = comp_unc self.name = name self.dep = dep # Default string function for formatting output ############################### def __str__(self): comp_value = rmParen(self.comp_value) comp_unc = rmParen(self.comp_unc) Unc.nameANDdep(self.name, self.dep) return "%s[%s]" % (comp_value, comp_unc) # Overloading "+" Addition Operator For Complex Calculation def __add__(self, other): # Check the value if it is scalar or vector if isinstance(self.comp_value, np.ndarray) and isinstance(self.comp_unc, np.ndarray): if self.comp_value.shape[0] == self.comp_unc.shape[0]: C_V1 = self.comp_value C_U1 = self.comp_unc C_V2 = other.comp_value C_U2 = other.comp_unc M1 = 0 for U in C_U1: a = np.power(U, 2) M1 += a comp_unc = np.sqrt(M1) M2 = 0 for U2 in C_U2: a2 = np.power(U2, 2) M2 += a2 other_comp_unc = np.sqrt(M2) new_comp_unc = np.sqrt(np.power(comp_unc, 2) + np.power(other_comp_unc, 2)) v_sum = np.sum(C_V1) + np.sum(C_V2) # Complex Uncertainty calculation for addition return Ucom(v_sum, new_comp_unc, self.name, self.dep) else: # Perform complex addition calculation comp_value = np.add(self.comp_value,other.comp_value) # check variable correlation on complex addition if self is other: comp_unc = np.subtract(self.comp_unc, other.comp_unc) Ucom.id_check = id(self), id(other) elif Ucom.id_check[0] == id(other): comp_unc = np.subtract(self.comp_unc, other.comp_unc) else: comp_unc = np.sqrt(np.power(self.comp_unc, 2) + np.power(other.comp_unc, 2)) # General Formula of Uncertainty 
for Subtraction return Ucom(comp_value, comp_unc, self.name, self.dep) # Overloading "-" Subtraction Operator For Complex Calculation def __sub__(self, other): # Check the value if it is scalar or vector if isinstance(self.comp_value, np.ndarray) and isinstance(self.comp_unc, np.ndarray): if self.comp_value.shape[0] == self.comp_unc.shape[0]: C_V1 = self.comp_value C_U1 = self.comp_unc C_V2 = other.comp_value C_U2 = other.comp_unc M1 = 0 for U in C_U1: a = np.power(U, 2) M1 += a comp_unc = np.sqrt(M1) v1 = 0 for U2 in C_U2: a2 = np.power(U2, 2) v1 += a2 other_comp_unc = np.sqrt(v1) new_comp_unc = np.sqrt(np.power(comp_unc, 2) + np.power(other_comp_unc, 2)) v_sub = np.sum(C_V1) - np.sum(C_V2) # Complex Uncertainty calculation for subtraction return Ucom(v_sub, new_comp_unc, self.name, self.dep) else: # Perform complex subtraction calculation comp_value = np.subtract(self.comp_value, other.comp_value) # check variable correlation on complex subtraction if self is other: comp_unc = np.subtract(self.comp_unc, other.comp_unc) Ucom.id_check= id(self), id(other) elif Ucom.id_check[0] == id(other): comp_unc = np.subtract(self.comp_unc, other.comp_unc) else: comp_unc = np.sqrt(np.power(self.comp_unc, 2) + np.power(other.comp_unc,2)) # General Formula of Uncertainty for Subtraction return Ucom(comp_value, comp_unc, self.name, self.dep) # Overloading "*" multiplication Operator For Complex Calculation def __mul__(self, other): # Check the value if it is scalar or vector if isinstance(self.comp_value, np.ndarray) and isinstance(self.comp_unc, np.ndarray): if self.comp_value.shape[0] == self.comp_unc.shape[0]: C_V1 = self.comp_value C_U1 = self.comp_unc C_V2 = other.comp_value C_U2 = other.comp_unc # Perform value multiplication mul = 0 for U in C_U1: a = np.power(U, 2) # do sum mul += a comp_unc = np.sqrt(mul) comp_value = np.sum(C_V1) * C_V2 return Ucom(comp_value, comp_unc, self.name, self.dep) else: print("Error: Number of element of values and uncertainties must be same") 
else: # Perform complex multiplication calculation comp_value = np.multiply(self.comp_value, other.comp_value) # check variable correlation on complex division if self is other: comp_unc = np.divide(self.comp_unc, other.comp_unc) Ucom.id_check = id(self), id(other) elif Ucom.id_check[0] == id(other): comp_unc = np.divide(self.comp_unc, other.comp_unc) else: comp_unc = comp_value * (np.sqrt(np.power(self.comp_unc / self.comp_value, 2) + np.power(other.comp_unc / other.comp_value, 2))) # General Formula of Uncertainty for multiplication return Ucom(comp_value, comp_unc, self.name, self.dep) # Overloading "/" Division Operator For Complex Calculation def __truediv__(self, other): # Check the value if it is scalar or vector if isinstance(self.comp_value, np.ndarray) and isinstance(self.comp_unc, np.ndarray): if self.comp_value.shape[0] == self.comp_unc.shape[0]: C_V1 = self.comp_value C_U1 = self.comp_unc C_V2 = other.comp_value C_U2 = other.comp_unc # Perform complex division calculation div = 0 for U in C_U1: a = np.power(U, 2) # do sum div += a comp_unc = np.sqrt(div) comp_value = np.sum(C_V1) / C_V2 return Ucom(comp_value, comp_unc, self.name, self.dep) else: print("Error: Number of element of values and uncertainties must be same") else: # Perform complex division calculation comp_value = np.divide(self.comp_value, other.comp_value) # check variable correlation on complex division if self is other: comp_unc = np.divide(self.comp_unc, other.comp_unc) Ucom.id_check = id(self), id(other) elif Ucom.id_check[0] == id(other): comp_unc = np.divide(self.comp_unc, other.comp_unc) else: comp_unc = comp_value * (np.sqrt(np.power(self.comp_unc / self.comp_value, 2) + np.power(other.comp_unc / other.comp_value, 2))) # General Formula of Uncertainty for Division return Ucom(comp_value, comp_unc, self.name, self.dep) # Overloading "power" as a Polynomial functions for complex. 
Formula if R = X^n delta(R) = (|n|.delta(X).|R|)/X def __pow__(self, other): # Perform an power calculation comp_value = self.comp_value ** other comp_unc = (other * self.comp_unc * self.comp_value ** other) / self.comp_value return Ucom(comp_value, comp_unc, self.name, self.dep) # Calculation of Square root on given function def sqrt(self): comp_value = self.comp_value ** 0.5 comp_unc = (0.5 * self.comp_unc * comp_value) / self.comp_value return Ucom(comp_value, comp_unc, self.name, self.dep) # Calculation of natural logarithm - ln(x) on given function def ln(self): comp_value = math.log(self.comp_value) # ln(Value) comp_unc = self.comp_unc / self.comp_value return Ucom(comp_value, comp_unc, self.name, self.dep) # Calculation of logarithm - log10 on given function def ulog(self): comp_value = math.log10(self.comp_value) comp_unc = 0.434 * (self.comp_unc / self.comp_value) return Ucom(comp_value, comp_unc, self.name, self.dep) # Calculation of Antilog 10^x def tenPower(self): comp_value = 10 ** self.comp_value ln10 = 2.3026 comp_unc = comp_value * ln10 * self.comp_unc # ln10=2.3026 return Ucom(comp_value, comp_unc, self.name, self.dep) # Exponential function (e^x) calculation def uexp(self): comp_value = cmath.exp(self.comp_value) # e^1=2.718 comp_unc = comp_value * self.comp_unc return Ucom(comp_value, comp_unc, self.name, self.dep) # ############################ COMPLEX TRIGONOMETRIC FUNCTIONS CALCULATION ########################### # ======== !!Calculations are performed in radian !! ================= #################################################################################### # sinus function calculation. 
def sin(self): comp_value = cmath.sin(self.comp_value) comp_unc = self.comp_unc * cmath.cos(self.comp_value) # if y = sin(x) than U(y) = U(x)cos(x) return Ucom(comp_value, comp_unc, self.name, self.dep) # cosine function calculation def cos(self): comp_value = cmath.cos(self.comp_value) comp_unc = self.comp_unc * cmath.sin(self.comp_value) # if y = sin(x) than U(y) = U(x)cos(x) return Ucom(comp_value, comp_unc, self.name, self.dep) # tan function calculation def tan(self): comp_value = cmath.tan(self.comp_value) secSquared = (2 / (cmath.cos(2 * self.comp_value)) + 1) comp_unc = self.comp_unc * secSquared # if y = tan^2(x) than U(y) = -U(x)sec^2(x) return Ucom(comp_value, comp_unc, self.name, self.dep) # cot function calculation def cot(self): comp_value = 1 / cmath.tan(self.comp_value) csecSquared = -(2 / (1 - cmath.cos(2 * self.comp_value))) comp_unc = self.comp_unc * csecSquared # if y = cot^2(x) than U(y) = -U(x) csc^2(x) return Ucom(comp_value, comp_unc, self.name, self.dep) # ########## Inverse Trigonometric Calculation (Complex) ########################################### # arcsin function calculation def arcsin(self): comp_value = cmath.asin(self.comp_value) dx = (1 / cmath.sqrt(1 - self.comp_value ** 2)) comp_unc = self.comp_unc * dx # if y = sin^-1(x) than U(y) = -U(x) 1/sqrt(1-x^2) return Ucom(comp_value, comp_unc, self.name, self.dep) # arcos function calculation def arccos(self): comp_value = cmath.acos(self.comp_value) dx = cmath.sqrt(1 - self.comp_value ** 2) dxr = -1 / dx comp_unc = self.comp_unc * dxr # if y = cos^-1(x) than U(y) = -U(x) -1/sqrt(1-x^2) return Ucom(comp_value, comp_unc, self.name, self.dep) # arctan function calculation def arctan(self): comp_value = cmath.atan(self.comp_value) dx = 1 + self.comp_value ** 2 dxr = 1 / dx comp_unc = self.comp_unc * dxr # if y = tan^-1(x) than U(y) = -U(x) 1/1+x^2 return Ucom(comp_value, comp_unc, self.name, self.dep) # ########## Hyperbolic Trigonometric Calculation 
########################################### # sinhx function calculation def sinh(self): comp_value = cmath.sinh(self.comp_value) dxr = cmath.cosh(self.comp_value) comp_unc = self.comp_unc * dxr # if y = sinhx than U(y) = U(x)coshx return Ucom(comp_value, comp_unc, self.name, self.dep) # coshx function calculation def cosh(self): comp_value = cmath.cosh(self.comp_value) dxr = cmath.sinh(self.comp_value) comp_unc = self.comp_unc * dxr # if y = coshx than U(y) = U(x)sinhx return Ucom(comp_value, comp_unc, self.name, self.dep) # tanhx function calculation def tanh(self): comp_value = cmath.tanh(self.comp_value) dx1 = 1 - cmath.cosh(2 * self.comp_value) dx2 = 1 + cmath.cosh(2 * self.comp_value) dx3 = dx1 * dx2 dxr = dx3 / 4 dxrf = (1 - dxr) comp_unc = self.comp_unc * dxrf # if y = tanhx than U(y) = U(x)(1-tanh^2x) return Ucom(comp_value, comp_unc, self.name, self.dep) # ########## Complex Inverse Hyperbolic Trigonometric Calculation ########################################### # asinhx function calculation def arcsinh(self): comp_value = cmath.asinh(self.comp_value) dx1 = cmath.sqrt(self.comp_value ** 2) + cmath.sqrt(1) dxr = 1 / dx1 comp_unc = self.comp_unc * dxr # if y = asinh(x) than U(y) = U(x) 1/sqrt(x^2+1) return Ucom(comp_value, comp_unc, self.name, self.dep) # acoshx function calculation def arccosh(self): comp_value = cmath.acosh(self.comp_value) dx1 = cmath.sqrt(self.comp_value ** 2) - cmath.sqrt(1) dxr = 1 / dx1 comp_unc = self.comp_unc * dxr # if y = acosh(x) than U(y) = U(x) 1/sqrt(x^2-1) return Ucom(comp_value, comp_unc, self.name, self.dep) # atanhx function calculation def arctanh(self): comp_value = cmath.atanh(self.comp_value) dx1 = 1 - self.comp_value ** 2 dxr = 1 / dx1 comp_unc = self.comp_unc * dxr # if y = atanh(x) than U(y) = U(x) 1/1-x^2 return Ucom(comp_value, comp_unc, self.name, self.dep) # Complex sum calculation def Csum(self): CV_sum = np.sum(self.comp_value) cal = 0 for U in self.comp_unc: a = np.power(U, 2) # perform sum calculation cal 
+= a comp_unc = np.sqrt(cal) return Ucom(CV_sum, comp_unc, self.name, self.dep) def mean(vs): mean = np.mean(vs) return mean def stdev(vs): std = np.std(vs) return std def comStatisticUnc(self): value = self.comp_value count = np.count_nonzero(value) std = Unc.stdev(value) mean = Unc.mean(value) meanAvg = std/math.sqrt(count) return Ucom(mean, meanAvg, self.name, self.dep) # Complex numbers without enclosing parentheses. (6+4j) = 6+4j ========== def rmParen(comp): comp = str(comp) comp = comp.strip(')') comp = comp.strip('(') return comp # ########## format complex number before print ########################################### def comformat(com_num): if com_num.real > 0 or com_num.real < 0: if com_num.real > 0: if checktype(com_num.real) == 'int': ncom_num = "%.1f%s%.1f%s" % (com_num.real, '+', com_num.imag, 'j') ncom_num= ncom_num.replace(" ", "") ncom_num = complex(ncom_num) else: ncom_num = "%.3f%s%.3f%s" % (com_num.real, '+', com_num.imag, 'j') ncom_num= ncom_num.replace(" ", "") ncom_num = complex(ncom_num) elif com_num.real < 0: if checktype(com_num.real) == 'int': ncom_num = "%.1f%.1f%s" % (com_num.real, com_num.imag, 'j') ncom_num= ncom_num.replace(" ", "") ncom_num = complex(ncom_num) else: ncom_num = "%.3f%s%.3f%s" % (com_num.real, '+', com_num.imag, 'j') ncom_num= ncom_num.replace(" ", "") ncom_num = complex(ncom_num) return ncom_num else: vs = "%.3f%s" % (com_num.imag, 'j') vs = vs.replace(" ", "") vs = complex(vs) return vs def checktype(num): num = str(num-int(num))[1:] count = len(num) if count == 2: nv = num[1] if nv == '0': num = "int" return num
true
c7c86deb1a74ab467819a2de9c42f100043196d2
Python
jasonchang0/citadel-datathon-2018
/plot_health_quality.py
UTF-8
2,054
2.953125
3
[]
no_license
import pandas as pd import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt from matplotlib import style def fadeColor(c1, c2, mix=0): # fade (linear interpolate) from color c1 (at mix=0) to c2 (mix=1) # assert len(c1) == len(c2) # assert 0 <= mix <= 1, 'mix='+str(mix) rgb1 = np.array([int(c1[ii:ii + 2], 16) for ii in range(1, len(c1), 2)]) rgb2 = np.array([int(c2[ii:ii + 2], 16) for ii in range(1, len(c2), 2)]) rgb = ((1 - mix) * rgb1 + mix * rgb2).astype(int) # cOld='#'+''.join([hex(a)[2:] for a in rgb]) # print(11,[hex(a)[2:].zfill(2) for a in rgb]) c = '#' + ('{:}' * 3).format(*[hex(a)[2:].zfill(2) for a in rgb]) # print(rgb1, rgb2, rgb, cOld, c) return c df = pd.read_csv('aggregated_health_quality.csv') df_ref = pd.read_csv('food_service_establishment_inspections.csv') df_ref['county'] = df_ref.county.str.lower() df_ref['county'] = df_ref.county.str.capitalize() print(df_ref.head()) # avg_df = pd.DataFrame(columns=('lon', 'lat', 'weighted_metric')) # # for _ in avg_df.columns: # avg_df[_] = [np.nan] * len(df.county_name.unique()) df['lon'] = [np.nan] * len(df.county_name.unique()) df['lat'] = [np.nan] * len(df.county_name.unique()) print(df.head()) for _ in df.index.values: row = df.loc[_] df.set_value(_, 'lon', df_ref[df_ref.county == row['county_name']].longitude.mean()) df.set_value(_, 'lat', df_ref[df_ref.county == row['county_name']].latitude.mean()) print(df.head()) df.to_csv('health_quality_county_gps_coordinates.csv', index=False) c1='#FFDFDF' #light red c2='#E60000' #red wm_min = df.weighted_metric.min() print(wm_min) wm_range = df.weighted_metric.max() - wm_min df['percentile'] = (df.weighted_metric - wm_min)/wm_range color_lst = [] for _ in df.percentile: color_lst.append(fadeColor(c1, c2, _)) plt.scatter(df.lon, df.lat, c=df.percentile, s=25, linewidths=5, cmap='Reds') plt.colorbar() plt.xlabel('Longitude') plt.ylabel('Latitude') plt.savefig('county_health_demo.png', transparent=True) plt.show()
true
1ab025cfaa732541433b5109950fe3dd2127ffec
Python
wyattsmcall1/funcobspy
/functionobservers/examples/dnn_examples/frozen_dnn_example.py
UTF-8
2,651
2.703125
3
[]
no_license
import os import cPickle as pickle import numpy as np from keras.models import Sequential from keras.layers import Dense import matplotlib.pyplot as plt from functionobservers.mappers import FrozenDenseDNN def init_regular_model(nn_layer1=100, nn_layer2=50): # construct regression model model = Sequential() model.add(Dense(nn_layer1, input_shape=(1,), init='normal', activation='relu')) model.add(Dense(nn_layer2, init='normal', activation='relu')) last_layer = Dense(1, init='normal') last_layer.use_bias = False model.add(last_layer) # Compile model model.compile(loss='mean_squared_error', optimizer='adam') return model model = init_regular_model() # file and directory information data_dir = "./data/" out_dir = "./results/" f_prefix = "synthetic_time_series_generator_RBFNetwork_kernel_gaussian_scheme" f_scheme = "switching" # create files if not os.path.exists(out_dir): os.makedirs(out_dir) loadfile = os.path.join(data_dir, f_prefix + "_" + f_scheme + ".pkl") data_dict = pickle.load(open(loadfile, "rb")) orig_obs = data_dict['orig_func_obs'] orig_data = data_dict['orig_func_data'] orig_plot_vals = data_dict['orig_func_plot_vals'] # get shapes of arrays data_dim = orig_data.shape[1] obs_dim = orig_obs.shape[1] plot_dim = orig_plot_vals.shape[1] nsamp = orig_data.shape[0] nsteps = orig_data.shape[2] nplot = orig_plot_vals.shape[0] data_start = 0 data_end = 2*np.pi plot_data = np.linspace(data_start, data_end, nplot) plot_data_in = np.reshape(plot_data, (nplot, 1)) pred_plot_vals = np.zeros(orig_plot_vals.shape) nlayer1 = 100 nlayer2 = 50 nepochs = 1000 batch_size = 200 nn_shape = (nlayer1, nlayer2) be_shape = (batch_size, nepochs) print "Inferring DNN with layers " + str(nn_shape) + " over " + str(nsteps) + \ " steps with (batch_size, nepochs) = " + str(be_shape) # train regular model curr_model = init_regular_model(nn_layer1=nlayer1, nn_layer2=nlayer2) curr_data = orig_data[:, :, 0] curr_obs = orig_obs[:, :, 0] curr_plot_vals = orig_plot_vals[:, :, 0] 
curr_model.fit(curr_data, curr_obs, batch_size=batch_size, nb_epoch=nepochs, verbose=1) curr_preds = curr_model.predict(plot_data_in) # train frozen model frozen_model = FrozenDenseDNN(curr_model) frozen_model.fit(curr_data, curr_obs) frozen_preds = frozen_model.predict(plot_data_in) plt.figure() plt.plot(curr_data, curr_obs, 'ro', label='obs') plt.plot(plot_data_in, curr_plot_vals, 'k-', linewidth=3.0, label='actual') plt.plot(plot_data_in, curr_preds, 'b-', linewidth=3.0, label='model preds') plt.plot(plot_data_in, frozen_preds, 'g-', linewidth=3.0, label='frozen preds') plt.legend() plt.show()
true
1db2762f4fce1fae31c6bffb60072e8b12bf3707
Python
pydget/foba
/foba/matrices/matrix_strings/CompanyBrandTypes.py
UTF-8
1,142
2.765625
3
[]
no_license
# copyright: hbr.org # link: https://hbr.org/2019/12/build-a-culture-to-match-your-brand CompanyBrandTypes = [ ['Disruptive', 'Category leader', 'Rebellious, confident, daring', ['Virgin', 'Airbnb', 'Dr Pepper'], ], ['Conscious', 'Higher purpose', 'Inspiring, thoughtful, transparent', ['Seventh', 'Generation', 'SoulCycle', 'Patagonia'], ], ['Service', 'Customer need', 'Humble, predictable, friendly', ['Nordstrom', 'USAA', 'Ritz Carlton'], ], ['Innovative', 'Possibility', 'Risk-taking, imaginative, progressive', ['Apple', 'Nike', 'Amazon'], ], ['Value', 'Higher-priced brand', 'Down-to-earth, practical, straightforward', ['Walmart', 'IKEA', 'Subway'], ], ['Performance', 'Performance standard', 'Precise, competent, reliable', ['BMW', 'FedEx', 'American Express'], ], ['Luxury', 'Populist brand', 'Discriminating, refined, glamorous', ['Tiffany', 'Mercedes-Benz', 'Hermes'], ], ['Style', 'Functional brand', 'Creative, stylish, contemporary', ['Target', 'JetBlue', 'Mini Cooper'], ], ['Experience', 'Customer emotion', 'Exciting, energetic, imaginative', ['Disney', 'American Girl', 'Wegmans'], ], ]
true
84e422ae1f3b00a197498a2eed9d3c07f0479d98
Python
mazembo/readingNews
/lib/reading-yaml2.py
UTF-8
202
2.515625
3
[]
no_license
import yaml with open("2016-12-04.yml", 'r') as stream: articles = yaml.load(stream) print len(articles) print type(articles) print articles['article2'] print articles['article2']['picture']
true
068cc2aba6679973631c2ba0ad68a17bfec0067d
Python
gabrielleevaristo/algo-practice
/stacks/stackpractice2.py
UTF-8
5,550
4.03125
4
[]
no_license
# Determine if a string of parenthesis () is valid or not """ The method below uses extra space for a stack. For O(1) space, use the same method as in finding parenthesis depth. Increment the count if an opening parenthesis is encountered, else decrement the count. """ def isParenthesisValid(brackets): stack = [] for c in brackets: if c == '(': stack.append(c) if not stack: return False if c == ')': stack.pop() return True if not stack else False # Determine if a string of brackets ([{}]) is valid or not def areBracketsValid(brackets): stack = [] for c in brackets: # If character is an opening bracket, add it to the stack if c == '(' or c == '[' or c == '{': stack.append(c) # At this point, if the stack is empty, no opening bracket is detected if not stack: return False # Depending on the closing bracket, pop from the top of the stack and # compare the two characters. if c == ')': x = stack.pop() if x != '(': return False elif c == ']': x = stack.pop() if x != '[': return False else: x = stack.pop() if x != '{': return False # At the end of the loop, the stack should be empty. If so, return True return True if not stack else False # Given an index, find the index of its closing bracket def findClosingBracket(brackets,i): if brackets[i] != '(': return -1 # If character is not a (, there is no closing bracket s = [] # Start from the given index. If it's an opening bracket, push it to the stack. If it's a # closing bracket, pop it. If the stack become empty (matching brackets), return the index. for k in range(i,len(brackets)): if brackets[k] == '(': s.append(brackets[i]) elif brackets[k] == ')': s.pop() if not s: return k return -1 # Find the maximum depth of nested parenthesis in a string def parenthesisDepth(arr): maxCount, count = float('-inf'), 0 for c in arr: # Increment count if character is an opening bracket if c == "(": count += 1 if count > maxCount: maxCount = count # Decrement count if character is opening bracket. 
If count falls below # 0 (too many closing brackets), return -1 else: count -= 1 if count < 0: return -1 # If count does not equal 0, there are too many opening brackets if count != 0: return -1 return maxCount # Find the span for each day in the array """ Span for a given day = max number of consecutive days before it such that the price is less than or equal to it (includes the given day) """ def stockSpan(arr): # Creates a new array where each value is defaulted to 1 span = [1 for i in range(len(arr))] s = [0] for i in range(1,len(arr)): # If the value on top of the stack is smaller than the current value, pop it. Subtracting # the next value on top of the stack from the current index will produce the span. while s and arr[i] >= arr[s[-1]]: s.pop() # If the stack is empty, all values before it are less than it span[i] = i+1 if not s else i-s[-1] s.append(i) return span # Return the length of the longest valid substring of panrethesis def lengthOfLongestValidSubstring(arr): s = [-1] # Push -1 since index of last element-(-1) = length of entire string result = 0 for i in range(len(arr)): if arr[i] == '(': s.append(i) else: s.pop() if s: # If there are values in the stack, result = max(result, i - s[-1]) # current result is current index-top of stack else: s.append(i) return result # Reverse a string def reverseString(str): # return str[::-1] will also work without using extra space s = [] for c in str: # Add all values to the stack s.append(c) result = ['' for i in str] # Create a list with the same length of str for i in range(len(str)): # Add the values to the result by pop result[i] = s.pop() return ''.join(result) # Keep track of the maximum elemnent in a stack def maxElement(arr): maxStack = [arr[0]] max = float('-inf') for i in range(1,len(arr)): # If the current value is greater than the previous maximum value, add # current value to the stack. 
Else, just add the previous maximum value if arr[i] > maxStack[-1]: maxStack.append(arr[i]) else: maxStack.append(maxStack[-1]) return maxStack # Check if any two of the same words are consecutive, then destroy both, and return # the number of words left in the sequence def deleteConsecutiveWords(seq): s = [] for i in range(len(seq)): if not s: # Add value to stack is stack is empty s.append(seq[i]) else: if seq[i] == s[-1]: # If stack exists and consecutive words are the same, s.pop() # pop the value off the stack else: s.append(seq[i]) return len(s) print(deleteConsecutiveWords(["tom","bob","jerry","jerry","tom"]))
true
1d16cc040e961ce972db34ba12c8656f1cc2a17e
Python
nicolassnider/tkinter_flask_django
/python-udemy/Practica2/Practica02_04.py
UTF-8
584
4.53125
5
[]
no_license
''' Problema 04: Determine si un número es múltiplo de 3 y 5. Análisis: Para la solución de este problema, se requiere que usuario ingrese un número entero n, y luego el sistema analiza y determine si es el número de múltiplo de 3 y de 5. ''' valor = int(input("Valor:\n")) multiplo1=3 multiplo2=5 if valor%multiplo1==0: print (f"{valor} es multiplo de {multiplo1}") if valor%multiplo2==0: print (f"{valor} es multiplo de {multiplo2}") if(not valor%multiplo1==0 and not valor%multiplo2==0): print (f"{valor} no es multiplo de {multiplo1} ni {multiplo2}")
true
49baef3a6ffa7ae0f37cb61f3d8fe230f559cde4
Python
monkeydunkey/interviewCakeProblems
/wordSquares.py
UTF-8
1,291
3.375
3
[]
no_license
if __name__ == '__main__': def buildRecurseSquare(completedMat, startmap): print 'HI' if len(completedMat) == len(completedMat[0]): # we have a square matrix return [completedMat] outLi = [] currLength = len(completedMat) wordToLook = completedMat[0][currLength] print wordToLook, startmap[ord(wordToLook) - ord('a')] for w in startmap[ord(wordToLook) - ord('a')]: print w i = 0 cand = True while i < currLength: if w[i] != completedMat[i][currLength]: cand = False break i += 1 if cand: outLi.extend(buildRecurseSquare(completedMat + [w], startmap)) return outLi def wordSquares(words): """ :type words: List[str] :rtype: List[List[str]] """ startmap = [[] for x in xrange(26)] for w in words: startmap[ord(w[0]) - ord('a')].append(w) possibleSquares = [] for w in words: possibleSquares.extend(buildRecurseSquare([w], startmap)) print possibleSquares return possibleSquares print wordSquares(["area","lead","wall","lady","ball"])
true
adcf309b9863ccbe1af38113524e91a7088fe475
Python
chiraggandhi123/ML
/Lecture_03/a.py
UTF-8
100
3.578125
4
[]
no_license
sum=0 while(True): a=int(input()) sum+=a if sum<0: break print(a)
true
f3597894d479e54f535858eef87ce537fade1a3f
Python
fjluartes/pcc
/ch11/city_functions.py
UTF-8
376
3.21875
3
[]
no_license
#!/usr/bin/env python # city_functions.py: Exercise 11-1, 11-2 # 4 Sep 2018 | fjgl def get_formatted_city(city, country, population=''): """Neatly formatted city and country.""" if population: formatted_city = city + ", " + country + " - population " + str(population) else: formatted_city = city + ", " + country return formatted_city.title()
true
89c6b25ad41ce68138de86fad6f3d5139a521655
Python
falconlee236/CodingTheMatrix-Answer
/src/Chap12_Problem.py
UTF-8
789
3.078125
3
[ "MIT" ]
permissive
from mat import Mat from vecutil import list2vec from matutil import listlist2mat from svd import factor # Problem 12.8.1 def squared_Frob(A): return sum([A[x] * A[x] for x in A.f]) print(squared_Frob(listlist2mat([[1, 2, 3, 4], [-4, 2, -1, 0]]))) # Problem 12.8.7 def SVD_solve(U, Sigma, V, b): try: answer = V * Mat(Sigma.D, {key: 1/value for key, value in Sigma.f.items()}) * U.transpose() * b except ZeroDivisionError: return "FAIL" else: return answer A = listlist2mat([[1, 1, 0], [1, 0, 1], [0, 1, 1]]) U, Sigma, V = factor(A) b = list2vec([2, 3, 3]) print(SVD_solve(U, Sigma, V, b)) A = listlist2mat([[1, 1, 1], [1, 1, 1], [0, 1, 1]]) U, Sigma, V = factor(A) b = list2vec([2, 3, 3]) print(SVD_solve(U, Sigma, V, b))
true