seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
28075594780 | import sklearn
import pandas as pd
from collections import Counter
import math
from sklearn.datasets import load_iris
### compute entropy for a set of classification, provided as a pandas Series
def entropy(classes):
    """Shannon entropy (base 2) of a collection of class labels.

    `classes` is any sized iterable of hashable labels (typically a
    pandas Series).  Returns 0.0 for an empty or single-class input.
    """
    total = len(classes)
    tally = Counter(classes)
    ent = 0.0
    for count in tally.values():
        p = count / total
        ent -= p * math.log(p, 2)
    return ent
### Assume that both attribute and classes are pandas Series
### For each value of attribute, compute the entropy. Then return the weighted sum
def remainder(attribute, classes):
    """Weighted entropy of `classes` after splitting on `attribute`.

    Both arguments are pandas Series sharing the same index.  For each
    distinct attribute value, the entropy of the matching class labels
    is computed; the result is the sum weighted by subset size.

    Fixes over the original: no longer shadows the built-in `map`, and
    the unreachable `pass` after the return was removed.
    """
    total = len(classes)
    # Group row labels by attribute value.
    groups = {}
    for row_label in attribute.index:
        groups.setdefault(attribute[row_label], []).append(row_label)
    rem = 0.0
    for rows in groups.values():
        rem += (len(rows) / total) * entropy(classes[rows])
    return rem
### assume that data is a pandas dataframe, and classes is a pandas series
### For each column in the dataframe, compute the remainder and select the column with the lowest
### remainder
def selectAttribute(data, classes):
    """Return the column label of `data` whose remainder is lowest.

    `data` is a pandas DataFrame of attributes, `classes` a parallel
    Series of class labels.  Ties resolve in favour of the last column
    examined (the original `<=` comparison is preserved).

    Fixes over the original:
    - `DataFrame.iteritems()` was removed in pandas 2.0; `items()` is
      the long-standing equivalent.
    - Entropy (and hence remainder) can exceed 1.0 when there are more
      than two classes, so the minimum is seeded with +inf instead of
      1.0 (which could reject every attribute and return "").
    - Debug prints removed.
    """
    minIndex = ""
    minRem = float('inf')
    for index, attr in data.items():
        tempRem = remainder(attr, classes)
        if tempRem <= minRem:
            minRem = tempRem
            minIndex = index
    return minIndex
### Now we're ready to build a Decision Tree.
### A tree consists of one or more Nodes.
### A Node is either a leaf node, which has a value and no children
### Or it is a non-leaf, in which case it has an attribute that it tests and a set of children.
class Node:
    """A decision-tree node.

    A leaf carries a classification in `value` and has no children; an
    internal node tests `attribute` and routes on the instance's value
    for it via the `children` mapping.
    """

    def __init__(self, attribute=None, value=None):
        self.attribute = attribute
        self.value = value
        self.children = {}

    def isLeaf(self):
        """True when this node has no children (i.e. it is a leaf)."""
        return not self.children

    def classify(self, instance):
        """Walk the tree with `instance` (a pandas Series of attribute
        values) and return the leaf Node reached."""
        if self.isLeaf():
            return self
        branch = self.children[instance[self.attribute]]
        remaining = instance.drop(self.attribute)
        return branch.classify(remaining)

    def __repr__(self):
        return "%s %s" % (self.attribute, self.value)
##
class DecisionTree:
    """A decision tree built from a pandas DataFrame.

    Fix: the original defined ``__init__`` twice; the first overload
    (taking a pre-built root Node) was silently discarded by the second
    definition, so only the ``(df, abbrDic)`` constructor ever existed
    at runtime.  The dead definition has been removed.
    """

    def __init__(self, df, abbrDic):
        # Build the tree from the dataset; abbrDic maps each attribute
        # to the possible values it can take on.
        self.root = makeNode(df, abbrDic)

    def classify(self, instance):
        """Classify `instance` (a pandas Series of attribute values);
        returns the leaf Node reached — use node.classify as a helper."""
        return self.root.classify(instance)
### construct a decision tree. Inputs are a pandas dataframe containing a dataset,
### and an attributeDict that maps each attribute to the possible values it can take on.
### We make the tree recursively. There are three base cases:
### 1. All the data is of the same class.
### In this case, we are at a leaf node. set the value to be the classification.
### 2. We are out of attributes to test.
### In this case, apply ZeroR.
### 3 We are out of data
### In this case, apply ZeroR.
### Return the node
### Otherwise :
### 1. Use selectAttribute to find the attribute with the largest information gain.
### 2. Break the data into subsets according to each value of that attribute.
### 3. For each subset, call makeNode
def ZeroR(cla, res):
    """Majority-class baseline: return the most frequent label in `cla`.

    `cla` is a pandas Series of class labels.  `res` (the list of
    possible labels) is accepted for interface compatibility but is not
    needed to pick the majority class.

    Bug fixed: the original seeded its running maximum with the count of
    the first row's class but never initialised `maxindex`, so whenever
    that class was already the majority nothing strictly exceeded the
    seed and the function raised UnboundLocalError.  It also shadowed
    the built-in `max`.
    """
    counts = Counter(cla)
    # most_common(1) breaks ties by first-seen order, which is at least
    # as well-defined as the original's set-iteration order.
    return counts.most_common(1)[0][0]
def makeNode(df, attributeDict):
    """Recursively build a decision (sub)tree from `df`.

    The last column of `df` holds the classification; the rest are
    attributes.  `attributeDict` maps each remaining attribute (and the
    class column) to the values it can take.  Base cases: no data, a
    pure class, or no attributes left (majority vote via ZeroR).
    Otherwise split on the attribute chosen by selectAttribute and
    recurse on each value's subset.

    Dead work removed from the original: an unused `selectAttribute`
    call made before the base cases (doubling the cost of every node),
    an unused `root = Node()`, and an unused membership test.
    """
    numRow = df.shape[0]
    numCol = df.shape[1]
    data = df.iloc[:, 0:numCol - 1]
    cla = df.iloc[:, numCol - 1]
    # Base case: out of data -> fall back to the class column's first
    # possible value from attributeDict.
    if numRow == 0:
        resLabel = df.columns[-1]
        zr = attributeDict[resLabel][0]
        return Node(None, zr)
    # Base case: every remaining row has the same class -> leaf.
    index = cla.index
    temp = cla[index[0]]
    tempCounts = Counter(cla)
    if tempCounts[temp] == len(cla):
        return Node(None, temp)
    # Base case: only the class column is left -> majority vote.
    if df.shape[1] == 1:
        temp = attributeDict[df.columns[-1]]
        return Node(None, ZeroR(cla, temp))
    # Recursive case: split on the attribute with the lowest remainder
    # (i.e. the largest information gain).
    bestArr = selectAttribute(data, cla)
    root = Node(bestArr)
    for val in attributeDict[bestArr]:
        subAttrDic = dict.copy(attributeDict)
        del subAttrDic[bestArr]
        # Restrict to the rows carrying this value and drop the column.
        temp = df[df[bestArr] == val]
        temp = temp.drop([bestArr], axis=1)
        root.children[val] = makeNode(temp, subAttrDic)
    return root
20953187542 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#import logging
#logging.basicConfig(level=logging.INFO)
import pdb

# Minimal pdb demo: execution pauses at set_trace(), then the final
# division deliberately fails.
s = '0'
n = int(s)
pdb.set_trace()  # execution pauses here and drops into the pdb debugger
#logging.info('n = %d' %n)
print(10/n)  # n == 0, so this raises ZeroDivisionError (intentional demo)
38387733909 | import pytest
from pydantic import ValidationError
from app.models.orm_models.advertisement import Advertisement
from app.models.orm_models.user import User
from app.repositories.advertisement_repository import AdvertisementRepo
from app.repositories.user_repository import UserRepository
from app.services.advertisement_service import AdvertisementService
async def get_mock_user(*args, **kwargs):
    """Monkeypatch stand-in for UserRepository.get_user: ignores all
    arguments and returns a fixed KR/M user with zero balance."""
    return User(user_id=1, country="KR", gender="M", balance=0)
async def get_mock_advertisements(*args, **kwargs):
    """Monkeypatch stand-in for AdvertisementRepo.get_advertisements:
    ignores all arguments and returns one fixed KR/M-targeted ad."""
    return [
        Advertisement(
            name="test",
            image_url="abc.jpg",
            landing_url="https://test",
            weight=300,
            target_country="KR",
            target_gender="M",
            reward=300,
        )
    ]
@pytest.mark.asyncio
async def test_get_advertisements(monkeypatch):
    """Service test with both repositories mocked out.

    Asserts that get_advertisements raises a pydantic ValidationError —
    NOTE(review): presumably the mocked rows do not satisfy the
    service's response schema; confirm against AdvertisementService.
    """
    monkeypatch.setattr(UserRepository, "get_user", get_mock_user)
    monkeypatch.setattr(
        AdvertisementRepo, "get_advertisements", get_mock_advertisements
    )
    with pytest.raises(ValidationError):
        advertisement_service = AdvertisementService()
        await advertisement_service.get_advertisements(
            user_id=1, country="KR", gender="M", session=None
        )
| devOceanblue/marketing-API-Server-project | tests/unit/services/test_advertisement.py | test_advertisement.py | py | 1,250 | python | en | code | 0 | github-code | 13 |
71964715858 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re
import heapq
from collections import namedtuple, defaultdict, Counter
#from urlparse import urlparse
def get_lines(filename):
    """Yield the lines of `filename` one at a time.

    NOTE(review): this module is Python 2 code; the 'rU' (universal
    newline) open mode used here was removed in Python 3.11.
    """
    with open(filename, 'rU') as f:
        for line in f:
            yield line
def get_matches(reader, pattern, processor):
    """Yield processor(match) for each line of `reader` that matches
    `pattern` (anchored at the start of the line via re.match)."""
    matcher = re.compile(pattern).match
    for line in reader:
        hit = matcher(line)
        if hit is not None:
            yield processor(hit)
# Main log-line pattern: 16-digit timestamp, request id, event type,
# and an optional tab-separated, type-specific tail.
PATTERN = (
    '(?P<TIME>\d{16})\t'  # only for dates after 09.09.2001
    '(?P<ID>\d{1,8})\t'
    '(?P<TYPE>(StartRequest|BackendConnect|BackendRequest|BackendOk|BackendError|'
    'StartMerge|StartSendResult|FinishRequest))'
    '(?:\t(?P<ADDITIONAL>.*))?'
)
# Per-event-type pattern for the ADDITIONAL tail; None means the tail
# (if any) is kept as an unparsed string.
ADDITIONAL_PATTERNS = {
    'StartRequest': None,
    'BackendConnect': '(?P<GR>\d{1,8})\t(?P<URL>.*)',
    'BackendRequest': '(?P<GR>\d{1,8})',
    'BackendOk': '(?P<GR>\d{1,8})',
    'BackendError': '(?P<GR>\d{1,8})\t(?P<ERROR>.*)',
    'StartMerge': None,
    'StartSendResult': None,
    'FinishRequest': None,
}
# Parsed log record: time (int), request id (str), event type (str),
# additional (dict of named groups, raw tail string, or None).
Match = namedtuple('Match', ('time', 'id', 'type', 'additional'))
def get_processor(additional_patterns={}):
    """Build a callable that turns a PATTERN regex match into a Match.

    Python 2 code (`iteritems`, `long`).  NOTE(review): the mutable
    default argument is shared between calls; harmless here since it is
    only read, never mutated.
    """
    # Pre-compile the non-None per-type tail patterns.
    additional_regex = {k: re.compile(v) for k, v in additional_patterns.iteritems() if v}
    # Parse the tail with the type's pattern if one exists, else keep it raw.
    additional_match = lambda t, x: additional_regex[t].match(x).groupdict() if t in additional_regex else x
    def processor(match):
        results = match.groupdict()
        return Match(
            time=long(results['TIME']),
            id=results['ID'],
            type=results['TYPE'],
            additional=additional_match(results['TYPE'], results['ADDITIONAL']),
        )
    return processor
def process_requests(sequence):
    """Fold a stream of Match records into summary statistics.

    Returns a dict with:
      p95   -- 95th percentile of total request duration
      top10 -- ids of the 10 requests with the slowest send phase
      fails -- requests that finished with unanswered backend groups
      ok    -- per-group Counter of successful backend hosts
      err   -- per-group, per-host Counter of backend error messages

    Bug fixed: the original indexed the *unsorted* duration list at
    position int(0.95*n) — durations arrive in finish order, not sorted
    order, so the "p95" was an arbitrary sample.  The list is now
    sorted first.  (The original Python 2 `map(...)` returned a list;
    the list comprehension preserves that.)
    """
    times = []
    send_times = []
    fails = 0
    backend_ok = defaultdict(Counter)
    backend_error = defaultdict(lambda: defaultdict(Counter))
    url_re = re.compile(r'http:\/\/(?P<NETLOC>[^\/]*).*')
    requests = defaultdict(dict)  # in-flight request state, keyed by id
    for match in sequence:
        if match.type == 'StartRequest':
            requests[match.id]['start'] = match.time
            requests[match.id]['backends'] = defaultdict(dict)
        if match.type == 'StartSendResult':
            requests[match.id]['send'] = match.time
        elif match.type.startswith('Backend'):
            gr = match.additional['GR']
            if match.type == 'BackendConnect':
                # Keep just the host part of the backend URL.
                url_match = url_re.match(match.additional['URL'])
                requests[match.id]['backends'][gr] = url_match.group('NETLOC') if url_match \
                    else match.additional['URL']
            elif match.type == 'BackendError':
                url = requests[match.id]['backends'][gr]
                error = match.additional['ERROR']
                backend_error[gr][url][error] += 1
            elif match.type == 'BackendOk':
                url = requests[match.id]['backends'].pop(gr)  # success: forget the url
                backend_ok[gr][url] += 1
        elif match.type == 'FinishRequest':
            times.append(match.time - requests[match.id]['start'])
            send_times.append((match.id, match.time - requests[match.id]['send']))
            if requests[match.id]['backends']:  # urls left => some group never answered ok
                fails += 1
            del requests[match.id]
    times.sort()  # required for a correct percentile
    return {
        'p95': times[int(len(times)*0.95)],
        'top10': [str(x[0]) for x in heapq.nlargest(10, send_times, key=lambda x: x[1])],
        'fails': fails,
        'ok': backend_ok,
        'err': backend_error,
    }
def output(filename, data):
    """Write the report produced by process_requests to `filename`.

    Python 2 code (`viewkeys`, `iteritems`); the report text itself is
    Russian and is emitted verbatim.
    """
    with open(filename, 'w') as f:
        f.write((
            '95-й перцентиль времени работы: {}\n\n'
            'Идентификаторы запросов с самой долгой фазой отправки результатов пользователю:\n{}\n\n'
            'Запросов с неполным набором ответивших ГР: {}\n\n')
            .format(data['p95'], ' '.join(data['top10']), data['fails'])
        )
        f.write('Обращения и ошибки по бекендам:\n')
        # Union of the group ids that appear in either map.
        for gr_key in set(data['ok'].viewkeys()) | set(data['err'].viewkeys()):
            f.write('ГР {}:\n'.format(gr_key))
            ok = data['ok'][gr_key]
            err = data['err'][gr_key]
            for url_key in set(ok.viewkeys()) | set(err.viewkeys()):
                f.write('\t{}\n'.format(url_key))
                # Total contacts = successes + all error occurrences.
                total = ok[url_key] + sum(err[url_key].values())
                f.write('\t\tОбращения: {}\n'.format(total))
                errors = err[url_key]
                if errors:
                    f.write('\t\tОшибки:\n')
                    for k, v in errors.iteritems():
                        f.write('\t\t\t{}: {}\n'.format(k, v))
def log_analyser(in_file, out_file):
    """Pipeline entry point: stream-parse `in_file` and write the
    aggregated report to `out_file`."""
    output(
        out_file,
        process_requests(
            get_matches(
                reader=get_lines(in_file),
                pattern=PATTERN,
                processor=get_processor(additional_patterns=ADDITIONAL_PATTERNS)
            )
        )
    )

log_analyser('input.txt', 'output.txt')
| avrybintsev/log_parser | parser.py | parser.py | py | 5,379 | python | en | code | 0 | github-code | 13 |
# Hard-coded stand-in for stdin: first line is the quantity of pens to
# buy; each following line is "pens_per_bundle price_per_bundle".
inputStr = '''57
2 2
50 30
30 27'''
count = -1  # index of the last line handed out by input() below
def input():
    """Return the next line of inputStr.

    NOTE(review): deliberately shadows the built-in input() so the
    solution runs against the embedded test data instead of stdin.
    """
    global count
    count += 1
    splitArr = inputStr.split('\n')
    return splitArr[count]
import math

lowestAmount = -1  # best (lowest) total price seen so far; -1 = none yet
total = 0  # number of pens that must be bought (set by set_total)
def getAmount(price, count):
    """Cost of buying at least `total` pens in bundles of `count` pens
    at `price` per bundle; lowers the global lowestAmount if cheaper."""
    price = float(price)
    count = float(count)
    global total, lowestAmount
    # Whole bundles only: round the bundle count up.
    bundle_count = math.ceil(float(total) / float(count))
    amount = int(bundle_count * price)
    if (lowestAmount == -1 or amount < lowestAmount):
        lowestAmount = amount
def set_total(buy_total):
    """Record how many pens must be bought (stored in the global total)."""
    global total
    total = buy_total
# Read the required quantity, evaluate each of the three offers, then
# print the cheapest total.
set_total(input())
[penCount, penPrice] = input().split()
getAmount(penPrice, penCount)
[penCount, penPrice] = input().split()
getAmount(penPrice, penCount)
[penCount, penPrice] = input().split()
getAmount(penPrice, penCount)
print(lowestAmount)
| JrontEnd/luogu | P1909/app.py | app.py | py | 801 | python | en | code | 0 | github-code | 13 |
19975332284 | """This module contains our main WebView widget for Kivy."""
from kivy.core.window import Window
from kivy.clock import Clock
from kivy.event import EventDispatcher
from kivy.uix.actionbar import ActionView, ActionBar, ActionButton
from kivy.uix.widget import Widget
from android.runnable import run_on_ui_thread
from jnius import autoclass
from webviewclient import create_forwarding_web_view_client
# Reference android classes (resolved at runtime through pyjnius).
WebView = autoclass('android.webkit.WebView')
WebViewClient = autoclass('android.webkit.WebViewClient')
LayoutParams = autoclass('android.view.ViewGroup$LayoutParams')
View = autoclass('android.view.View')
# The running python-for-android activity hosting our views.
activity = autoclass('org.kivy.android.PythonActivity').mActivity
class WebViewWrapper(Widget, EventDispatcher):
    """Kivy widget wrapping a native Android WebView.

    Unknown attribute access is proxied to the underlying native view
    (see __getattr__); all native-view manipulation happens on the
    Android UI thread via @run_on_ui_thread.
    """

    # Kivy events this widget can dispatch; fired by the forwarding
    # WebViewClient installed in create_web_view.
    _events = ['on_should_override_url_loading', 'on_page_started', 'on_page_finished', 'on_received_error',
               'on_page_commit_visible', 'on_back_button']

    def __init__(self, action_bar):
        """The Web View Wrapper is a Kivy Widget, which creates and manages a native android web view.

        The methods from the native android web view are exposed on this class. The calls are passed
        to the native android view. The actual native web view is constructed later on the ui thread.
        """
        self.action_bar = action_bar
        self._web_view = None
        # Register the events we're interested in.
        self._register_events()
        # Call our parent constructors.
        super(WebViewWrapper, self).__init__()
        # Create our web view on the ui thread.
        self.create_web_view()

    def dispatch_event(self, event_name, **kwargs):
        """Dispatch a registered Kivy event (called from the native client)."""
        print('EEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE Event %s dispatched \n' % event_name, kwargs)
        self.dispatch(event_name, **kwargs)

    def _event_default_handler(self, **kwargs):
        # No-op default handler required by Kivy event registration.
        pass

    def _register_events(self):
        # Install a default handler for, then register, each event name.
        events = self._events
        for event_name in events:
            setattr(self, event_name, self._event_default_handler)
            self.register_event_type(event_name)

    def __getattr__(self, method_name):
        """Expose the native web view methods on this class."""
        if hasattr(self._web_view, method_name):
            return lambda *x: getattr(self._web_view, method_name)(*x)
        else:
            raise Exception("WebViewWrapper::%s was not defined." % method_name)

    def on_size(self, *args):
        """Called when the screen orientation changes."""
        # Layout the web view on the ui thread.
        self.layout_web_view()

    @run_on_ui_thread
    def layout_web_view(self):
        """Pin the native view below the action bar (top 8% of the
        window) and stretch it over the remaining 92%."""
        if not self._web_view:
            return
        print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
        # Set the top left corner.
        self._web_view.setX(0)
        print('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB')
        self._web_view.setY(Window.height * 0.08)  # (self.action_bar.height)
        print('CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC')
        # Set the layout params.
        lp = self._web_view.getLayoutParams()
        print('DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD')
        lp.height = Window.height * 0.92
        print('EEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEE')
        lp.width = Window.width
        print('FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF')
        # Request another layout run.
        self._web_view.requestLayout()
        print('GGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGGG')

    @run_on_ui_thread
    def create_web_view(self):
        """Create the web view."""
        # Return if a web view has already been created.
        if self._web_view:
            return True
        # Create the native android web view.
        self._web_view = WebView(activity)
        # Setup the web view.
        settings = self._web_view.getSettings()
        settings.setJavaScriptEnabled(True)
        settings.setUseWideViewPort(True)  # enables viewport html meta tags
        settings.setLoadWithOverviewMode(True)  # uses viewport
        settings.setSupportZoom(True)  # enables zoom
        settings.setBuiltInZoomControls(True)  # enables zoom controls
        # Set the forwarding web view client.
        # This allows us to get events from the native web view.
        self.client = create_forwarding_web_view_client(self)
        self._web_view.setWebViewClient(self.client)
        # Set the top left corner (below the 8%-high action bar).
        self._web_view.setX(0)
        self._web_view.setY(Window.height * 0.08)
        # Add the web view to our view.
        height = Window.height * 0.92
        width = Window.width
        activity.addContentView(self._web_view, LayoutParams(width, height))
        self._web_view.loadUrl('https://www.youtube.com')

    @run_on_ui_thread
    def destroy_web_view(self):
        # Blank the page and drop our reference; the view itself is
        # garbage-collected on the Java side.
        if self._web_view:
            self._web_view.clearHistory()
            self._web_view.clearCache(True)
            self._web_view.loadUrl("about:blank")
            self._web_view = None

    @run_on_ui_thread
    def hide_web_view(self):
        """Hides the web view."""
        if self._web_view is None:
            return False
        self._web_view.setVisibility(View.GONE)

    @run_on_ui_thread
    def show_web_view(self):
        """Show the web view."""
        if self._web_view is None:
            return False
        self._web_view.setVisibility(View.VISIBLE)
6251694636 | #!/usr/bin/env python2.6
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#from __future__ import unicode_literals
import csv
import os
from StringIO import StringIO
import numpy as np
DIRLAHMAN = '/home/taro/src/s2r/lahman58'  # local directory holding the Lahman 5.8 CSV files
class LahmanReader(object):
    """Base reader for one Lahman CSV file (Python 2 code: StringIO).

    Subclasses supply `dtype`, a list of (column name, numpy dtype
    string) pairs matching the CSV column order.  The parsed data is
    cached next to the CSV as a .npy file and reloaded from there on
    subsequent runs.
    """

    dtype = []
    # Delimiter used for the intermediate re-written CSV; chosen because
    # it does not occur in the data fields.
    internaldelimiter = '|'

    def __init__(self, filename):
        self.filename = filename
        self.data = self.read()

    def read(self, regeneratenpy=False):
        """Parse the CSV into a numpy structured array, using/refreshing
        the .npy cache (pass regeneratenpy=True to force a re-parse)."""
        binfilename = self.filename[:-4] + '.npy'
        if not regeneratenpy and os.path.exists(binfilename):
            return np.load(binfilename)
        with open(self.filename) as f:
            reader = csv.reader(f, delimiter=',')
            # Re-write the rows with the internal delimiter so loadtxt
            # can split unambiguously; [1:] skips the header row.
            s = StringIO()
            csvw = csv.writer(s, delimiter=self.internaldelimiter)
            r = [o for o in reader][1:]
            for o in r:
                csvw.writerow(o)
            s.seek(0)
            if 0:
                # find the max length for each field (one-off helper for
                # sizing the 'aN' string dtypes; intentionally disabled)
                rr = np.array(r).transpose()
                for i, o in enumerate(rr):
                    ss = 0
                    for e in o:
                        ss = len(e) if len(e) > ss else ss
                    print((self.dtype[i][0], ss))
            # Column converters: empty fields become 0 for numeric types.
            converters = {}
            for i, (name, dt) in enumerate(self.dtype):
                if dt.startswith('i'):
                    converters[i] = lambda x: int(x or 0)
                elif dt.startswith('f'):
                    converters[i] = lambda x: float(x or 0)
                elif dt.startswith('a'):
                    converters[i] = lambda x: str(x)
            r = np.loadtxt(s, skiprows=0, delimiter=self.internaldelimiter,
                           dtype=self.dtype, converters=converters)
            s.close()
            np.save(binfilename, r)
        return r
class Master(LahmanReader):
    """Reader for Master.csv: one row of biographical data per player."""

    # (column name, numpy dtype) in CSV column order; 'aN' = N-byte string.
    dtype = [('lahmanID', 'i'),
             ('playerID', 'a9'),
             ('managerID', 'a10'),
             ('hofID', 'a10'),
             ('birthYear', 'i'),
             ('birthMonth', 'i'),
             ('birthDay', 'i'),
             ('birthCountry', 'a24'),
             ('birthState', 'a2'),
             ('birthCity', 'a31'),
             ('deathYear', 'i'),
             ('deathMonth', 'i'),
             ('deathDay', 'i'),
             ('deathCountry', 'a34'),
             ('deathState', 'a2'),
             ('deathCity', 'a21'),
             ('nameFirst', 'a12'),
             ('nameLast', 'a14'),
             ('nameNote', 'a80'),
             ('nameGiven', 'a43'),
             ('nameNick', 'a48'),
             ('weight', 'f'),
             ('height', 'f'),
             ('bats', 'a1'),
             ('throws', 'a1'),
             ('debut', 'a18'),
             ('finalGamw', 'a18'),
             ('college', 'a40'),
             ('lahman40ID', 'a9'),
             ('lahman45ID', 'a9'),
             ('retroID', 'a8'),
             ('holtzID', 'a9'),
             ('bbrefID', 'a9')]

    def __init__(self, filename='%s/Master.csv' % DIRLAHMAN):
        super(Master, self).__init__(filename)
class Batting(LahmanReader):
    """Reader for Batting.csv: one row per player/season/stint of
    batting statistics."""

    dtype = [("playerID", 'a9'),
             ("yearID", 'i'),
             ("stint", 'i'),
             ("teamID", 'a3'),
             ("lgID", 'a2'),
             ("G", 'i'),
             ("G_batting", 'i'),
             ("AB", 'i'),
             ("R", 'i'),
             ("H", 'i'),
             ("2B", 'i'),
             ("3B", 'i'),
             ("HR", 'i'),
             ("RBI", 'i'),
             ("SB", 'i'),
             ("CS", 'i'),
             ("BB", 'i'),
             ("SO", 'i'),
             ("IBB", 'i'),
             ("HBP", 'i'),
             ("SH", 'i'),
             ("SF", 'i'),
             ("GIDP", 'i'),
             ("G_old", 'i')]

    def __init__(self, filename='%s/Batting.csv' % DIRLAHMAN):
        super(Batting, self).__init__(filename)
class Pitching(LahmanReader):
    """Reader for Pitching.csv: one row per player/season/stint of
    pitching statistics."""

    dtype = [("playerID", 'a9'),
             ("yearID", 'i'),
             ("stint", 'i'),
             ("teamID", 'a3'),
             ("lgID", 'a2'),
             ("W", 'i'),
             ("L", 'i'),
             ("G", 'i'),
             ("GS", 'i'),
             ("CG", 'i'),
             ("SHO", 'i'),
             ("SV", 'i'),
             ("IPOuts", 'i'),
             ("H", 'i'),
             ("ER", 'i'),
             ("HR", 'i'),
             ("BB", 'i'),
             ("SO", 'i'),
             ("BAOpp", 'f'),
             ("ERA", 'f'),
             ("IBB", 'i'),
             ("WP", 'i'),
             ("HBP", 'i'),
             ("BK", 'i'),
             ("BFP", 'i'),
             ("GF", 'i'),
             ("R", 'i'),
             ("SH", 'i'),
             ("SF", 'i'),
             ("GIDP", 'i')]

    def __init__(self, filename='%s/Pitching.csv' % DIRLAHMAN):
        super(Pitching, self).__init__(filename)
class Fielding(LahmanReader):
    """Reader for Fielding.csv: one row per player/season/stint/position
    of fielding statistics.  'OF' and 'DH' rows are dropped on load."""

    dtype = [("playerID", 'a9'),
             ("yearID", 'i'),
             ("stint", 'i'),
             ("teamID", 'a3'),
             ("lgID", 'a2'),
             ("Pos", 'a2'),
             ("G", 'i'),
             ("GS", 'i'),
             ("InnOuts", 'i'),
             ("PO", 'i'),
             ("A", 'i'),
             ("E", 'i'),
             ("DP", 'i'),
             ("PB", 'i'),
             ("WP", 'i'),
             ("SB", 'i'),
             ("CS", 'i'),
             ("ZR", 'f')]

    def __init__(self, filename='%s/Fielding.csv' % DIRLAHMAN):
        super(Fielding, self).__init__(filename)
        # removing pos = 'OF' data, as they are duplicates of
        # individual outfield positions
        self.data = self.data[self.data['Pos'] != 'OF']
        # removing pos = 'DH' data, as they have no fielding stats
        self.data = self.data[self.data['Pos'] != 'DH']
class Appearances(LahmanReader):
    """Reader for Appearances.csv: per player/season game counts broken
    down by fielding position."""

    dtype = [("yearID", 'i'),
             ("teamID", 'a3'),
             ("lgID", 'a2'),
             ("playerID", 'a9'),
             ("UNKNOWN", 'i'),  # unnamed column in the 5.8 CSV
             ("G_all", 'i'),
             ("G_start", 'i'),
             ("G_batting", 'i'),
             ("G_defense", 'i'),
             ("G_p", 'i'),
             ("G_c", 'i'),
             ("G_1b", 'i'),
             ("G_2b", 'i'),
             ("G_3b", 'i'),
             ("G_ss", 'i'),
             ("G_lf", 'i'),
             ("G_cf", 'i'),
             ("G_rf", 'i'),
             ("G_of", 'i'),
             ("G_dh", 'i')]

    def __init__(self, filename='%s/Appearances.csv' % DIRLAHMAN):
        super(Appearances, self).__init__(filename)
def main():
    """Ad-hoc demo: find a player by (partial, lowercased) last name,
    restrict his pitching rows to 1995-1997 and total G/W/L."""
    m = Master().data
    b = Batting().data
    p = Pitching().data
    f = Fielding().data
    print(f)
    b = p  # switch to the pitching table; the batting data is discarded
    lastname = 'nomo'
    # Collect the playerIDs whose last name contains `lastname`.
    idxs = []
    for i, o in enumerate(m):
        nl = o['nameLast'].lower()
        if nl.find(lastname) >= 0:
            idxs.append(o['playerID'])
    print(idxs)
    playerid = idxs[0]
    b = b[b['playerID'] == playerid]
    print(b)
    print("")
    # Boolean mask selecting the 1995-1997 seasons.
    m = np.zeros(b.size)
    for year in [1995, 1996, 1997]:
        m += (b['yearID'] == year)
    b = b[m.astype(bool)]
    print(b)
    print("")
    #t = b[['G', 'AB', 'HR']]
    t = b[['G', 'W', 'L']]
    print(t)
    # Convert the structured rows to a plain 2-D array to sum columns.
    ts = []
    for o in t:
        ts.append(list(o))
    t = np.array(ts)
    print(t.dtype)
    print(t.sum(axis=0))
    #t = np.array(b[['G', 'HR']], dtype='i')
    #print(t)
    #t = t.sum()
    #print(t)

if __name__ == '__main__':
    main()
| nomo17k/s2r | s2r/lahman58.py | lahman58.py | py | 7,388 | python | en | code | 1 | github-code | 13 |
43973352213 | from kivy.uix.image import Image
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.screenmanager import ScreenManager, Screen, SlideTransition
from kivy.graphics import Color, Rectangle, RoundedRectangle, Rotate, PushMatrix, PopMatrix
from kivy.animation import Animation
import kivy_homepage as home
import kivy_config as cfg
# Module-level UI state shared across this page:
_routine_options = [None, None]  # the two routine option button images
_start_button = None  # START button; enabled once a routine is selected
_routine_choice = -1  # index of the selected routine, -1 = none selected
_routine_list = []  # per-routine exercise lists (set by load_exercises)
class QuasiButtonLabel(Label):
    """A Label styled like a button (rounded-rectangle background) but
    with no click behaviour; used for the page title."""

    def __init__(self, **kwargs):
        super(QuasiButtonLabel, self).__init__(**kwargs)
        self.background_normal = cfg.button_params['bg_normal']
        self.background_color = cfg.button_params['bg_color']
        self.color = cfg.button_params['color']
        # Draw the rounded background behind the label text.
        with self.canvas.before:
            self.color_template = Color(*self.background_color)
            self.rect = RoundedRectangle(
                pos =(0, 0),
                size =(200, 100),
                radius =(10, 10)
            )
        self.text = self.text  # NOTE(review): presumably forces a text refresh -- confirm

    def on_pos(self, instance, pos):
        # Keep the background rectangle glued to the label.
        self.rect.pos = pos

    def on_size(self, instance, size):
        self.rect.size = size
        self.font_size = 0.10*size[0]  # font scales with widget width

    def on_background_color(self, instance, color):
        self.color_template.rgba = color

    def on_text_size(self, instance, text_size):
        # NOTE(review): Label.text_size is normally a (w, h) pair;
        # assigning a scalar here relies on Kivy coercion -- confirm.
        self.text_size = self.width*0.8
class RoutineWidgetLayout(FloatLayout):
    """Preview widget for one exercise: an image with repetition and set
    counts below it.  The counts live in the label text and are parsed
    back out by the getters."""

    def __init__(self, **kwargs):
        # NOTE(review): super(FloatLayout, self) skips FloatLayout's own
        # __init__ and calls its parent's instead -- confirm intended.
        super(FloatLayout, self).__init__(**kwargs)
        self.exer_icon = Image(
            source = '',
            size_hint = [0.8, 0.4],
            pos_hint = {'center_x': 0.5, 'center_y': 0.7},
            fit_mode = 'fill',
        )
        self.exer_reps = Label(
            text = 'Repetitions: 0',
            size_hint = [0.8, 0.2],
            pos_hint = {'center_x': 0.5, 'center_y': 0.3},
            color = (0,0,0,1),
        )
        self.exer_sets = Label(
            text = 'Sets: 0',
            size_hint = [0.8, 0.2],
            pos_hint = {'center_x': 0.5, 'center_y': 0.1},
            color = (0,0,0,1),
        )
        self.add_widget(self.exer_icon)
        self.add_widget(self.exer_reps)
        self.add_widget(self.exer_sets)
        self.text_scale = 0.20  # label font size as a fraction of widget width

    def on_size(self, instance, size):
        self.exer_reps.font_size = self.text_scale*size[0]
        self.exer_sets.font_size = self.text_scale*size[0]

    def on_pos(self, instance, pos):
        self.exer_icon.pos = pos
        self.exer_reps.pos = pos
        self.exer_sets.pos = pos

    def get_reps(self):
        '''
        Returns the number of reps for this particular exercise.
        '''
        # len('Repetitions: ') == 13
        return int(self.exer_reps.text[13:])

    def set_reps(self, reps: int):
        '''
        Updates the number of reps for this particular exercise.
        '''
        self.exer_reps.text = 'Repetitions: ' + str(reps)

    def get_nsets(self):
        '''
        Returns the number of sets for this particular exercise.
        '''
        # len('Sets: ') == 6
        return int(self.exer_sets.text[6:])

    def set_nsets(self, sets: int):
        '''
        Updates the number of sets for this particular exercise.
        '''
        self.exer_sets.text = 'Sets: ' + str(sets)

    def get_image_source(self):
        '''
        Returns the image path for this particular exercise.
        Provided just in case this is needed.
        '''
        return self.exer_icon.source

    def set_image_source(self, src: str):
        '''
        Updates the image path for this particular exercise.
        '''
        self.exer_icon.source = src
def option_recipe(manager: ScreenManager):
    """Build the layout holding the two selectable routine options.

    Each option is a full-width image button with a rotated "ROUTINE n"
    caption, a description label and three RoutineWidgetLayout previews.
    Clicking an option toggles the module-level _routine_choice and the
    START button's enabled state.
    """
    option_layout = FloatLayout(
        size_hint = [0.75, 0.56],
        pos_hint = {'center_x': 0.5, 'center_y': 0.50}
    )
    for i in range(2):
        # Transparent button; the visible surface is the Image below.
        opt_button = Button(
            size_hint = [1.00, 0.45],
            pos_hint = {'center_x': 0.5, 'center_y': (4 - (2*i + 1))*0.25},
            background_normal = cfg.button_params['bg_normal'],
            background_color = (0,0,0,0)
        )
        # ==========================================
        # The Image Backdrop
        # ==========================================
        opt_button_image = Image(
            source = cfg.option_img_path,
            fit_mode = 'fill',
            size_hint = [1.00, 1.00],
        )

        def opt_button_factory(opt_button, opt_button_image, i):
            # Factory takes the loop variables as parameters so each
            # button gets its own handlers (avoids late-binding capture).
            click_layer = None  # darkened overlay drawn while selected

            def on_button_size(instance, size):
                opt_button_image.size = size
                global _routine_choice
                if (_routine_choice == i):
                    nonlocal click_layer
                    if (click_layer is None):
                        return
                    click_layer.size = size

            def on_button_pos(instance, pos):
                opt_button_image.pos = pos
                global _routine_choice
                if (_routine_choice == i):
                    nonlocal click_layer
                    if (click_layer is None):
                        return
                    click_layer.pos = pos

            def on_button_click(instance):
                # Toggle selection: clicking the selected option again
                # deselects it; START is enabled only while one option
                # is selected.
                global _routine_choice
                if (_routine_choice > -1):
                    print("Previous, current:", _routine_choice, i)
                    _routine_options[_routine_choice].canvas.after.clear()
                _start_button.disabled = (_routine_choice == i)
                if (_routine_choice == i):
                    _routine_choice = -1
                else:
                    _routine_choice = i
                start_opacity, end_opacity = 0.2, 0.0
                opt_button_image.canvas.after.clear()
                if (_routine_choice == i):
                    # Create click layer.
                    start_opacity, end_opacity = end_opacity, start_opacity
                nonlocal click_layer
                if (end_opacity <= 0.0):
                    click_layer = None
                    return
                with opt_button_image.canvas.after:
                    Color(0.0, 0.0, 0.0, end_opacity)
                    click_layer = Rectangle(
                        size = opt_button_image.size,
                        pos = opt_button_image.pos,
                    )
            opt_button.bind(size=on_button_size)
            opt_button.bind(pos=on_button_pos)
            opt_button.bind(on_release=on_button_click)
            opt_button.add_widget(opt_button_image)
        opt_button_factory(opt_button, opt_button_image, i)
        _routine_options[i] = opt_button_image
        # ==========================================
        # The Three Image Sub-Layouts
        # ==========================================
        # ==========================================
        # Routine Text (rotated caption on the left edge)
        # ==========================================
        rot_width = 0.06
        rotated_layout = FloatLayout(
            size_hint = [rot_width, 1.0],
            pos_hint = {'x': 0.0, 'y': 0.0},
        )
        rotated_text = Label(
            size_hint = [1, 1],
            pos_hint = {'x': 0.0, 'y': -0.04},
            font_size = 54,
            font_name = cfg.font_name,
        )
        rotated_text.text = 'ROUTINE ' + str(i+1)
        rotated_layout.add_widget(rotated_text)

        def opt_button_image_factory(opt_button_image, child):
            # Manually propagate size/pos to children because an Image
            # is not a layout and does not honour size_hint/pos_hint.
            def on_btn_image_size(instance, size):
                for outer_child in instance.children:
                    if outer_child.size_hint is None:
                        pass
                        # outer_child.size = outer_child.size
                    else:
                        outer_child.size = [size[0]*outer_child.size_hint[0],
                                            size[1]*outer_child.size_hint[1]]

            def on_btn_image_pos(instance, pos):
                # Emulate FloatLayout pos_hint resolution by hand.
                for outer_child in instance.children:
                    if outer_child.pos_hint is None:
                        outer_child.pos = pos
                        continue
                    offset = [0, 0]
                    pos_key = ''
                    if 'x' in outer_child.pos_hint:
                        offset[0] = instance.size[0]*outer_child.pos_hint['x']
                        pos_key = 'x'
                    elif 'center_x' in outer_child.pos_hint:
                        offset[0] = instance.size[0]*outer_child.pos_hint['center_x'] + outer_child.size[0]*0.5
                        pos_key = 'center_x'
                    elif 'right' in outer_child.pos_hint:
                        offset[0] = instance.size[0]*(1 - outer_child.pos_hint['right'])
                        pos_key = 'right'
                    pos_key = ''
                    if 'y' in outer_child.pos_hint:
                        offset[1] = instance.size[1]*outer_child.pos_hint['y']
                        pos_key = 'y'
                    elif 'center_y' in outer_child.pos_hint:
                        offset[1] = instance.size[1]*outer_child.pos_hint['center_y'] + outer_child.size[1]*0.5
                        pos_key = 'center_y'
                    elif 'top' in outer_child.pos_hint:
                        offset[1] = instance.size[1]*(1 - outer_child.pos_hint['top'])
                        pos_key = 'top'
                    outer_child.pos = [pos[0] + offset[0],
                                       pos[1] + offset[1]]

            label_loop_depth = 0  # re-entrancy guard for on_label_pos

            def on_label_pos(instance, pos):
                nonlocal label_loop_depth
                if label_loop_depth > 0:
                    return
                import math
                # Rotate the caption 90 degrees about the origin and
                # re-centre it.  NOTE(review): the math maps pos through
                # the inverse rotation before applying the canvas
                # Rotate -- confirm against the rendered result.
                debug_angle = 90
                origin = [0,0]
                # With rotation (q)A(q^-1), calculate (q^-1)
                delta_theta = debug_angle*math.pi/180
                theta = math.atan2(pos[1], pos[0]) - delta_theta
                dist = math.dist(origin, pos)
                center_theta = math.atan2(instance.center[1], instance.center[0]) - delta_theta
                center_dist = math.dist(origin, instance.center)
                child_center = [center_dist*math.cos(center_theta),
                                center_dist*math.sin(center_theta)]
                child_pos = [dist*math.cos(theta),
                             dist*math.sin(theta)]
                label_loop_depth += 1
                instance.center = child_center
                label_loop_depth -= 1
                instance.canvas.before.clear()
                instance.canvas.after.clear()
                with instance.canvas.before:
                    PushMatrix()
                    Rotate(angle=debug_angle, center=instance.center)
                with instance.canvas.after:
                    PopMatrix()
                on_btn_image_pos(instance, child_pos)
            opt_button_image.bind(size=on_btn_image_size)
            opt_button_image.bind(pos=on_btn_image_pos)
            child.bind(pos=on_label_pos)
            opt_button_image.add_widget(child)
        opt_button_image_factory(opt_button_image, rotated_layout)
        # ==========================================
        # Rest of the layout (description + previews)
        # ==========================================
        remain_off_x = 0.0
        remain_width = 1 - (rot_width + remain_off_x)
        content_layout = RelativeLayout(
            size_hint = [remain_width, 1],
            pos_hint = {'x': (rot_width + remain_off_x), 'y': 0}
        )
        description_label = Label(
            size_hint = [1, 0.4],
            font_name = cfg.font_name,
            halign = 'center',
            color = (0, 0, 0, 1)
        )
        image_widget_layout = FloatLayout(
            size_hint = [1, 0.7],
            pos_hint = {'x': 0, 'y': 0.3}
        )

        def content_layout_factory(label, layout):
            def on_label_size(instance, size):
                # Wrap text inside 95% of the label; font scales with height.
                instance.text_size = [size[0]*0.95, size[1]*0.95]
                instance.font_size = 0.28*size[1]
            label.bind(size=on_label_size)
            # ==========================================
            # Image Widget Layout: three exercise previews,
            # laid out right-to-left.
            # ==========================================
            for j in range(3):
                center_x = 1.0 - (3*j + 2)*0.1
                layout_widget = RoutineWidgetLayout(
                    size_hint = [0.08, 0.8],
                    pos_hint = {'center_x': center_x, 'center_y': 0.5}
                )
                layout.add_widget(layout_widget)
        content_layout_factory(description_label, image_widget_layout)
        content_layout.add_widget(image_widget_layout)
        content_layout.add_widget(description_label)
        # ==========================================
        # Add the child widgets to button
        # ==========================================
        opt_button_image.add_widget(content_layout)
        # ==========================================
        # Add button to option layout.
        # ==========================================
        option_layout.add_widget(opt_button)
    return option_layout
def page_recipe(manager: ScreenManager):
    """Assemble the ready-made-routine screen: background image, title,
    BACK/START navigation buttons and the two routine options."""
    app_layout = FloatLayout()
    app_bg = Image(source=cfg.user_page, fit_mode = "fill")
    # =======================================
    # Top Widget Layout (quasi-button)
    # =======================================
    title_widget = QuasiButtonLabel(
        text = 'READY-MADE ROUTINE',
        size_hint = [0.50, 0.10],
        pos_hint = {'center_x': 0.72, 'y': 0.88},
        font_name = cfg.font_name,
    )

    def pass_list():
        # Hand the chosen routine's exercises to the shared app data
        # just before transitioning to the workout screen.
        home.app_data['exercise_list'] = _routine_list[_routine_choice]
    back_button = home.HomePage.new_trans_button(manager, 3)
    start_button = home.HomePage.new_trans_button(manager, 6, False, callback=pass_list)
    global _start_button
    start_button.text = 'START'
    start_button.pos_hint = {
        'center_x': 0.85,
        'center_y': start_button.pos_hint['center_y']
    }
    start_button.disabled = True  # enabled once a routine is selected
    _start_button = start_button
    app_layout.add_widget(app_bg)
    app_layout.add_widget(title_widget)
    app_layout.add_widget(back_button)
    app_layout.add_widget(start_button)
    app_layout.add_widget(option_recipe(manager))
    return app_layout
class RoutineOptionHandler:
    """Static helpers for reading and updating the two routine option
    widgets built by option_recipe.

    All methods operate on the module-level _routine_options /
    _routine_list state, never on an instance, so they are declared
    @staticmethod.  (The original omitted the decorator; calls through
    the class object still worked under Python 3, but any call through
    an instance would have mis-bound the first argument.)
    """

    @staticmethod
    def get_option_description(index: int):
        """Return the description text of routine option `index` (0 or 1)."""
        btn_image = _routine_options[index]
        label = btn_image.children[0].children[0]
        return label.text

    @staticmethod
    def set_option_description(index: int, desc: str):
        """Set the description text of routine option `index` (0 or 1)."""
        btn_image = _routine_options[index]
        label = btn_image.children[0].children[0]
        label.text = desc

    @staticmethod
    def get_option_widget_root(index: int) -> FloatLayout:
        """Return the layout holding the 3 RoutineWidgetLayout previews
        of option `index`, for callers that want direct/cached access."""
        btn_image = _routine_options[index]
        return btn_image.children[0].children[1]

    @staticmethod
    def get_option_widget(index: int, widget_index: int) -> RoutineWidgetLayout:
        """Return preview `widget_index` (0-2) of option `index` (0-1)."""
        btn_image = _routine_options[index]
        return btn_image.children[0].children[1].children[widget_index]

    @staticmethod
    def load_exercises(exer_list, exer_desc_list):
        """Populate both option panels from `exer_list` (a list of two
        exercise lists) and `exer_desc_list` (two description strings)."""
        from exercise_class import BaseExercise
        global _routine_list
        _routine_list = exer_list
        for i in range(len(_routine_list)):
            widget_root = RoutineOptionHandler.get_option_widget_root(i)
            inner_list = _routine_list[i]
            for j in range(len(inner_list)):
                base_exercise: BaseExercise = inner_list[j]
                option_widget: RoutineWidgetLayout = widget_root.children[j]
                option_widget.set_reps(base_exercise.reps)
                option_widget.set_nsets(base_exercise.sets)
                option_widget.set_image_source(base_exercise.img_path)
        for i in range(len(exer_desc_list)):
            RoutineOptionHandler.set_option_description(i, exer_desc_list[i])
26949461254 | #######2022-01-20
from asyncio.windows_events import NULL
import requests
from bs4 import BeautifulSoup as bs
from time import time
import datetime
import openpyxl ####### 엑셀 write에 사용
import all_companys_list as C ####### 종목 코드 리스트
import test_companys_list as T ####### 구동 테스트를 위한 종목 코드 샘플 리스트
from tqdm import tqdm ####### 진행률 표시 - for문에 사용
import finance_config as FC ####### config
####### html 코드 값을 넣어 원하는 위치의 값 추출
def extract(css):
    """Return the text of the first node matching *css* in the module-global
    ``soup``, with newlines and thousands-separator commas removed."""
    node = soup.select_one(css)
    cleaned = node.text.replace('\n', '').strip()
    return cleaned.replace(',', '').strip()
####### 재무제표에 있는 값을 str로 읽어와 숫자로 변환
def change_float(number):
    """Parse *number* (a string scraped from the page) as a float rounded to
    ``FC.SIGNIFICANT_FIGURE`` digits; return 0 when it is not numeric."""
    try:
        value = float(number)
    except ValueError:
        return 0
    return round(value, FC.SIGNIFICANT_FIGURE)
####### extract 함수와 change_float 함수 합
def change(address):
    """Extract the value at CSS selector *address* and convert it to a float.

    Returns 0 when the element is missing (``AttributeError`` raised inside
    ``extract``) or its text is not numeric (``ValueError``).
    """
    # Bug fix: previously the extraction happened *before* the try block and
    # the try only wrapped `return number`, so both except clauses were
    # unreachable and a missing element crashed the caller.
    try:
        return change_float(extract(address))
    except (ValueError, AttributeError):
        return 0
####### 엑셀 저장을 위한 파트
row=1
wb = openpyxl.Workbook()
ws = wb.active
####### 엑셀 저장 함수
def write_excel(COMPANYNAME,CAP,SALES,PROFITS,DEBT,QUICK_RATIO,RESERVE_RATIO,DIVIDEND_RATIO,ROE,PER,PBR,EPS,PSR,PRICE,TARGETPRICE,DIFFERNCE,DIFFERNCE_RATIO,URL,row):
    """Write one company's metrics into worksheet row *row*+1.

    Uses the module-level worksheet ``ws`` and the column indices from the
    ``FC`` (finance_config) module.  Row 0 is reserved for the header.
    """
    ws.cell(row=row+1, column=FC.COL_COMPANY).value = COMPANYNAME            ## company name
    ws.cell(row=row+1, column=FC.COL_CAP).value = CAP                        ## market cap
    ws.cell(row=row+1, column=FC.COL_SALES).value = SALES                    ## revenue
    ws.cell(row=row+1, column=FC.COL_PROFITS).value = PROFITS                ## operating profit
    ws.cell(row=row+1, column=FC.COL_DEBT).value = DEBT                      ## debt ratio
    ws.cell(row=row+1, column=FC.COL_QUICK_RATIO).value = QUICK_RATIO        ## quick ratio
    ws.cell(row=row+1, column=FC.COL_RESERVE_RATIO).value = RESERVE_RATIO    ## reserve ratio
    ws.cell(row=row+1, column=FC.COL_DIVIDEND_RATIO).value = DIVIDEND_RATIO  ## dividend yield
    ws.cell(row=row+1, column=FC.COL_ROE).value = ROE                        ## ROE
    ws.cell(row=row+1, column=FC.COL_PER).value = PER                        ## PER
    ws.cell(row=row+1, column=FC.COL_PBR).value = PBR                        ## PBR
    ws.cell(row=row+1, column=FC.COL_EPS).value = EPS                        ## EPS
    ws.cell(row=row+1, column=FC.COL_PSR).value = PSR                        ## PSR
    ws.cell(row=row+1, column=FC.COL_PRICE).value = PRICE                    ## current price
    ws.cell(row=row+1, column=FC.COL_TARGETPRICE).value = TARGETPRICE        ## target price
    ws.cell(row=row+1, column=FC.COL_DIFFERNCE).value = DIFFERNCE            ## gap (target - current)
    ws.cell(row=row+1, column=FC.COL_DIFFERNCE_RATIO).value = DIFFERNCE_RATIO  ## gap ratio % (gap/current)
    ws.cell(row=row+1, column=FC.COL_URL).value = URL                        ## URL
####### Excel header row (the column titles are user-facing Korean labels)
write_excel("회사명","시가총액","매출","영업이익","부채비율(%)","당좌비율(%)","유보율(%)","배당률(%)","ROE","PER","PBR","EPS","PSR","현재가","적정주가","괴리","괴리율(%)","URL",0)

for company in tqdm(FC.MODE.tickers.keys()):
    ####### skip a ticker entirely when a required page element is missing
    try:
        URL = f"https://finance.naver.com/item/main.naver?code={company}"
        req = requests.get(URL)
        html = req.text
        soup = bs(html, "lxml")
        ####### operating profit — sampled first for the profitability filter
        PROFITS = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(2) > td.t_line.cell_strong")
        ####### net profit over the last four reported periods
        NET_PROFITS1 = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(3) > td:nth-child(2)")
        NET_PROFITS2 = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(3) > td:nth-child(3)")
        NET_PROFITS3 = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(3) > td:nth-child(4)")
        NET_PROFITS4 = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(3) > td:nth-child(5)")
        ####### Bug fix: `condition` used to be assigned a *string* describing
        ####### the filter, so `if condition:` was always true.  Evaluate the
        ####### filter as an actual boolean instead.
        if FC.FILTER == "Y" or FC.FILTER == "y":
            condition = (PROFITS > 0 and NET_PROFITS1 > 0 and NET_PROFITS2 > 0
                         and NET_PROFITS3 > 0 and NET_PROFITS4 > 0)
        elif FC.FILTER == "N" or FC.FILTER == "n":
            condition = True  # no profitability pre-filter
        if condition:
            ####### company name and ticker code
            COMPANYNAME = extract("#middle > div.h_company > div.wrap_company > h2 > a")
            CODE = extract("#middle > div.h_company > div.wrap_company > div > span.code")
            ####### market cap
            CAP = change("#content > div.section.trade_compare > table > tbody > tr:nth-child(4) > td:nth-child(2)")
            ####### revenue
            SALES = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(1) > td.t_line.cell_strong")
            ####### debt ratio
            DEBT = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(7) > td:nth-child(4)")
            ####### quick ratio
            QUICK_RATIO = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(8) > td:nth-child(4)")
            ####### reserve ratio
            RESERVE_RATIO = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(9) > td:nth-child(4)")
            ####### dividend yield
            DIVIDEND_RATIO = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(15) > td:nth-child(4)")
            ####### ROE
            ROE = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(6) > td.t_line.cell_strong")
            ####### EPS
            EPS = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(10) > td.t_line.cell_strong")
            ####### PER
            PER = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(11) > td.t_line.cell_strong")
            ####### PBR
            PBR = change("#content > div.section.cop_analysis > div.sub_section > table > tbody > tr:nth-child(13) > td.t_line.cell_strong")
            ####### current price
            PRICE = change("#content > div.section.invest_trend > div.sub_section.right > table > tbody > tr:nth-child(2) > td:nth-child(2) > em")
            ####### PSR — skip companies with no reported revenue
            try:
                PSR = round(CAP/SALES, FC.SIGNIFICANT_FIGURE)
            except (ValueError, ZeroDivisionError):
                continue
            ####### target price (ROE * EPS heuristic); plain multiplication
            ####### cannot raise ValueError, so no guard is needed here
            TARGETPRICE = ROE * EPS
            ####### gap between target price and current price
            DIFFERNCE = TARGETPRICE - PRICE
            try:
                DIFFERNCE_RATIO = round(DIFFERNCE/PRICE, FC.SIGNIFICANT_FIGURE)*100
            except (ValueError, ZeroDivisionError):
                continue
            ####### product of all metrics: zero iff any metric is missing
            CHECK_VALUE = SALES*PROFITS*ROE*EPS*PER*PBR*PRICE*PSR*CAP*TARGETPRICE
            ####### Bug fix: `A and B == "Y" or B == "y"` parses as
            ####### `(A and B == "Y") or (B == "y")`, so a lowercase filter
            ####### bypassed the CHECK_VALUE guard.  Parenthesize explicitly.
            if CHECK_VALUE != 0 and (FC.FILTER == "Y" or FC.FILTER == "y"):
                if DIFFERNCE > FC.DIFFERNCE_FILTER and EPS > FC.EPS_FILTER and QUICK_RATIO > FC.QUICK_RATIO_FILTER:
                    if PSR < FC.PSR_FILTER and PBR < FC.PBR_FILTER and DEBT < FC.DEBT_FILTER:
                        write_excel(COMPANYNAME,CAP,SALES,PROFITS,DEBT,QUICK_RATIO,RESERVE_RATIO,DIVIDEND_RATIO,ROE,PER,PBR,EPS,PSR,PRICE,TARGETPRICE,DIFFERNCE,DIFFERNCE_RATIO,URL,row)
                        row = row + 1
            elif CHECK_VALUE != 0 and (FC.FILTER == "N" or FC.FILTER == "n"):
                write_excel(COMPANYNAME,CAP,SALES,PROFITS,DEBT,QUICK_RATIO,RESERVE_RATIO,DIVIDEND_RATIO,ROE,PER,PBR,EPS,PSR,PRICE,TARGETPRICE,DIFFERNCE,DIFFERNCE_RATIO,URL,row)
                row = row + 1
    except AttributeError:
        continue

print(f"{row-1} 개의 회사가 저장 되었습니다.")
nowtime = datetime.datetime.now()
day = nowtime.strftime("%Y%m%d")
wb.save(f"./financial_statements_{day}.xlsx")
27236871527 | #!/usr/bin/env python3
import logging
import numpy
import math
import copy
import sys
from mini_op2.framework.core import DataType, Parameter, AccessMode
from mini_op2.framework.system import SystemSpecification, SystemInstance, load_hdf5_instance
from mini_op2.framework.control_flow import *
from numpy import ndarray as nda
seq_double=typing.Sequence[float]
seq_seq_double=typing.Sequence[typing.Sequence[float]]
seq_float=typing.Sequence[float]
seq_int=typing.Sequence[int]
from numpy import sqrt, fabs
def dot_product(
        x:seq_double,
        y:seq_double,
        prod:seq_double,
        g_sum:seq_double
    ) -> None:
    """Multiply the mapped x/y values and accumulate the result into g_sum."""
    value = x[0] * y[0]
    prod[0] = value
    g_sum[0] = g_sum[0] + value
def build_system(n:int) -> (SystemInstance,Statement):
    """Build an n-point dot-product system whose partial sums are split
    between an "even" and an "odd" accumulator.

    Returns a (specification, instance, code) triple; the code computes
    x[i]*y[i] per point and INC-accumulates into p_sums[i % 2], then checks
    the result against the precomputed expected state.
    """
    WRITE = AccessMode.WRITE
    READ = AccessMode.READ
    INC = AccessMode.INC
    RW = AccessMode.RW

    system = SystemSpecification()
    xvals = system.create_set("xvals")
    yvals = system.create_set("yvals")
    points = system.create_set("points")
    oddeven = system.create_set("oddeven")
    p_x = system.create_dat(xvals, "p_x", DataType(shape=(1,)))
    p_y = system.create_dat(yvals, "p_y", DataType(shape=(1,)))
    p_prod = system.create_dat(points, "p_prod", DataType(shape=(1,)))
    p_sums = system.create_dat(oddeven, "p_sums", DataType(shape=(1,)))
    xlink = system.create_map("xlink", points, xvals, 1)
    ylink = system.create_map("ylink", points, yvals, 1)
    sumlink = system.create_map("sumlink", points, oddeven, 1)

    instDict = {
        "xvals" : n,
        "yvals" : n,
        "points" : n,
        "oddeven" : 2,
        "p_x" : numpy.array( [[i*1.0] for i in range(n)] ),
        "p_y" : numpy.array( [[i*1.0] for i in range(n)] ),
        "p_prod" : numpy.array( [ [0.0] for i in range(n)] ),
        "xlink" : numpy.array( [[i] for i in range(n)] ),
        "ylink" : numpy.array( [[i] for i in range(n)] ),
        "sumlink" : numpy.array( [[ i%2 ] for i in range(n)] )
    }
    inst = SystemInstance(system, instDict)

    # Expected final state: products i*i, split into even/odd partial sums.
    instOutput = {
        "output" : {
            "final" : copy.copy(instDict)
        },
        **instDict
    }
    expected = [i*i*1.0 for i in range(n)]
    sumeven = sum( expected[0::2] )
    sumodd = sum( expected[1::2] )
    sys.stderr.write("exp=[{},{}]".format(sumeven,sumodd))
    instOutput["output"]["final"]["p_prod"] = numpy.array( [ [e] for e in expected ] )
    instOutput["output"]["final"]["p_sums"] = numpy.array( [ [sumeven], [sumodd] ] )

    code = Seq(
        ParFor(
            dot_product,
            points,
            p_x(READ, xlink, 0),
            p_y(READ, ylink, 0),
            p_prod(WRITE),
            p_sums(INC, sumlink, 0)
        ),
        CheckState(instOutput, "/output/final"),
    )
    # Bug fix: this previously passed the `sys` *module*; the statement must
    # be bound against the system specification built above.
    # TODO confirm against Statement.on_bind_spec's expected argument type.
    code.on_bind_spec(system)
    return (system, inst, code)
if __name__=="__main__":
    # Build and run the 8-point odd/even dot-product demo; CheckState inside
    # the code raises if the computed state differs from the expected one.
    logging.basicConfig(level=4,style="{")
    (spec,inst,code)=build_system(8)
    code.execute(inst)
| joshjennings98/fyp | graph_schema-4.2.0/apps/nursery/op2/mini_op2/apps/odd_even_dot_product.py | odd_even_dot_product.py | py | 2,922 | python | en | code | 0 | github-code | 13 |
12973426181 | from django.urls import path
from . import views
# Namespace used for reversing, e.g. "accounts:login".
app_name = "accounts"

urlpatterns = [
    path('', views.homepage, name="homepage"),      # homepage
    path('register/', views.register, name="register"),  # user registration
    path("login/", views.loginpage, name="login"),  # login form
    path('logout/', views.logout_page, name="logout"),   # logout
]
| Pranayea/Heisenberg_ADC2 | Phoby/accounts/urls.py | urls.py | py | 366 | python | en | code | 0 | github-code | 13 |
74514476498 | '''
CHẴN – LẺ - NGUYÊN TỐ
Cho một số nguyên dương không quá 500 chữ số.
Hãy kiểm tra xem số đó có thỏa mãn đồng thời ba tính chất sau hay không?
Vị trí chẵn là chữ số chẵn
Vị trí lẻ là chữ số lẻ
Tổng chữ số là một số nguyên tố.
Input
Dòng đầu ghi số bộ test (không quá 10)
Mỗi bộ test ghi trên một dòng giá trị số nguyên (không quá 500 chữ số)
Output
Với mỗi bộ test, ghi ra YES hoặc NO tùy thuộc kết quả kiểm tra.
'''
t = int(input())
def prime(n):
    """Return True if the digit sum of the decimal string *n* is prime."""
    digit_sum = sum(ord(ch) - ord('0') for ch in n)
    if digit_sum < 2:
        return False
    for d in range(2, int(digit_sum ** 0.5 + 1)):
        if digit_sum % d == 0:
            return False
    return True
def pos(n):
    """Return True if every 0-based even index holds an even digit and every
    odd index holds an odd digit (digit parity matches index parity)."""
    for idx, ch in enumerate(n):
        if (ord(ch) - ord('0')) % 2 != idx % 2:
            return False
    return True
while t>0:
n = input()
if prime(n) and pos(n): print('YES')
else: print('NO')
t -= 1 | cuongdh1603/Python-Basic | PY01056.py | PY01056.py | py | 1,030 | python | vi | code | 0 | github-code | 13 |
10345035357 | import numpy as np
def save_weights_model(model, filename):
    """Save a trained model's best weights to *filename* in NumPy format.

    Args:
        model: a trained Model instance exposing a ``best_weights`` attribute
        filename: path of the file to write
    """
    # Bug fix: write through the already-open file handle.  The previous code
    # passed the *filename* string to np.save while also holding the file
    # open, which opened it a second time and appended a '.npy' suffix, so
    # the data never landed in the path the caller asked for.
    with open(filename, 'wb') as f:
        np.save(f, model.best_weights)
def load_weights_model(filename):
    """Load previously saved weights from *filename*.

    Args:
        filename: path of the file to read the weights from
    """
    with open(filename, 'rb') as handle:
        return np.load(handle, allow_pickle=True)
def batch_iter(y, tx, batch_size, num_batches=1, shuffle=True):
    """Generate up to *num_batches* mini-batches of matching (y, tx) slices.

    When *shuffle* is True the rows are permuted first, so consecutive calls
    see the data in a fresh random order.  Batches past the end of the data
    are silently skipped (empty slices are never yielded).

    Example of use:
        for minibatch_y, minibatch_tx in batch_iter(y, tx, 32):
            <DO-SOMETHING>
    """
    n = len(y)
    if shuffle:
        order = np.random.permutation(np.arange(n))
        y_src, tx_src = y[order], tx[order]
    else:
        y_src, tx_src = y, tx
    for batch in range(num_batches):
        lo = batch * batch_size
        hi = min(lo + batch_size, n)
        if lo != hi:
            yield y_src[lo:hi], tx_src[lo:hi]
| IrinaMBejan/Higgs_Bosson_Project | scripts/utils.py | utils.py | py | 1,651 | python | en | code | 0 | github-code | 13 |
25055558595 | from random import randint, choice, shuffle
# number 21: Monte-Carlo estimate of the chance of guessing one digit in ten.
num = 3
wins = 0
for _ in range(10000):
    if randint(0, 9) == num:
        wins += 1
print(f'{wins/100}%')

# number 30: expected payout of the card-drawing game — draw cards at random
# until the "ace" (card 1) is gone; earlier picks pay more.
money = 0
payouts = {1: 100, 2: 50, 3: 20, 4: 10, 5: 5}
for _ in range(10000000):
    ace = 1
    cards = [1, 2, 3, 4, 5]
    shuffle(cards)
    pick = 0
    while ace in cards:
        cards.remove(choice(cards))
        pick += 1
    money += payouts[pick]
print(f'${money/10000000}')
6497821459 | from PyQt5.QtWidgets import *
import pafy
import os
import urllib.request
from os import path
import sys
import humanize
from moviepy.editor import VideoFileClip
from Main import Ui_MainWindow
class mainapp(QMainWindow , Ui_MainWindow):
    """Main window of the download manager.

    Combines three features wired to the Ui_MainWindow widgets: plain URL
    downloads (urllib), single YouTube video+audio downloads (pafy, merged
    with moviepy), and whole-playlist downloads.
    """

    def __init__(self , parent=None):
        super(mainapp,self).__init__(parent)
        QMainWindow.__init__(self)
        self.setupUi(self)
        self.Handle_UI()
        self.Handle_Buttons()

    def Handle_UI(self):
        # Fixed-size window; the .ui layout is not resizable.
        self.setFixedSize(777,530)

    def Handle_Buttons(self):
        # Wire every button to its handler.
        self.btn_Download.clicked.connect(self.Download)
        self.btn_Browse.clicked.connect(self.Handle_Browser)
        self.btn_download_video.clicked.connect(self.Download_Youtube_Video)
        self.btn_videoinfo.clicked.connect(self.Get_Youtube_Video)
        self.btn_Browse_2.clicked.connect(self.Save_Browse)
        self.btn_Download_5.clicked.connect(self.PlayList_Download)
        self.btn_Browse_3.clicked.connect(self.Save_Browse)

    def Handle_Browser(self):
        """Ask for a save path and copy it into the URL-download line edit."""
        save_loc = QFileDialog.getSaveFileName(self , caption="Save As" , directory=".",filter="All Files (*.*)")
        # getSaveFileName returns a (path, filter) tuple; this parses the
        # str() of the tuple instead of unpacking it — fragile but kept as-is.
        text = str(save_loc)
        savedlocation = (text[2:].split(',')[0].replace("'",""))
        self.LE_savelocation.setText(savedlocation)

    def Handle_progress(self , blocknum , blocksize , totalsize):
        """urllib reporthook: update the progress bar from block counts."""
        read = blocknum * blocksize
        if totalsize > 0:
            precent = read * 100 / totalsize
            self.progressBar.setValue(precent)
            QApplication.processEvents() # partial solution to not responding

    def Download(self): #Download files
        """Download the URL in LE_inserturl to the chosen path via urllib."""
        url = self.LE_inserturl.text()
        save_location = self.LE_savelocation.text()
        try:
            urllib.request.urlretrieve(url , save_location , self.Handle_progress)
        except Exception:
            QMessageBox.warning(self, "Download Error", "The Download Failed")
        # NOTE(review): this success dialog is shown even after a failure —
        # it is outside the try/except on purpose in the original code.
        QMessageBox.information(self , "Download Completed" , "The Download Finished")
        self.progressBar.setValue(0)
        self.LE_inserturl.setText('')
        self.LE_savelocation.setText('')

    def Get_Youtube_Video(self):
        """Fill both combo boxes with the mp4 video / m4a audio streams."""
        te = str(self.LE_insertvideourl.text())
        if (te == ''):
            QMessageBox.information(self, "Download Error", "Please insert The Video URL then Click on find video info")
        else:
            try:
                video_link = self.LE_insertvideourl.text()
                video = pafy.new(video_link)
                streams = video.videostreams
                streamsaudio = video.audiostreams
                for st in streams:
                    size = humanize.naturalsize(st.get_filesize())
                    if(st.extension == 'mp4'):
                        video_data = '{} {} {} {}' .format(st.mediatype , st.extension , st.quality , size)
                        self.cmb_videoinfo.addItem(video_data)
                for st2 in streamsaudio:
                    size2 = humanize.naturalsize(st2.get_filesize())
                    if (st2.extension == 'm4a'):
                        audio_data = '{} {} {} {}'.format(st2.mediatype, st2.extension, st2.quality, size2)
                        self.cmb_videoinfo_2.addItem(audio_data)
            except Exception:
                QMessageBox.warning(self, "Get info Error", "Cantnot get information")

    def Handle_progress_bar_video_Download(self,total, recvd, ratio, rate, eta):
        """pafy download callback for the video stream progress bar."""
        self.progressBar_4.setValue(ratio*100)
        QApplication.processEvents()

    def Handle_progress_bar_audio_Download(self,total, recvd, ratio, rate, eta):
        """pafy download callback for the audio stream progress bar."""
        self.progressBar_5.setValue(ratio*100)
        QApplication.processEvents()

    def Download_Youtube_Video(self):
        """Download the selected video and audio streams, then merge them."""
        te = str(self.LE_insertvideourl.text())
        te1 = str(self.LE_savelocation_2.text())
        if(self.cmb_videoinfo.currentIndex() == -1):
            QMessageBox.information(self, "Download Error", "Please click on Find Video info button to get differen qualities and sizes")
        elif(te == ''):
            QMessageBox.information(self, "Download Error", "Please insert The Video URL then Click on find video info")
        elif(te1 == ''):
            QMessageBox.information(self, "Download Error", "Browse and select folder to save video")
        else:
            try:
                video_link = self.LE_insertvideourl.text()
                video = pafy.new(video_link)
                save_location = self.LE_savelocation_2.text()
                st = video.videostreams
                st2 = video.audiostreams
                quality = self.cmb_videoinfo.currentIndex()
                quality2 = self.cmb_videoinfo_2.currentIndex()
                if(quality == -1):
                    # No explicit quality chosen: fall back to pafy's best.
                    h = video.getbest()
                    down = h.download(filepath = save_location ,quiet=True,callback=self.Handle_progress_bar_video_Download)
                else:
                    down = st[quality].download(filepath = save_location ,quiet=True,callback=self.Handle_progress_bar_video_Download)
                down2 = st2[quality2].download(filepath = save_location ,quiet=True,callback=self.Handle_progress_bar_audio_Download)
                QMessageBox.information(self, "Download Completed", "The Download Finished, Pleeeeease Wait minute to combine the video and audio")
                # Merge the separately-downloaded video and audio with moviepy.
                try:
                    pathvideo = save_location + "/" + video.title + ".mp4"
                    pathaudio = save_location + "/" + video.title + ".m4a"
                    newpathvideo = save_location + "/" + video.title + "New" + ".mp4"
                    clip = VideoFileClip(pathvideo).subclip(0 , t_end=None)
                    clip.write_videofile(newpathvideo , audio = pathaudio)
                    QMessageBox.information(self, "Compining Completed", "The Compining between audio and video Finished")
                    QApplication.processEvents()
                except Exception:
                    QMessageBox.warning(self, "Combining Error", "The Combining Failed")
            except Exception:
                QMessageBox.warning(self, "Download Error", "The Download Failed")
        self.LE_insertvideourl.setText('')
        self.LE_savelocation_2.setText('')

    def Save_Browse(self):
        """Pick a download directory and mirror it into both save fields."""
        save = QFileDialog.getExistingDirectory(self , "Select Download Directory")
        self.LE_savelocation_2.setText(save)
        self.LE_savelocation_3.setText(save)

    def PlayList_Download(self):
        """Download every item of a YouTube playlist into a folder named
        after the playlist title (created if needed)."""
        tex = str(self.LE_inserturl_3.text())
        tex1 = str(self.LE_savelocation_3.text())
        if (tex == ''):
            QMessageBox.information(self, "Download Error", "Please insert The playlist URL")
        elif(tex1 == ''):
            QMessageBox.information(self, "Download Error", "Browse and select folder to put playlist in")
        else:
            try:
                playlist_url = self.LE_inserturl_3.text()
                save_location = self.LE_savelocation_3.text()
                playlist = pafy.get_playlist(playlist_url)
                videos = playlist['items']
                # chdir into the target folder; note this changes the
                # process-wide working directory.
                os.chdir(save_location)
                if os.path.exists(str(playlist['title'])):
                    os.chdir(str(playlist['title']))
                else:
                    os.mkdir(str(playlist['title']))
                    os.chdir(str(playlist['title']))
                for video in videos:
                    p = video['pafy']
                    best = p.getbest(preftype='mp4')
                    best.download()
            except Exception:
                QMessageBox.warning(self, "Download Error", "The Download Failed")
def main():
    """Create the Qt application, show the main window, run the event loop."""
    app = QApplication(sys.argv)
    window = mainapp()
    window.show()
    app.exec_()

if __name__ == '__main__':
    main()
| MohamedMostafaSoliman/Download-Manager | DownloadManager/DM.py | DM.py | py | 7,769 | python | en | code | 0 | github-code | 13 |
30810020044 | from django.http import Http404
from django.shortcuts import render, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from .forms import ProductForm #, RawProductForm
from .models import Product
# Create your views here.
#Created views for each of the CRUD actions
#Using Django login_required to only allow logged in users to access inventory
#If an unauthenticated user tries to access our inventory, we redirect them to our login page
@login_required(login_url='/login/')
def product_create_view(request):
    """Render the product creation form; save and reset it on valid POST."""
    form = ProductForm(request.POST or None)
    if form.is_valid():
        form.save()
        # Present a fresh, empty form after a successful save.
        form = ProductForm()
    context = {
        'form': form
    }
    return render(request, "products/product_create.html", context)
@login_required(login_url='/login/')
def product_update_view(request, id=None):
    """Render a pre-filled form for the product with primary key *id* and
    save changes on valid POST.  404s when the product does not exist.

    Bug fix: the default used to be ``id=id``, which captured the *builtin*
    ``id`` function as the default value.  The URLconf always supplies the
    keyword, so ``None`` is a safe, honest default.
    """
    obj = get_object_or_404(Product, id=id)
    form = ProductForm(request.POST or None, instance=obj)
    if form.is_valid():
        form.save()
    context = {
        'form': form
    }
    return render(request, "products/product_create.html", context)
@login_required(login_url='/login/')
def product_list_view(request):
    """List every product in the inventory."""
    queryset = Product.objects.all()
    context = {
        "object_list": queryset
    }
    return render(request, "products/product_list.html", context)
@login_required(login_url='/login/')
def product_detail_view(request, id):
    """Show a single product; 404 when *id* does not exist."""
    obj = get_object_or_404(Product, id=id)
    context = {
        "object": obj
    }
    return render(request, "products/product_detail.html", context)
@login_required(login_url='/login/')
def product_delete_view(request, id):
    """Confirm-and-delete view: GET renders a confirmation page, POST
    deletes the product and redirects back to the list."""
    obj = get_object_or_404(Product, id=id)
    if request.method == "POST":
        obj.delete()
        return redirect('../../')
    context = {
        "object": obj
    }
    return render(request, "products/product_delete.html", context)
72915378898 | import os
import os.path
import io
import re
import sys
import enum
import json
import fnmatch
import datetime
import traceback
import functools
import contextlib
import shlex
import mimetypes
from typing import (Any, Callable, IO, Iterator,
Optional, Sequence, Tuple, List, Type, Union,
TypeVar, Protocol)
from qutebrowser.qt.core import QUrl, QVersionNumber, QRect, QPoint
from qutebrowser.qt.gui import QClipboard, QDesktopServices
from qutebrowser.qt.widgets import QApplication
import yaml
try:
from yaml import (CSafeLoader as YamlLoader,
CSafeDumper as YamlDumper)
YAML_C_EXT = True
except ImportError: # pragma: no cover
from yaml import (SafeLoader as YamlLoader, # type: ignore[assignment]
SafeDumper as YamlDumper)
YAML_C_EXT = False
from qutebrowser.utils import log
fake_clipboard = None
log_clipboard = False
is_mac = sys.platform.startswith('darwin')
is_linux = sys.platform.startswith('linux')
is_windows = sys.platform.startswith('win')
is_posix = os.name == 'posix'
_C = TypeVar("_C", bound="Comparable")
class Comparable(Protocol):

    """Protocol for a "comparable" object.

    Only __lt__ and __ge__ are required; the remaining comparisons can be
    derived from them.
    """

    def __lt__(self: _C, other: _C) -> bool:
        ...

    def __ge__(self: _C, other: _C) -> bool:
        ...
class VersionNumber:

    """A representation of a version number.

    Thin wrapper around QVersionNumber that enforces normalized, non-null
    versions and exposes major/minor/patch/segments attributes.
    """

    def __init__(self, *args: int) -> None:
        self._ver = QVersionNumber(args)  # not *args, to support >3 components

        if self._ver.isNull():
            raise ValueError("Can't construct a null version")

        normalized = self._ver.normalized()
        if normalized != self._ver:
            raise ValueError(
                f"Refusing to construct non-normalized version from {args} "
                f"(normalized: {tuple(normalized.segments())}).")

        self.major = self._ver.majorVersion()
        self.minor = self._ver.minorVersion()
        self.patch = self._ver.microVersion()
        self.segments = self._ver.segments()

    def __str__(self) -> str:
        return ".".join(str(s) for s in self.segments)

    def __repr__(self) -> str:
        args = ", ".join(str(s) for s in self.segments)
        return f'VersionNumber({args})'

    def strip_patch(self) -> 'VersionNumber':
        """Get a new VersionNumber with the patch version removed."""
        return VersionNumber(*self.segments[:2])

    @classmethod
    def parse(cls, s: str) -> 'VersionNumber':
        """Parse a version number from a string."""
        ver, _suffix = QVersionNumber.fromString(s)
        # FIXME: Should we support a suffix?
        if ver.isNull():
            raise ValueError(f"Failed to parse {s}")
        return cls(*ver.normalized().segments())

    def __hash__(self) -> int:
        return hash(self._ver)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, VersionNumber):
            return NotImplemented
        return self._ver == other._ver

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, VersionNumber):
            return NotImplemented
        return self._ver != other._ver

    # FIXME:mypy type ignores below needed for PyQt5-stubs:
    # Unsupported left operand type for ... ("QVersionNumber")

    def __ge__(self, other: 'VersionNumber') -> bool:
        return self._ver >= other._ver  # type: ignore[operator,unused-ignore]

    def __gt__(self, other: 'VersionNumber') -> bool:
        return self._ver > other._ver  # type: ignore[operator,unused-ignore]

    def __le__(self, other: 'VersionNumber') -> bool:
        return self._ver <= other._ver  # type: ignore[operator,unused-ignore]

    def __lt__(self, other: 'VersionNumber') -> bool:
        return self._ver < other._ver  # type: ignore[operator,unused-ignore]
class Unreachable(Exception):

    """Raised when there was unreachable code."""


class ClipboardError(Exception):

    """Raised if the clipboard contents are unavailable for some reason."""


class SelectionUnsupportedError(ClipboardError):

    """Raised if [gs]et_clipboard is used and selection=True is unsupported."""

    def __init__(self) -> None:
        # Fixed message: primary selection only exists on some platforms.
        super().__init__("Primary selection is not supported on this "
                         "platform!")


class ClipboardEmptyError(ClipboardError):

    """Raised if get_clipboard is used and the clipboard is empty."""
def elide(text: str, length: int) -> str:
    """Elide text so it uses a maximum of length chars.

    The last character of an elided result is the single-character
    ellipsis (U+2026).
    """
    if length < 1:
        raise ValueError("length must be >= 1!")
    return text if len(text) <= length else text[:length - 1] + '\u2026'
def elide_filename(filename: str, length: int) -> str:
    """Elide a filename to the given length.

    Unlike elide(), characters are removed from the *middle*, which keeps
    file name extensions visible, and ASCII dots ("...") are used instead of
    U+2026 so the result is safe under any filesystem encoding.  Path
    separators are not treated specially.

    Args:
        filename: The filename to elide.
        length: The maximum length of the filename, must be at least 3.

    Return:
        The elided filename.
    """
    marker = '...'
    if length < len(marker):
        raise ValueError('length must be greater or equal to 3')
    if len(filename) <= length:
        return filename

    # Characters remaining for actual filename content.
    budget = length - len(marker)
    left = budget // 2
    right = budget - left
    if right == 0:
        return filename[:left] + marker
    return filename[:left] + marker + filename[-right:]
def compact_text(text: str, elidelength: Optional[int] = None) -> str:
    """Remove leading whitespace and newlines from a text and maybe elide it.

    Args:
        text: The text to compact.
        elidelength: To how many chars to elide (None: no eliding).
    """
    # Fix: the default None was annotated as plain `int`; it is Optional.
    out = ''.join(line.strip() for line in text.splitlines())
    if elidelength is not None:
        out = elide(out, elidelength)
    return out
def format_seconds(total_seconds: int) -> str:
    """Format a count of seconds to get a [H:]M:SS string."""
    sign = '-' if total_seconds < 0 else ''
    hours, rem = divmod(abs(round(total_seconds)), 3600)
    minutes, seconds = divmod(rem, 60)
    if hours:
        body = '{}:{:02}:{:02}'.format(hours, minutes, seconds)
    else:
        # Minutes are not zero-padded when there is no hour component.
        body = '{}:{:02}'.format(minutes, seconds)
    return sign + body
def format_size(size: Optional[float], base: int = 1024, suffix: str = '') -> str:
    """Format a byte size so it's human readable.

    Inspired by https://stackoverflow.com/q/1094841
    """
    units = ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    if size is None:
        return '?.??' + suffix
    value = size
    for unit in units:
        if -base < value < base:
            return '{:.02f}{}{}'.format(value, unit, suffix)
        value /= base
    # Fell off the table: keep the largest unit.
    return '{:.02f}{}{}'.format(value, units[-1], suffix)
class FakeIOStream(io.TextIOBase):

    """A fake file-like stream which calls a function for write-calls."""

    def __init__(self, write_func: Callable[[str], int]) -> None:
        super().__init__()
        # Shadow the inherited write method with the supplied callable.
        self.write = write_func  # type: ignore[method-assign]
@contextlib.contextmanager
def fake_io(write_func: Callable[[str], int]) -> Iterator[None]:
    """Run code with stdout and stderr replaced by FakeIOStreams.

    Args:
        write_func: The function to call when write is called.
    """
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    fake_stderr = FakeIOStream(write_func)
    fake_stdout = FakeIOStream(write_func)
    sys.stderr = fake_stderr  # type: ignore[assignment]
    sys.stdout = fake_stdout  # type: ignore[assignment]
    try:
        yield
    finally:
        # If the code we did run did change sys.stdout/sys.stderr, we leave it
        # unchanged. Otherwise, we reset it.
        if sys.stdout is fake_stdout:  # type: ignore[comparison-overlap]
            sys.stdout = old_stdout
        if sys.stderr is fake_stderr:  # type: ignore[comparison-overlap]
            sys.stderr = old_stderr
@contextlib.contextmanager
def disabled_excepthook() -> Iterator[None]:
    """Run code with the exception hook temporarily disabled.

    While active, sys.excepthook is reset to the interpreter default
    (sys.__excepthook__) so exceptions are not routed through any custom
    crash handler.
    """
    old_excepthook = sys.excepthook
    sys.excepthook = sys.__excepthook__
    try:
        yield
    finally:
        # If the code we did run did change sys.excepthook, we leave it
        # unchanged. Otherwise, we reset it.
        if sys.excepthook is sys.__excepthook__:
            sys.excepthook = old_excepthook
class prevent_exceptions:  # noqa: N801,N806 pylint: disable=invalid-name

    """Decorator to ignore and log exceptions.

    This needs to be used for some places where PyQt segfaults on exceptions
    or silently ignores them.

    We used to re-raise the exception with a single-shot QTimer in a similar
    case, but that lead to a strange problem with a KeyError with some random
    jinja template stuff as content. For now, we only log it, so it doesn't
    pass 100% silently.

    This could also be a function, but as a class (with a "wrong" name) it's
    much cleaner to implement.

    Attributes:
        _retval: The value to return in case of an exception.
        _predicate: The condition which needs to be True to prevent exceptions
    """

    def __init__(self, retval: Any, predicate: bool = True) -> None:
        """Save decorator arguments.

        Gets called on parse-time with the decorator arguments.

        Args:
            See class attributes.
        """
        self._retval = retval
        self._predicate = predicate

    def __call__(self, func: Callable[..., Any]) -> Callable[..., Any]:
        """Called when a function should be decorated.

        Args:
            func: The function to be decorated.

        Return:
            The decorated function.
        """
        if not self._predicate:
            # Predicate false: return the function unwrapped.
            return func

        retval = self._retval

        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            """Call the original function."""
            try:
                return func(*args, **kwargs)
            except BaseException:
                log.misc.exception("Error in {}".format(qualname(func)))
                return retval

        return wrapper
def is_enum(obj: Any) -> bool:
    """Check whether the given object is an enum class."""
    # issubclass() raises TypeError for non-classes, so guard explicitly.
    if not isinstance(obj, type):
        return False
    return issubclass(obj, enum.Enum)
def get_repr(obj: Any, constructor: bool = False, **attrs: Any) -> str:
    """Get a suitable __repr__ string for an object.

    Args:
        obj: The object to get a repr for.
        constructor: If True, show the Foo(one=1, two=2) form instead of
                     <Foo one=1 two=2>.
        **attrs: The attributes to add.
    """
    cls_name = qualname(obj.__class__)
    # Sort attributes so the repr is deterministic.
    parts = ['{}={!r}'.format(key, value)
             for key, value in sorted(attrs.items())]
    if constructor:
        return '{}({})'.format(cls_name, ', '.join(parts))
    if parts:
        return '<{} {}>'.format(cls_name, ' '.join(parts))
    return '<{}>'.format(cls_name)
def qualname(obj: Any) -> str:
    """Get the fully qualified name of an object.

    Based on twisted.python.reflect.fullyQualifiedName.

    Should work with functools.partial objects, functions, classes, methods
    and modules.
    """
    # Unwrap partials so we name the underlying callable.
    if isinstance(obj, functools.partial):
        obj = obj.func

    prefix = '{}.'.format(obj.__module__) if hasattr(obj, '__module__') else ''

    for attr in ('__qualname__', '__name__'):
        if hasattr(obj, attr):
            return prefix + getattr(obj, attr)
    return repr(obj)
_ExceptionType = Union[Type[BaseException], Tuple[Type[BaseException]]]


def raises(exc: _ExceptionType, func: Callable[..., Any], *args: Any) -> bool:
    """Check if a function raises a given exception.

    Args:
        exc: A single exception class or a tuple of exception classes.
        func: A function to call.
        *args: The arguments to pass to the function.

    Returns:
        True if the exception was raised, False otherwise.
    """
    try:
        func(*args)
        return False
    except exc:
        return True
def force_encoding(text: str, encoding: str) -> str:
    """Make sure a given text is encodable with the given encoding.

    Any character not representable in *encoding* becomes a question mark.
    """
    encoded = text.encode(encoding, errors='replace')
    return encoded.decode(encoding)
def sanitize_filename(name: str,
                      replacement: Optional[str] = '_',
                      shorten: bool = False) -> str:
    """Replace invalid filename characters.

    Note: This should be used for the basename, as it also removes the path
    separator.

    Args:
        name: The filename.
        replacement: The replacement character (or None).
        shorten: Shorten the filename if it's too long for the filesystem.
    """
    if replacement is None:
        replacement = ''

    # Remove chars which can't be encoded in the filename encoding.
    # See https://github.com/qutebrowser/qutebrowser/issues/427
    encoding = sys.getfilesystemencoding()
    name = force_encoding(name, encoding)

    # See also
    # https://en.wikipedia.org/wiki/Filename#Reserved_characters_and_words
    if is_windows:
        bad_chars = '\\/:*?"<>|'
    elif is_mac:
        # Colons can be confusing in finder https://superuser.com/a/326627
        bad_chars = '/:'
    else:
        bad_chars = '/'

    for bad_char in bad_chars:
        name = name.replace(bad_char, replacement)

    if not shorten:
        return name

    # Truncate the filename if it's too long.
    # Most filesystems have a maximum filename length of 255 bytes:
    # https://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits
    # We also want to keep some space for QtWebEngine's ".download" suffix, as
    # well as deduplication counters.
    max_bytes = 255 - len("(123).download")
    root, ext = os.path.splitext(name)
    # First cut by character count (cheap upper bound), then trim further
    # below based on the actual encoded byte length.
    root = root[:max_bytes - len(ext)]
    excess = len(os.fsencode(root + ext)) - max_bytes

    while excess > 0 and root:
        # Max 4 bytes per character is assumed.
        # Integer division floors to -∞, not to 0.
        root = root[:(-excess // 4)]
        excess = len(os.fsencode(root + ext)) - max_bytes

    if not root:
        # Trimming the root is not enough. We must trim the extension.
        # We leave one character in the root, so that the filename
        # doesn't start with a dot, which makes the file hidden.
        root = name[0]
        excess = len(os.fsencode(root + ext)) - max_bytes
        while excess > 0 and ext:
            ext = ext[:(-excess // 4)]
            excess = len(os.fsencode(root + ext)) - max_bytes

    assert ext, name

    name = root + ext

    return name
def _clipboard() -> QClipboard:
    """Get the QClipboard and make sure it's not None."""
    clipboard = QApplication.clipboard()
    # clipboard() returns None if no QApplication instance exists yet.
    assert clipboard is not None
    return clipboard
def set_clipboard(data: str, selection: bool = False) -> None:
    """Set the clipboard to some given data.

    Args:
        data: The text to put on the clipboard.
        selection: Use the X11 primary selection instead of the clipboard.

    Raises:
        SelectionUnsupportedError: If selection is requested but unsupported.
    """
    global fake_clipboard
    if selection and not supports_selection():
        raise SelectionUnsupportedError
    if log_clipboard:
        # Test mode: remember the value instead of touching the real clipboard.
        what = 'primary selection' if selection else 'clipboard'
        log.misc.debug("Setting fake {}: {}".format(what, json.dumps(data)))
        fake_clipboard = data
    else:
        mode = QClipboard.Mode.Selection if selection else QClipboard.Mode.Clipboard
        _clipboard().setText(data, mode=mode)
def get_clipboard(selection: bool = False, fallback: bool = False) -> str:
    """Get data from the clipboard.

    Args:
        selection: Use the primary selection.
        fallback: Fall back to the clipboard if primary selection is
                  unavailable.

    Raises:
        ValueError: If fallback is given without selection.
        SelectionUnsupportedError: If selection is unsupported and fallback
                                   was not requested.
        ClipboardEmptyError: If the chosen clipboard holds no text.
    """
    global fake_clipboard
    if fallback and not selection:
        raise ValueError("fallback given without selection!")

    if selection and not supports_selection():
        if fallback:
            selection = False
        else:
            raise SelectionUnsupportedError

    if fake_clipboard is not None:
        # Test mode: consume the fake value exactly once.
        data = fake_clipboard
        fake_clipboard = None
    else:
        mode = QClipboard.Mode.Selection if selection else QClipboard.Mode.Clipboard
        data = _clipboard().text(mode=mode)

    target = "Primary selection" if selection else "Clipboard"
    if not data.strip():
        raise ClipboardEmptyError("{} is empty.".format(target))
    log.misc.debug("{} contained: {!r}".format(target, data))

    return data
def supports_selection() -> bool:
    """Check if the OS supports primary selection (e.g. X11 middle-click paste)."""
    return _clipboard().supportsSelection()
def open_file(filename: str, cmdline: Optional[str] = None) -> None:
    """Open the given file.

    If cmdline is not given, downloads.open_dispatcher is used.
    If open_dispatcher is unset, the system's default application is used.

    Args:
        filename: The filename to open.
        cmdline: The command to use as string. A `{}` is expanded to the
                 filename. None means to use the system's default application
                 or `downloads.open_dispatcher` if set. If no `{}` is found,
                 the filename is appended to the cmdline.
    """
    # Import late to avoid circular imports:
    # - usertypes -> utils -> guiprocess -> message -> usertypes
    # - usertypes -> utils -> config -> configdata -> configtypes ->
    #   cmdutils -> command -> message -> usertypes
    from qutebrowser.config import config
    from qutebrowser.misc import guiprocess
    from qutebrowser.utils import version, message

    # the default program to open downloads with - will be empty string
    # if we want to use the default
    override = config.val.downloads.open_dispatcher

    if version.is_flatpak():
        # Inside the sandbox we can't spawn arbitrary host commands.
        if cmdline:
            message.error("Cannot spawn download dispatcher from sandbox")
            return
        if override:
            message.warning("Ignoring download dispatcher from config in "
                            "sandbox environment")
            override = None

    # precedence order: cmdline > downloads.open_dispatcher > openUrl
    if cmdline is None and not override:
        log.misc.debug("Opening {} with the system application"
                       .format(filename))
        url = QUrl.fromLocalFile(filename)
        QDesktopServices.openUrl(url)
        return

    if cmdline is None and override:
        cmdline = override
    assert cmdline is not None

    cmd, *args = shlex.split(cmdline)
    args = [arg.replace('{}', filename) for arg in args]
    if '{}' not in cmdline:
        args.append(filename)
    log.misc.debug("Opening {} with {}"
                   .format(filename, [cmd] + args))
    proc = guiprocess.GUIProcess(what='open-file')
    proc.start_detached(cmd, args)
def unused(_arg: Any) -> None:
    """Function which does nothing to avoid pylint complaining."""
    # Intentionally empty: merely "uses" the argument at the call site.
def expand_windows_drive(path: str) -> str:
    r"""Expand a drive-path like E: into E:\.

    Does nothing for other paths.

    Args:
        path: The path to expand.
    """
    # On Windows, "E:" usually refers to the current working directory on
    # drive E:\, while "E:\" is the drive root. Most users don't use the
    # "multiple working directories" feature and expect both to be equal,
    # so append the backslash for bare drive letters.
    if not re.fullmatch(r'[A-Z]:', path, re.IGNORECASE):
        return path
    return path + "\\"
def yaml_load(f: Union[str, IO[str]]) -> Any:
    """Wrapper over yaml.load using the C loader if possible.

    Args:
        f: A YAML string or an open text stream to parse.

    Return:
        The deserialized Python object.
    """
    start = datetime.datetime.now()

    # WORKAROUND for https://github.com/yaml/pyyaml/pull/181
    with log.py_warning_filter(
            category=DeprecationWarning,
            message=r"Using or importing the ABCs from 'collections' instead "
            r"of from 'collections\.abc' is deprecated.*"):
        try:
            data = yaml.load(f, Loader=YamlLoader)
        except ValueError as e:  # pragma: no cover
            pyyaml_error = 'could not convert string to float'
            if str(e).startswith(pyyaml_error):
                # WORKAROUND for https://github.com/yaml/pyyaml/issues/168
                raise yaml.YAMLError(e)
            raise

    end = datetime.datetime.now()

    # Warn on pathologically slow loads so users can report them upstream.
    delta = (end - start).total_seconds()
    deadline = 10 if 'CI' in os.environ else 2
    if delta > deadline:  # pragma: no cover
        log.misc.warning(
            "YAML load took unusually long, please report this at "
            "https://github.com/qutebrowser/qutebrowser/issues/2777\n"
            "duration: {}s\n"
            "PyYAML version: {}\n"
            "C extension: {}\n"
            "Stack:\n\n"
            "{}".format(
                delta, yaml.__version__, YAML_C_EXT,
                ''.join(traceback.format_stack())))

    return data
def yaml_dump(data: Any, f: Optional[IO[str]] = None) -> Optional[str]:
    """Wrapper over yaml.dump using the C dumper if possible.

    Also returns a str instead of bytes.

    Args:
        data: The object to serialize.
        f: Optional stream to write to; if given, None is returned.

    Return:
        The YAML document as str, or None if it was written to *f*.
    """
    # yaml.dump returns bytes because of encoding='utf-8', or None when a
    # stream is passed.
    yaml_data = yaml.dump(data, f, Dumper=YamlDumper, default_flow_style=False,
                          encoding='utf-8', allow_unicode=True)
    if yaml_data is None:
        return None
    else:
        return yaml_data.decode('utf-8')
_T = TypeVar('_T')


def chunk(elems: Sequence[_T], n: int) -> Iterator[Sequence[_T]]:
    """Yield successive n-sized chunks from elems.

    If len(elems) is not a multiple of n, the last chunk will be smaller.
    """
    if n < 1:
        raise ValueError("n needs to be at least 1!")
    start = 0
    while start < len(elems):
        yield elems[start:start + n]
        start += n
def guess_mimetype(filename: str, fallback: bool = False) -> str:
    """Guess a mimetype based on a filename.

    Args:
        filename: The filename to check.
        fallback: Fall back to application/octet-stream if unknown.
    """
    mimetype = mimetypes.guess_type(filename)[0]
    if mimetype is not None:
        return mimetype
    if fallback:
        return 'application/octet-stream'
    raise ValueError("Got None mimetype for {}".format(filename))
def ceil_log(number: int, base: int) -> int:
    """Compute max(1, ceil(log(number, base))).

    Use only integer arithmetic in order to avoid numerical error.
    """
    if number < 1 or base < 2:
        raise ValueError("math domain error")

    power = base
    exponent = 1
    # Raise base to successive powers until it reaches *number*.
    while power < number:
        power *= base
        exponent += 1
    return exponent
def parse_duration(duration: str) -> int:
    """Parse duration in format XhYmZs into milliseconds duration.

    A plain number is interpreted as milliseconds for backwards
    compatibility. Each h/m/s component is optional and may carry a
    fractional part, e.g. "1.5h" or "1.25s".

    Args:
        duration: The duration string to parse.

    Return:
        The duration in milliseconds.

    Raises:
        ValueError: If the string is not a valid duration.
    """
    if duration.isdigit():
        # For backward compatibility return milliseconds
        return int(duration)

    # BUG FIX: the fractional part used to be (\.[0-9])? which allowed only a
    # single decimal digit, rejecting valid inputs like "1.25s".
    match = re.fullmatch(
        r'(?P<hours>[0-9]+(\.[0-9]+)?h)?\s*'
        r'(?P<minutes>[0-9]+(\.[0-9]+)?m)?\s*'
        r'(?P<seconds>[0-9]+(\.[0-9]+)?s)?',
        duration
    )
    # group(0) is empty when no component matched at all (e.g. "").
    if not match or not match.group(0):
        raise ValueError(
            f"Invalid duration: {duration} - "
            "expected XhYmZs or a number of milliseconds"
        )

    hours = float((match.group('hours') or '0h').rstrip('h'))
    minutes = float((match.group('minutes') or '0m').rstrip('m'))
    seconds = float((match.group('seconds') or '0s').rstrip('s'))
    milliseconds = int((seconds + minutes * 60 + hours * 3600) * 1000)
    return milliseconds
def mimetype_extension(mimetype: str) -> Optional[str]:
    """Get a suitable extension for a given mimetype.

    This mostly delegates to Python's mimetypes.guess_extension(), but backports some
    changes (via a simple override dict) which are missing from earlier Python versions.

    Most likely, this can be dropped once the minimum Python version is raised to 3.10.
    """
    overrides = {
        # Added in 3.10
        "application/x-hdf5": ".h5",
        # Added around 3.8
        "application/manifest+json": ".webmanifest",
    }
    try:
        return overrides[mimetype]
    except KeyError:
        return mimetypes.guess_extension(mimetype, strict=False)
@contextlib.contextmanager
def cleanup_file(filepath: str) -> Iterator[None]:
    """Context that deletes a file upon exit or error.

    Args:
        filepath: The file path
    """
    try:
        yield
    finally:
        # Best-effort removal: deletion failures are logged, not raised.
        try:
            os.remove(filepath)
        except OSError as e:
            log.misc.error(f"Failed to delete tempfile {filepath} ({e})!")
_RECT_PATTERN = re.compile(r'(?P<w>\d+)x(?P<h>\d+)\+(?P<x>\d+)\+(?P<y>\d+)')


def parse_rect(s: str) -> QRect:
    """Parse a rectangle string like 20x20+5+3.

    Negative offsets aren't supported, and neither is leaving off parts of the string.

    Args:
        s: The string to parse, in WxH+X+Y form.

    Raises:
        ValueError: If the string doesn't match or describes an invalid rect.
    """
    # BUG FIX: use fullmatch so trailing garbage (e.g. "20x20+5+3abc") is
    # rejected instead of silently ignored.
    match = _RECT_PATTERN.fullmatch(s)
    if not match:
        raise ValueError(f"String {s} does not match WxH+X+Y")

    w = int(match.group('w'))
    h = int(match.group('h'))
    x = int(match.group('x'))
    y = int(match.group('y'))

    try:
        rect = QRect(x, y, w, h)
    except OverflowError as e:
        # Chain the original error so the overflow context isn't lost.
        raise ValueError(e) from e

    if not rect.isValid():
        raise ValueError("Invalid rectangle")

    return rect
def parse_point(s: str) -> QPoint:
    """Parse a point string like 13,-42.

    Args:
        s: The string to parse, as two comma-separated integers.

    Raises:
        ValueError: If the string doesn't match X,Y or the values overflow.
    """
    try:
        x, y = map(int, s.split(',', maxsplit=1))
    except ValueError as e:
        # Chain the parse failure so the original error isn't lost (B904).
        raise ValueError(f"String {s} does not match X,Y") from e

    try:
        return QPoint(x, y)
    except OverflowError as e:
        raise ValueError(e) from e
def match_globs(patterns: List[str], value: str) -> Optional[str]:
    """Match a list of glob-like patterns against a value.

    Return:
        The first matching pattern if there was a match, None with no match.
    """
    matching = (pattern for pattern in patterns
                if fnmatch.fnmatchcase(name=value, pat=pattern))
    return next(matching, None)
| qutebrowser/qutebrowser | qutebrowser/utils/utils.py | utils.py | py | 26,444 | python | en | code | 9,084 | github-code | 13 |
17657437204 | # https://www.hackerrank.com/challenges/ip-address-validation/problem?h_r=next-challenge&h_v=zen&isFullScreen=false
import re
def main():
    """Classify each input line as IPv4, IPv6 or Neither.

    Reads a count n, then n address strings, and prints one verdict per line.
    """
    # BUG FIX: the original IPv4 pattern was wrong in two ways:
    #   * octets above 199 only matched 2[0-5][0-5] (200-205, 210-215, ...),
    #     rejecting valid values like 209 or 249;
    #   * the dots were optional, so strings like "1234" or "1.2.3.4."
    #     incorrectly validated.
    # Octets are now 0-255 with mandatory separating dots.
    octet = r"(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])"
    pattern4 = r"^" + octet + r"(\." + octet + r"){3}$"
    # BUG FIX: the IPv6 pattern allowed an empty final group (trailing ':').
    # Require eight colon-separated groups of 1-4 lowercase hex digits.
    pattern6 = r"^[a-f\d]{1,4}(:[a-f\d]{1,4}){7}$"

    IPv4 = re.compile(pattern4)
    IPv6 = re.compile(pattern6)

    # read input then classify each address
    n = int(input())
    for _ in range(n):
        s = input()
        if IPv4.match(s) is not None:
            print("IPv4")
        elif IPv6.match(s) is not None:
            print("IPv6")
        else:
            print("Neither")


main()
| dp-wu/HackerRank | regex/IP_Address_Validation.py | IP_Address_Validation.py | py | 667 | python | en | code | 0 | github-code | 13 |
12001537440 | # _*_ coding:utf8 _*_
'''
Pedagogical example realization of seq2seq recurrent neural networks, using TensorFlow and TFLearn.
More info at https://github.com/ichuang/tflearn_seq2seq
'''
from __future__ import division, print_function
import sys
# sys.setdefaultencoding('utf-8')
class DataLoad():
    """Loader for whitespace-separated prediction data files.

    Each input line is expected to look like::

        <id> <ignored> v0 v1 v2 v3 v4 v5 v6 ...

    i.e. the record id in column 0 and seven integer values in columns 2-8.
    """

    def __init__(self):
        pass

    def load_data(self, file="data.txt"):
        """Read *file* and build per-target training pairs.

        For each of the 7 value columns j, every row's full value vector is
        paired with the *next* row's j-th value (a one-step-ahead target).

        Args:
            file: Path of the data file to read.

        Returns:
            A list of 7 lists; entry j holds
            (values_of_row_i, value_j_of_row_i_plus_1) tuples for every
            consecutive pair of rows.
        """
        # BUG FIX / cleanup: the original leaked per-row debug prints, kept
        # an unused `index` list and drove the file with a manual readline()
        # loop; it also crashed on short/blank lines.
        data = []
        with open(file, "r") as fp:
            for line in fp:
                terms = line.split(" ")
                if len(terms) < 9:
                    # Skip malformed/short lines instead of crashing later.
                    continue
                record_id = terms[0]
                int_values = [int(v) for v in terms[2:9]]
                data.append((record_id, int_values))

        all_data = []
        for j in range(7):
            pairs = [(data[i][1], data[i + 1][1][j])
                     for i in range(len(data) - 1)]
            all_data.append(pairs)
        return all_data
# if __name__ == '__main__':
# load_data()
| liguoyu1/python | Learning/CNN/DataLoad.py | DataLoad.py | py | 1,280 | python | en | code | 49 | github-code | 13 |
30998852749 | from base import Tab
from PySide6.QtCore import Slot
class ScihubTab(Tab):
    """GUI tab wiring the Sci-Hub download workflow to the backend."""

    def __init__(self, app, window, backend) -> None:
        super().__init__(app, window, backend)
        # Placeholder texts hint at the accepted input formats.
        self.window.refIdentifierEdit.setPlaceholderText('DOI|PMID|URL')
        self.window.proxyEdit.setPlaceholderText('http://127.0.0.1:8889')
    def set_interaction_logic(self):
        """Connect the tab's buttons to their slot handlers."""
        self.window.refreshSourceButton.clicked.connect(self._on_refresh)
        self.window.submitButton.clicked.connect(self._on_submit)
        self.window.setProxyButton.clicked.connect(self._on_proxy_set)
        self.window.downloadButton.clicked.connect(self._on_download)
    @Slot()
    def _on_submit(self):
        """Resolve the entered identifier and show its metadata."""
        refSource = self.window.refIdentifierEdit.text()
        data = self.backend.parse_doi_arXiv(refSource)
        self.window.metaDataDisplay.setPlainText(str(data))
    @Slot()
    def _on_refresh(self):
        """Refresh the list of reachable Sci-Hub mirrors, showing a busy bar."""
        # set both min & max to 0 to switch progressBar to BUSY state
        # see: https://doc.qt.io/qtforpython/PySide6/QtWidgets/QProgressBar.html
        # use progressBar.setValue(int) to determine progress
        self.window.progressBar.setMinimum(0)
        self.window.progressBar.setMaximum(0)
        urls = self.backend.find_avaliable_scihub_urls()
        self.window.sourceList.addItems(urls)
        # Restore a determinate 0-100 range once the lookup finished.
        self.window.progressBar.setMinimum(0)
        self.window.progressBar.setMaximum(100)
    @Slot()
    def _on_proxy_set(self):
        """Apply the proxy URL from the edit box to the backend."""
        proxy = self.window.proxyEdit.text()
        print(proxy)
        self.backend.set_proxy_using_by_scihub(proxy)
    @Slot()
    def _on_proxy_clear(self):
        """Remove any proxy configured on the backend."""
        self.backend.clear_proxy_using_by_scihub()
@Slot()
def _on_download(self):
refSource = self.window.refIdentifierEdit.text()
self.backend.download_from_scihub(refSource) | Roy-Kid/paperInspector | paperInspector/scihubTab.py | scihubTab.py | py | 1,808 | python | en | code | 0 | github-code | 13 |
27738677662 | import torch
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
from Resnet_fashion import*
from IPython import display
import time
import sys
import random
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
import torch.nn.functional as F
import numpy as np
import torch.nn as nn
import torch.optim as optim
import os
from torch.autograd import Variable
import torchvision.datasets as dsets
def use_svg_display():
    """Render matplotlib figures with the high-DPI 'retina' format in Jupyter."""
    display.set_matplotlib_formats('retina')
def get_fashion_mnist_labels(labels):
    """Map Fashion-MNIST integer class ids (0-9) to their text names."""
    names = ['t-shirt', 'trouser', 'pullover', 'dress', 'coat',
             'sandal', 'shirt', 'sneaker', 'bag', 'ankle boot']
    return [names[idx] for idx in labels]
# Module-level copy of the class-id -> name mapping (kept for external use).
text_labels = ['t-shirt', 'trouser', 'pullover', 'dress','coat','sandal', 'shirt', 'sneaker', 'bag', 'ankle boot']
def show_fashion_mnist(images,labels):
    """Display a row of Fashion-MNIST images with their labels as titles.

    Args:
        images: Iterable of flattened 28x28 image tensors.
        labels: Matching iterable of title strings.
    """
    use_svg_display()
    _,figs=plt.subplots(1,len(images),figsize=(25,25))#
    for f,img,lbl in zip(figs,images,labels):
        f.imshow(img.view((28,28)).numpy())
        f.set_title(lbl,color='white')
        # Hide the axis ticks -- these are images, not charts.
        f.axes.get_xaxis().set_visible(False)
        f.axes.get_yaxis().set_visible(False)
    plt.show()
# Preview the content and classes of a few training-set images.
x,y=[],[]
for i in range(10):
    x.append(mnist_train[i][0])
    y.append(mnist_train[i][1])
show_fashion_mnist(x,get_fashion_mnist_labels(y))
# DataLoaders over the (externally defined) Fashion-MNIST train/test sets.
batch_size=128
trainloader=torch.utils.data.DataLoader(mnist_train,batch_size=batch_size,shuffle=True,
                                        num_workers=2)
testloader=torch.utils.data.DataLoader(mnist_test,batch_size=batch_size,shuffle=False,
                                       num_workers=2)
# ResNet-34 classifier with 10 output classes, trained on the GPU with SGD.
net = resnet34(10,True)
net = net.cuda()
import torch.optim as optim
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr = 0.005,momentum = 0.9)
def train(epoch, net, dataloader):
    """Run one training epoch and return the mean batch loss.

    NOTE(review): relies on the module-level `optimizer` and `criterion`, and
    moves data to CUDA unconditionally -- confirm a GPU is available.
    """
    train_loss = 0
    net.train()
    for i, (datas, labels) in enumerate(dataloader):
        datas, labels = datas.to('cuda'), labels.to('cuda')
        optimizer.zero_grad()
        outputs = net(datas)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        train_loss += loss.item()
    train_loss /= len(dataloader)
    return train_loss
def test(epoch, net, dataloader):
    """Evaluate the network over the dataloader; returns the mean batch loss.

    NOTE(review): relies on the module-level `criterion` and moves data to
    CUDA unconditionally.
    """
    test_loss = 0
    net.eval()
    for datas, labels in dataloader:
        datas, labels = datas.to('cuda'), labels.to('cuda')
        # No gradients are needed for evaluation.
        with torch.no_grad():
            outputs = net(datas)
            loss = criterion(outputs, labels)
        test_loss += loss.item()
    test_loss /= len(dataloader)
    return test_loss
# Train for a fixed number of epochs, recording per-epoch losses and printing
# test-set accuracy after each epoch.
EPOCHS = 9
train_loss = []
test_loss = []
for epoch in range(EPOCHS):
    trn_loss = train(epoch, net, trainloader)
    tst_loss = test(epoch, net, testloader)
    train_loss.append(trn_loss)
    test_loss.append(tst_loss)
    # NOTE(review): `i` here is a loop variable leaked from earlier code, and
    # `i % 1 == 0` is always true -- the printed "Batch" value is meaningless.
    print("Epoch : %d , Batch : %2d , train_loss : %.3f,test_loss : %.3f" % (epoch + 1, i + 1, trn_loss, tst_loss))
    if (i % 1 == 0):
        correct = 0
        total = 0
        with torch.no_grad():
            for i, (datas, labels) in enumerate(testloader):
                datas, labels = datas.cuda(), labels.cuda()
                outputs = net(datas)  # batch_size x 10 raw scores (logits), not probabilities
                _, predicted = torch.max(outputs.data, dim=1)  # max returns (values, indices); keep the class indices
                # accumulate the number of samples seen
                total += labels.size(0)
                # booleans sum to the number of correct predictions
                correct += (predicted == labels).sum()
        print(",准确率: {:.3f}%".format(correct / total * 100))
import matplotlib.pyplot as plt
# Plot the training/validation loss curves collected above.
plt.plot(train_loss, label='Training loss')
plt.plot(test_loss, label='Validation loss')
plt.legend() | Yekse/python_learning | 人脸识别/haarcascades/Run.py | Run.py | py | 3,956 | python | en | code | 0 | github-code | 13 |
31896747765 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 21 03:05:27 2020
@author: whorehay
"""
# Simulation clock counted in 10-second ticks. Presumably: a fan runs every
# 360 ticks (1 h), water every 1440 ticks (4 h), and everything resets after
# 8640 ticks (24 h) -- TODO confirm the intended schedule.
number10sec =0
dope= True
while(dope):
    numToadd =0
    if (number10sec%360 ==0):
        print("fan running")
        numToadd = numToadd+60;  # jump the clock forward while the fan runs
    if (number10sec%1440 ==0):
        print("water running")
        numToadd = numToadd+4;
    if (number10sec>8640):
        print("Reset")
        number10sec =0
        break
    number10sec = number10sec+numToadd
    number10sec = number10sec+1
# Debug output: cycles per day for the fan schedule.
print(8640/360)
print(1440/360) | pequode/class-projects | ec441_intro_to_computer_networks/functions/testsers.py | testsers.py | py | 539 | python | en | code | 0 | github-code | 13 |
def solution(operations):
    """Simulate a double-ended priority queue.

    Each operation is "I n" (insert n), "D 1" (remove the current maximum)
    or "D -1" (remove the current minimum). Deletions on an empty queue are
    ignored.

    Returns:
        [max, min] of the remaining values, or [0, 0] if the queue is empty.
    """
    queue = []
    for operation in operations:
        cmd, raw_value = operation.split(" ")
        value = int(raw_value)
        if cmd == "I":
            queue.append(value)
        elif queue:
            target = max(queue) if value > 0 else min(queue)
            queue.remove(target)
    if not queue:
        return [0, 0]
    return [max(queue), min(queue)]
29665042960 | ## This program will calculate hours worked based on a start time and endtime. Also with the option to calculate pay ##
import datetime
#Ask user for starting time
def start_time():
    """Prompt until the user enters a valid HH:MM(AM/PM) start time.

    Returns:
        A datetime.datetime built from the parsed clock time.
    """
    while True:
        try:
            a = datetime.datetime.strptime(input('What is your starting time??\nEnter in HH:MM(AM/PM) format (Ex. 08:00AM): '), "%I:%M%p")
            ##print(a.strftime("%I:%M%p"))
            return a
            # NOTE(review): this break is unreachable after the return.
            break
        except:
            print('Please enter correct time in HHMM format')
#Ask user how long lunch break is
def lunch_time():
    """Prompt until the user enters an integer lunch-break length.

    Returns:
        The break length in minutes as an int.
    """
    while True:
        try:
            time = int(input("How long is your lunch break (in minutes)? "))
            return time
            # NOTE(review): this break is unreachable after the return.
            break
        except:
            print('Please enter an integer value')
#Ask user for end time
def end_time():
    """Prompt until the user enters a valid HH:MM(AM/PM) end time.

    Returns:
        A datetime.datetime built from the parsed clock time.
    """
    while True:
        try:
            a = datetime.datetime.strptime(input('What is your ending time??\nEnter in HH:MM(AM/PM) format (Ex. 08:00AM): '), "%I:%M%p")
            ##print(a.strftime("%I:%M%p"))
            return a
            # NOTE(review): this break is unreachable after the return.
            break
        except:
            print('Please enter correct time in HHMM format')
def calc_day_pay(timedelta):
    """Ask for an hourly rate and return the day's pay (1.5x rate after 8h).

    Args:
        timedelta: Time worked as a datetime.timedelta.

    NOTE(review): this uses timedelta.seconds, so any .days component is
    dropped -- confirm shifts never span a day boundary.
    """
    #convert timedelta object to hours
    hours = timedelta.seconds / 3600
    rate = float(input("Hourly Rate? "))
    overtime_rate = 0
    #convert pay according to hours worked and if any overtime
    if hours > 8:
        overtime_rate = rate * 1.5
        return round((rate*8 + overtime_rate * (hours-8)),2)
    else:
        return round(hours * rate,2)
#Rerun program
def rerun():
    """Ask y/n whether to run again; returns True for yes, False for no."""
    rerun = ' '
    # Loop until the answer starts with 'y' or 'n' (case-insensitive).
    while rerun[0] != 'y' and rerun[0] != 'n':
        rerun = input("Rerun program? (y/n) ").lower()
    if rerun[0] == 'y':
        return True
    else:
        return False
# Main loop: read shift times, report hours worked, optionally project pay.
while True:
    startime = start_time()
    lunch_time_object = lunch_time()
    endtime = end_time()
    #Total time worked (end - start - lunch)
    difference_time = endtime - startime - datetime.timedelta(minutes=lunch_time_object)
    # str(timedelta) looks like "H:MM:SS"; split to get hours and minutes.
    difference_time_formatted = str(difference_time).split(':')
    print(difference_time_formatted[0] , 'hours', difference_time_formatted[1], 'minutes worked')
    #Ask user if he wants pay calculated
    calc_pay = ' '
    while calc_pay[0] != 'y' and calc_pay[0] != 'n':
        calc_pay = input("Calculate Pay (Net)? (y/n) ").lower()
    if calc_pay[0] == 'y':
        day_pay = calc_day_pay(difference_time)
        print('Here is your calculated pay, assuming 5 days per week, 4 weeks per month, and 12 months per year')
        print('Daily Pay: ', day_pay)
        print('Weekly Pay: ', day_pay*5)
        print('Monthly Pay: ', day_pay*5*4)
        print('Yearly Pay: ', day_pay*5*4*12)
    if not rerun():
        print("Have a Nice Day")
        break
| yseki12/personal_projects | CalcWorkHoursandPay.py | CalcWorkHoursandPay.py | py | 2,623 | python | en | code | 0 | github-code | 13 |
22322375918 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from urllib.parse import urlparse
import requests
import scrapy
# Viewer pages to scrape.
URLS = [
    "http://my.yoolib.com/mht/collection/2008-yacht-auxiliaire-de-10-5-metres/?n=2131"
]
# Local directory downloaded pictures are written to.
DATA_DIR = "/home/jean-baptiste/mht_files"
# NOTE(review): defined but unused in this module -- confirm before removing.
TRANSLATE_TABLE = {
    "\xa0": "Dimensions du document"
}
class PictureSpider(scrapy.Spider):
    """Scrapy spider extracting the full-resolution image behind a viewer page."""

    # Map '/' and NUL to '_' so page titles become safe file names.
    FILENAME_TRANS_TAB = str.maketrans(*["/\0", "__"])
    name = "picture"
    start_urls = URLS

    def parse(self, response):
        """Extract the page title and image URL, then download the image."""
        title_selector = '//h1[@class="title"]/text()'
        title = response.xpath(title_selector).extract_first()

        img_src_selector = '#yoolib_img img::attr(src)'
        img_src = response.css(img_src_selector).extract_first()

        parsed_url = urlparse(img_src)
        # The real file path is carried in the FIF= query parameter.
        pos = parsed_url.query.find("FIF=")
        pos_2 = parsed_url.query.find("&", pos)
        if pos != -1:
            file_url = parsed_url.query[pos + 4: pos_2 if pos_2 != -1 else len(parsed_url.query)]
            download_url = "http://" + parsed_url.netloc + file_url
            PictureSpider.download_picture(download_url, title)
        else:
            raise Exception("Parameter FIF not found")

    @staticmethod
    def download_picture(picture_url, title):
        """Stream the picture into DATA_DIR/<sanitized title>.tif."""
        file_name = DATA_DIR + '/' + title.translate(PictureSpider.FILENAME_TRANS_TAB) + '.tif'
        with open(file_name, 'wb') as handle:
            response = requests.get(picture_url, stream=True)
            for block in response.iter_content(1024):
                handle.write(block)
| spanska/yoolib-scrapper | spiders/picture_spider.py | picture_spider.py | py | 1,543 | python | en | code | 0 | github-code | 13 |
73655943057 | import torch
def evaluate(model, loss_func, test_dl):
    """Evaluate *model* on *test_dl* without updating any weights.

    Args:
        model: The torch module to evaluate (switched to eval mode).
        loss_func: Loss callable taking (outputs, labels).
        test_dl: Iterable of (inputs, label) batches. A batch size of 1 is
            assumed, since .item() is called on the per-batch prediction.

    Returns:
        dict with 'accuracy', the mean 'loss' over all batches, and the
        per-sample 'predictions', 'probabilities' (max logit values) and
        true 'labels'.
    """
    model.eval()
    running_loss = 0.0
    correct_labels = 0
    total_labels = 0
    predictions = []
    probabilities = []
    labels = []
    with torch.no_grad():
        for inputs, label in test_dl:
            outputs = model(inputs)
            probability, prediction = torch.max(outputs, 1)
            predictions.append(prediction.item())
            probabilities.append(probability.item())
            labels.append(label.item())
            running_loss += loss_func(outputs, label).item()
            correct_labels += (prediction == label).sum().item()
            total_labels += prediction.shape[0]
    acc = correct_labels / total_labels
    return {
        'accuracy': acc,
        # BUG FIX: the original returned only the *last* batch's loss;
        # report the mean loss over all batches instead (running_loss was
        # accumulated but never used).
        'loss': running_loss / len(test_dl),
        'predictions': predictions,
        'probabilities': probabilities,
        'labels': labels
    }
# def create_pr_curve(class_index, test_probs, test_label, global_step=0):
# '''
# Takes in a "class_index" from 0 to 9 and plots the corresponding
# precision-recall curve
# '''
# tensorboard_truth = test_label == class_index
# tensorboard_probs = test_probs[:, class_index]
# writer.add_pr_curve(classes[class_index],
# tensorboard_truth,
# tensorboard_probs,
# global_step=global_step)
# writer.close() | bjhammack/predict-at-bat-outcome | src/predict_at_bat_outcome/model/test.py | test.py | py | 1,489 | python | en | code | 1 | github-code | 13 |
27831006445 | from django.contrib import messages
from django.http import request
from django.http.response import Http404, HttpResponse
from django.shortcuts import redirect, render
from blog.models import *
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
import datetime
from django.core.mail import send_mail
import math, random
# Create your views here.
def index(request):
    """Render the home page: all posts, the week's trending posts, categories."""
    # Posts from the last 7 days ordered by read count form the trending list.
    week_ago = datetime.date.today() - datetime.timedelta(days = 7)
    post=Post.objects.all()
    trends = Post.objects.filter(date_time__gte = week_ago).order_by('-read')
    cat=Category.objects.all()
    params={
        'post':post,
        'trends':trends,
        'cat':cat,
        'pop_post': Post.objects.order_by('-read'),
        'recent': Post.objects.order_by('-date_time'),
    }
    return render(request,'index.html',params)
def contact(request):
    """Show the contact form and store submitted messages."""
    if request.method=="POST":
        name=request.POST['name']
        email=request.POST['email']
        phone=request.POST['phone']
        content =request.POST['message']
        # Minimal length sanity checks; no format validation is done here.
        if len(name)<2 or len(email)<3 or len(phone)<10 or len(content)<4:
            messages.error(request, "Please fill the form correctly")
        else:
            contact=Contact(name=name, email=email, phone=phone, content=content)
            contact.save()
            messages.success(request, "Your message has been successfully sent")
    return render(request, "contact.html")
def post(request,slug):
    """Render a single blog post (looked up by slug) with its comments."""
    post=Post.objects.filter(slug=slug).first()
    comments= BlogComment.objects.filter(post=post)
    context={'post':post, 'comments': comments, 'user': request.user}
    return render(request,'post.html',context)
def search(request):
    """Search post titles using the 'query' GET parameter."""
    query=request.GET['query']
    # Reject unreasonably long queries outright (78 chars is an arbitrary cap).
    if len(query)>78:
        allPosts=Post.objects.none()
    else:
        allPosts= Post.objects.filter(title__icontains=query)
    if allPosts.count()==0:
        messages.error(request, "No search results found. Please refine your query.")
    params={'allPosts': allPosts, 'query': query}
    return render(request, 'search.html', params)
def latestpost(request):
    """Render the 10 most recently published posts."""
    post=Post.objects.order_by('-date_time')[:10]
    return render(request,'latest.html',{'post':post})
def trending(request):
    """Render posts from the last week ordered by read count."""
    week_ago = datetime.date.today() - datetime.timedelta(days = 7)
    post = Post.objects.filter(date_time__gte = week_ago).order_by('-read')
    return render(request,"trending.html",{'post':post})
def postComment(request):
    """Store a comment submitted via POST and redirect back to the post.

    Non-POST requests get an error message and a redirect to the home page.
    """
    if request.method == "POST":
        comment=request.POST.get('comment')
        user=request.user
        postSno =request.POST.get('postSno')
        post= Post.objects.get(sno=postSno)
        comment=BlogComment(comment= comment, user=user, post=post)
        comment.save()
        messages.success(request, "Your comment has been posted successfully")
        return redirect(f"/blog/{post.slug}")
    else:
        # BUG FIX: messages.ERROR is the message-level constant (an int), not
        # a callable -- invoking it raised TypeError. Use messages.error().
        messages.error(request, "Not Found")
        return redirect('/')
def category(request, url):
    """Render all posts belonging to the category identified by slug *url*."""
    cat = Category.objects.get(slug=url)
    posts = Post.objects.filter(category=cat)
    return render(request, "category-single.html", {'cat': cat, 'posts': posts})
def allcategory(request):
    """Render the list of all categories."""
    cat=Category.objects.all()
    return render(request,'category.html',{'cat':cat})
def login_page(request):
    """Render the login form."""
    return render(request,'user/login-page.html')
def register_page(request):
    """Render the registration form."""
    return render(request,'user/register-page.html')
def handleSignUp(request):
    """Create a new user account from the signup form POST data."""
    if request.method=="POST":
        # Get the post parameters
        username=request.POST['username']
        email=request.POST['email']
        fname=request.POST['fname']
        lname=request.POST['lname']
        pass1=request.POST['pass1']

        # check for errorneous input
        # BUG FIX: the original rejected names *shorter* than 10 characters
        # while telling the user the name "must be under 10 characters".
        # Enforce the rule the message actually describes.
        if len(username)>10:
            messages.error(request, " Your user name must be under 10 characters")
            return redirect('home')
        if not username.isalnum():
            messages.error(request, " User name should only contain letters and numbers")
            return redirect('home')

        # Create the user
        myuser = User.objects.create_user(username, email, pass1)
        myuser.first_name= fname
        myuser.last_name= lname
        myuser.save()
        messages.success(request, " Your Account has been successfully created! NOW YOU CAN LOGIN!")
        return redirect('login')
    else:
        return HttpResponse("404 - Not found")
def handeLogin(request):
    """Authenticate the POSTed credentials and log the user in.

    NOTE(review): the function name has a typo ("handeLogin") but renaming it
    would break the URL configuration that references it.
    """
    if request.method=="POST":
        # Get the post parameters
        loginusername=request.POST['loginusername']
        loginpassword=request.POST['loginpassword']

        user=authenticate(username= loginusername, password= loginpassword)
        if user is not None:
            login(request, user)
            messages.success(request, "Successfully Logged In")
            return redirect("home")
        else:
            messages.error(request, "Invalid credentials! Please try again")
            return redirect("login-page")

    return HttpResponse("404- Not found")
def handelLogout(request):
    """Log the current user out and redirect to the home page."""
    logout(request)
    messages.success(request, "Successfully logged out")
    return redirect('home')
| MayankBalyan/My-Personal-Website | blog/views.py | views.py | py | 5,255 | python | en | code | 0 | github-code | 13 |
1589607302 | """
堆排序:
"""
def heapify(arr, n, i):
    """Sift arr[i] down so the subtree rooted at i satisfies the max-heap property.

    Args:
        arr: List being heapified (modified in place).
        n: Size of the active heap region in arr.
        i: Index of the subtree root to fix.
    """
    largest = i
    left = 2 * i + 1
    right = 2 * i + 2

    # Pick the largest of root, left child and right child.
    if left < n and arr[largest] < arr[left]:
        largest = left
    if right < n and arr[largest] < arr[right]:
        largest = right

    # If a child was larger, swap it up and keep sifting down.
    if largest != i:
        arr[i], arr[largest] = arr[largest], arr[i]
        heapify(arr, n, largest)


def heap_sort(arr: list):
    """Sort *arr* in ascending order in place using heapsort.

    Returns:
        The same list object, now sorted. (BUG FIX: the original returned
        the list only for len < 2 and implicitly returned None otherwise.)
    """
    n = len(arr)
    if n < 2:
        return arr

    # Build a max-heap; only non-leaf nodes (n//2-1 .. 0) need sifting
    # (the original also iterated over useless leaf/out-of-range indices).
    for i in range(n // 2 - 1, -1, -1):
        heapify(arr, n, i)

    # Repeatedly move the max to the end and restore the heap on the prefix.
    for end in range(n - 1, 0, -1):
        arr[end], arr[0] = arr[0], arr[end]
        heapify(arr, end, 0)
    return arr
| Abeautifulsnow/python_learning | sort_algorithm/堆排序.py | 堆排序.py | py | 1,312 | python | zh | code | 0 | github-code | 13 |
27079218695 | #!/usr/bin/env skeleton
# -*- coding: utf-8 -*-
#%%
"""
Created on Mon Sep 19 21:30:44 2022
@author: chenqu
"""
import numpy as np
import pandas as pd
import scanpy as sc
import scipy as sp
from scipy import stats
from collections import Counter
from collections.abc import Iterable
import rpy2
from rpy2.robjects.packages import importr
from rpy2.robjects.vectors import FloatVector
#%%
"""
function to compute average gene expression in bins along pseudotime
adata: cell adata
bin_no: number of bins to be divided along pseudotime
genes: genes for the computation
pseudotime_col: column in adata.obs where pseudotime is stored
[return]: gene_summary, a dataframe with genes as rows, and pseudotime bins as columns, and averaged gene expression as the data
"""
def bin_expression(adata, bin_no, genes, pseudotime_col):
# define bins
bins = np.linspace(0, 1, bin_no+1)
# get gene expression
x = np.array(adata[:,genes].X.todense())
# get pseudotime
y = np.array(adata.obs[pseudotime_col])
# calculate average gene expression in each bin
gene_summary = pd.DataFrame(columns = bins[:-1], index = genes)
for i in range(gene_summary.shape[1]):
time = bins[i]
select = np.array(bins[i] <= y) & np.array(y < bins[i+1])
gene_summary.loc[:,time]= np.mean(x[select,:],axis=0)
return gene_summary
#%%
"""
function to compute chatterjee correlation of gene expression with pseudotime
adata: cell adata
genes:genes selected to compute the correlation
pseudotime_col: column in adata.obs where pseudotime is stored
[return]: cor_res,
a dataframe with genes as rows,
with cor_res (correlation statistics),
pval (p-value),
adj_pval (p-value adjusted by BH method) as columns
"""
def chatterjee_corr(adata, genes, pseudotime_col, tie_breaking='theoretical'):
from scipy.stats import rankdata
# get gene expression
x = np.array(adata[:,genes].X.todense())
# get pseudotime
y = list(adata.obs[pseudotime_col])
# compute chatterjee correlation
# ref: Sourav Chatterjee (2021) A New Coefficient of Correlation, Journal of the American Statistical Association, 116:536, 2009-2022, DOI: 10.1080/01621459.2020.1758115
if tie_breaking == 'theoretical':
n_cell, n_gene = x.shape
stat = np.zeros(n_gene)
x_sorted = x[np.argsort(y), :]
r = rankdata(x_sorted, method='max', axis=0)
l = n_cell + 1 - rankdata(x_sorted, method='min', axis=0)
stat = 1 - n_cell * np.sum(np.abs(np.diff(r, axis=0)), axis=0) / 2 / np.sum(l * (n_cell - l), axis=0)
'''
for j in range(n_gene):
print(j, '/', n_gene, end='\r', flush=True)
x_j = x_sorted[:, j]
mx = np.reshape(np.repeat(x_j, n_cell) >= np.tile(x_j, n_cell), (n_cell, n_cell))
r = np.sum(mx, axis=1)
l = np.sum(mx, axis=0)
stat[j] = 1 - n_cell * np.sum(np.abs(np.diff(r))) / 2 / np.sum(l * (n_cell - l))
'''
elif tie_breaking == 'random':
# add small perturbation for random tie breaking
np.random.seed(0) # set random seed for consistent results
x = x + np.random.randn(x.shape[0], x.shape[1]) * 1e-15
stat = 1 - np.sum(np.abs(np.diff(rankdata(x[np.argsort(y), :], axis=0), axis=0)), axis=0) * 3 / (x.shape[0] ** 2 - 1)
stat = np.array(stat).flatten()
else:
raise Exception("tie_breaking should be 'theoretical' or 'random'.")
pval = 1 - sp.stats.norm.cdf(stat, loc=0, scale=np.sqrt(2/5/x.shape[0]))
# put results into dataframe cor_res
cor_res = pd.DataFrame({'cor_stat': stat, 'pval': pval})
cor_res.index = genes
# compute adjusted pval using BH method
stats = importr('stats')
cor_res.loc[:,'adj_pval']=stats.p_adjust(FloatVector(cor_res.loc[:,'pval']), method = 'BH')
# sort genes based on adjusted pval
cor_res= cor_res.sort_values(by='adj_pval')
return cor_res
#%%
"""
match function finds positions of first match for elements in small_list in
big_list.
small_list - a list, or numpy.ndarray, or pandas.Series object
big_list - a list, or numpy.ndarray, or pandas.Series object
nomatch - value to include if no match is found, -1 by default
[return] - a list of indices of first matches of small_list in big_list
"""
def match(small_list, big_list, nomatch=-1, sortable=True):
if sortable:
order = np.array(np.argsort(big_list))
big_sorted = np.array(big_list)[order]
small_list = np.array(small_list)
l = np.searchsorted(big_sorted, small_list, side='left')
insert_at_last = l == len(order)
l[insert_at_last] = 0
ifnomatch = insert_at_last | (big_sorted[l]!=small_list)
ret = order[l]
if np.any(ifnomatch):
ret[ifnomatch] = nomatch
else:
ret = np.array([big_list.index(item) for item in small_list])
return ret
#%%
"""
lookup function matches a vector of `lookup_value` to the `match_col` column of
`dataframe` and returns the corresponding values in `result_col` colume of
`dataframe`.
lookup_value - a vector of values to lookup, can be a panda.Series,
numpy.ndarray or list
dataframe - the lookup table/array, can be a pandas.DataFrame, panda.Series or
numpy.ndarray or list
match_col - column of `dataframe` to use as values to look up from, can be an
int or a string, if equal to -1, rownames of `dataframe` are used
result_col - column of `dataframe` to use as the result vector, can be an int
or a string, if equal to -1, rownames of `dataframe` are used, if equals
None, the matched ordering is returned.
[return] - a numpy.ndarray of lookup results, with unmatched positions filled
with NaN.
"""
def lookup(lookup_value, dataframe, match_col=0, result_col=None):
isIterable = isinstance(lookup_value, Iterable)
lookup_value = pd.Series(lookup_value) if isIterable else pd.Series([lookup_value])
dataframe = pd.DataFrame(dataframe)
if type(match_col) is int:
tmp = dataframe.iloc[:, match_col] if match_col >= 0 else dataframe.index
else:
tmp = dataframe[match_col]
if result_col is None:
ret = np.array(match(lookup_value, tmp))
return(ret if isIterable else ret[0])
elif type(result_col) is int:
tmp2 = dataframe.iloc[:, result_col] if result_col >= 0 else dataframe.index
else:
tmp2 = dataframe[result_col]
tmp2 = np.append(tmp2, np.nan)
m = match(lookup_value, tmp, dataframe.shape[0])
return(tmp2[m] if isIterable else tmp2[m][0]) | zktuong/dandelion-demo-files | dandelion_manuscript/utils/_chenqu_utils.py | _chenqu_utils.py | py | 6,665 | python | en | code | 0 | github-code | 13 |
def decimal_to_binary(value):
    """Return the binary digits of a non-negative integer, most significant first.

    Bug fix: the original loop printed nothing for an input of 0; zero now
    yields [0]. Negative inputs yield an empty list (matching the original,
    which printed nothing for them).
    """
    if value == 0:
        return [0]
    bits = []
    while value > 0:
        bits.append(value % 2)
        value //= 2
    bits.reverse()
    return bits


if __name__ == "__main__":
    x = int(input(" enter a number : "))
    for bit in decimal_to_binary(x):
        print(bit)
24088473900 | """
binjatron.py
A plugin for Binary Ninja to integrate Binary Ninja with Voltron.
Install per instructions here:
https://github.com/Vector35/binaryninja-api/tree/master/python/examples
Documentation here: https://github.com/snare/binja/blob/master/README.md
Note: requires the current version of Voltron from GitHub here:
https://github.com/snare/voltron
"""
from binaryninja import *
import voltron
from threading import Thread
from voltron.core import Client
from voltron.plugin import api_request
from scruffy import ConfigFile, PackageFile
import sys
log = voltron.setup_logging()
# Voltron client used for all requests to the debugger host.
client = Client()
# Breakpoint addresses that were highlighted after the last sync update.
last_bp_addrs = []
# Last program counter seen from the debugger (0 = none yet), and the
# highlight colour that instruction had before we recoloured it.
last_pc_addr = 0
last_pc_addr_colour = 0
# True while background synchronisation with Voltron is active.
syncing = False
# Cached result of the debugger's "version" request.
vers = None
# Offset between the debugger's load address and the analysed binary's base.
slide = 0
# Active BinjatronNotification instance (None when not syncing).
notification = None
# (callback, remove_after_one_call) pairs run after each successful sync update.
sync_callbacks = []
# Countdown before repeated sync errors are muted; reset to 3 on success.
mute_errors_after = 3
config = ConfigFile('~/.binjatron.conf', defaults=PackageFile('defaults.yaml'), apply_env=True, env_prefix='BTRON')
config.load()
# Highlight colours for breakpoints, the current PC, and "no highlight".
bp_colour = enums.HighlightStandardColor(config.bp_colour)
pc_colour = enums.HighlightStandardColor(config.pc_colour)
no_colour = enums.HighlightStandardColor(0)
def _get_function(view, address):
func = view.get_function_at(address)
if func is None:
return view.get_function_at(view.get_previous_function_start_before(address))
return func
def sync(view):
    """Start background synchronisation between the debugger (via Voltron) and this view.

    Registers a BinjatronNotification, fetches the debugger version, and
    starts the Voltron client with a callback that mirrors the debugger's
    breakpoints and program counter as instruction highlights in Binary Ninja.
    """
    global syncing, vers, notification
    def build_requests():
        # Requested from the debugger on every update: the PC and breakpoints.
        return [
            api_request('registers', registers=['pc'], block=True),
            api_request('breakpoints', block=True),
        ]
    def callback(results=[], error=None):
        global last_bp_addrs, last_pc_addr, last_pc_addr_colour, sync_callbacks, mute_errors_after, syncing
        if error:
            # Log the first few consecutive errors, then mute until the next
            # successful update so a closed debugger doesn't flood the log.
            if mute_errors_after > 0:
                log_error("Error synchronising: {}".format(error))
            elif mute_errors_after == 0:
                # Prevent errors from filling up the entire log if the debugger closes and we lose sync
                log_alert("Voltron encountered three sync errors in a row. Muting errors until the next succesful sync.")
                syncing = False
            mute_errors_after -= 1
        else:
            if(mute_errors_after < 0):
                log_info("Sync restored after {} attempts".format(mute_errors_after * -1))
            syncing = True
            mute_errors_after = 3
            if client and len(results):
                if results[1].breakpoints:
                    # Flatten every breakpoint location address, rebased by the slide.
                    addrs = [l['address'] - slide for s in [bp['locations'] for bp in results[1].breakpoints] for l in s]
                    # add colours to all the breakpoints currently set in the debugger
                    for addr in addrs:
                        func = _get_function(view, addr)
                        if func:
                            func.set_auto_instr_highlight(addr, bp_colour)
                    # remove colours from any addresses that had breakpoints the last time we updated, but don't now
                    for addr in set(last_bp_addrs) - set(addrs):
                        func = _get_function(view, addr)
                        if func:
                            func.set_auto_instr_highlight(addr, no_colour)
                    # save this set of breakpoint addresses for next time
                    last_bp_addrs = addrs
                elif last_bp_addrs:
                    if (results[1].status == 'success') or (hasattr(results[1], 'message') and 'busy' not in results[1].message.lower()):
                        # We end up here if the debugger has been closed and re-opened
                        replace_breakpoints = show_message_box(
                            'New Session',
                            'The Voltron instance currently syncing reports no breakpoints set, but breakpoints have been set in Binary Ninja. Restore these breakpoints?',
                            buttons=enums.MessageBoxButtonSet.YesNoButtonSet)
                        if replace_breakpoints:
                            # Re-apply Binary Ninja's remembered breakpoints to the new session.
                            for addr in set(last_bp_addrs):
                                set_breakpoint(view, addr)
                        else:
                            # Drop the stale highlights and forget the old breakpoints.
                            for addr in set(last_bp_addrs):
                                func = _get_function(view, addr)
                                if func:
                                    func.set_auto_instr_highlight(addr, no_colour)
                            last_bp_addrs = []
                if results[0].registers:
                    # get the current PC from the debugger
                    # NOTE(review): .values()[0] indexing implies Python 2 dict
                    # semantics; on Python 3 this would need list() — confirm.
                    addr = results[0].registers.values()[0] - slide
                    # find the function where that address is
                    func = _get_function(view, addr)
                    if last_pc_addr:
                        # update the highlight colour of the previous PC to its saved value
                        _get_function(view, last_pc_addr).set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)
                    # save the PC and current colour for that instruction
                    last_pc_addr_colour = func.get_instr_highlight(addr)
                    last_pc_addr = addr
                    # update the highlight colour to show the current PC
                    func.set_auto_instr_highlight(addr, pc_colour)
                    # Run sync callbacks and remove them from the list if specified
                    for cb, _ in sync_callbacks:
                        cb(results)
                    # NOTE(review): filter() returns a lazy iterator on Python 3,
                    # which would break later truthiness/iteration — confirm Py2.
                    sync_callbacks = filter(lambda cbt: not cbt[1], sync_callbacks)
                elif not results[1].breakpoints or (results[0].message == 'No such target'): # Clear the program counter highlight if the program isn't running
                    if last_pc_addr:
                        # update the highlight colour of the previous PC to its saved value
                        _get_function(view, last_pc_addr).set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)
    if not syncing:
        try:
            log_info("Starting synchronisation with Voltron")
            # register for notifications
            notification = BinjatronNotification(view)
            view.register_notification(notification)
            # Start the client
            vers = client.perform_request("version")
            client.start(build_requests=build_requests, callback=callback)
            syncing = True
        except:
            log_info("Couldn't connect to Voltron")
    else:
        log_info("Already synchronising with Voltron")
def stop(view):
    """Tear down Voltron synchronisation: restore highlights, stop the client, reset state."""
    global syncing, client, slide, notification
    if not syncing:
        log_alert("Not synchronising with Voltron")
        return
    log_info("Stopping synchronisation with Voltron")
    # Restore the saved colour of the last-known program counter, if any.
    if last_pc_addr:
        _get_function(view, last_pc_addr).set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)
    # Remove the breakpoint highlights we added.
    for bp_addr in last_bp_addrs:
        _get_function(view, bp_addr).set_auto_instr_highlight(bp_addr, no_colour)
    # Shut down the Voltron client and replace it with a fresh, unstarted one.
    client.stop()
    client = Client()
    # Detach the notification hook and reset the sync bookkeeping.
    view.unregister_notification(notification)
    notification = None
    syncing = False
    slide = 0
def set_breakpoint(view, address):
    """Set a debugger breakpoint at ``address`` and highlight it in Binary Ninja."""
    global vers
    try:
        # Fetch the debugger version lazily; it decides the command syntax.
        if not vers:
            vers = client.perform_request("version")
        target = address + slide
        host = vers.host_version
        if 'lldb' in host:
            cmd = "breakpoint set -a 0x{:x}".format(target)
        elif 'gdb' in host:
            cmd = "break *0x{:x}".format(target)
        else:
            raise Exception("Debugger host version {} not supported".format(host))
        # Send the breakpoint command to the debugger.
        result = client.perform_request("command", command=cmd, block=False)
        if result.is_error:
            raise Exception("Failed to set breakpoint: {}".format(result))
        # Refresh the Voltron views so the new breakpoint shows up there too.
        client.perform_request("command", command="voltron update", block=False)
        # Mirror the breakpoint as a highlight in Binary Ninja.
        owner = _get_function(view, address)
        if owner:
            owner.set_auto_instr_highlight(address, bp_colour)
    except:
        # Deliberately broad: any failure surfaces as a single user alert.
        log_alert("Failed to set breakpoint")
def delete_breakpoint(view, address):
    """Remove the debugger breakpoint covering ``address`` and clear its highlight.

    Any failure (no matching breakpoint, unsupported debugger, request error)
    is reported to the user via a single alert.
    """
    global vers, last_bp_addrs
    try:
        if not vers:
            vers = client.perform_request("version")
        # Ask the debugger for its breakpoints and find the one at this address.
        res = client.perform_request("breakpoints")
        bp_id = None
        if res.is_success:
            for bp in res.breakpoints:
                for location in bp['locations']:
                    if address == location['address'] - slide:
                        bp_id = bp['id']
                        break
                if bp_id is not None:
                    # Bug fix: the inner break only exited the locations loop,
                    # so a later breakpoint could overwrite the first match.
                    break
        if bp_id is None:
            # Bug fix: previously this fell through and sent "delete None"
            # to the debugger; fail cleanly instead (still caught below).
            raise Exception("No breakpoint found at address 0x{:x}".format(address))
        # Build the debugger-specific delete command.
        if 'lldb' in vers.host_version:
            cmd = "breakpoint delete {}".format(bp_id)
        elif 'gdb' in vers.host_version:
            cmd = "delete {}".format(bp_id)
        else:
            raise Exception("Debugger host version {} not supported".format(vers.host_version))
        res = client.perform_request("command", command=cmd, block=False)
        if res.is_error:
            raise Exception("Failed to delete breakpoint: {}".format(res))
        # Refresh the Voltron views.
        client.perform_request("command", command="voltron update", block=False)
        # Clear the highlight in Binary Ninja and forget the address locally.
        func = _get_function(view, address)
        if func:
            func.set_auto_instr_highlight(address, no_colour)
        # List comprehension (not filter()) so the result is a list on Python 3 too.
        last_bp_addrs = [k for k in last_bp_addrs if k != address]
    except:
        log_alert("Failed to delete breakpoint")
def set_slide(view, address):
    """Record the load-address slide as (debugger PC) - (Binary Ninja address)."""
    global slide
    if 'async' in vers.capabilities:
        # Async-capable debugger: query the live program counter directly.
        res = client.perform_request("registers", registers=["pc"], block=False)
        pc = res.registers.values()[0]
    else:
        # Otherwise we can only use the PC captured during the last sync update.
        if last_pc_addr == 0:
            log_alert("Your debugger does not support async API access, and Binary Ninja hasn't received any data from it yet. Please run the `voltron update` command in the debugger, or step the debugger, or let it run until it hits a breakpoint so Binjatron can get the register state.")
            # Bug fix: without a PC we cannot compute the slide; previously this
            # fell through and raised NameError on the subtraction below.
            return
        pc = last_pc_addr
    slide = pc - address
    # With an async debugger we can refresh immediately; otherwise the user's
    # next step/update will pick up the new slide.
    if 'async' in vers.capabilities:
        client.update()
def clear_slide(view):
    """Reset the load-address slide to zero (the ``view`` argument is unused)."""
    global slide
    slide = 0
def custom_request(request, args, alert=True):
    """Forward an arbitrary request to the Voltron client and refresh its views.

    request -- the request type to perform (usually 'command')
    args -- dict containing keyword arguments for the request
    alert -- if True, failures raise a popup alert; otherwise they are only
        logged to the console. Defaults to True.
    [return] -- the Voltron response object (or None if the request could not
        be built); returned even on error so callers can inspect the failure
    """
    global vers
    client_result = None
    try:
        # Fetch the debugger version lazily; only lldb and gdb are supported.
        if not vers:
            vers = client.perform_request("version")
        if 'lldb' in vers.host_version or 'gdb' in vers.host_version:
            cmd = request
        else:
            raise Exception("Debugger host version {} not supported".format(vers.host_version))
        client_result = client.perform_request(request, **args)
        if client_result.is_error:
            raise Exception("\"" + cmd + "\": {}".format(client_result))
        # update the voltron views
        client.perform_request("command", command="voltron update", block=False)
    except:
        log_info(sys.exc_info()[1])
        if alert:
            log_alert(request + " failed: " + str(args))
        else:
            log_info(request + " failed: " + str(args))
    # Even if we encountered an exception, we return the results so external code can
    # handle the error if necessary.
    return client_result
def register_sync_callback(cb, should_delete=False):
    """Register a callback to be run after each successful sync update.

    cb -- callable invoked with the sync ``results`` list as its argument
    should_delete -- if True, the callback is removed from the list after a
        single call. Defaults to False.
    """
    global sync_callbacks
    sync_callbacks.append((cb, should_delete))
def sync_state():
    """Return True while Voltron synchronisation is active (module flag ``syncing``)."""
    return syncing
class BinjatronNotification(BinaryDataNotification):
    """Mirrors data edits made in Binary Ninja into the debuggee via Voltron."""

    def __init__(self, view):
        self.view = view

    def data_written(self, view, offset, length):
        """Copy bytes just written in Binary Ninja into the debugger's memory."""
        log_info("data_written({:x}, {})".format(offset, length))
        # Read back what was written and push it to the debuggee, rebased by the slide.
        patched = view.read(offset, length)
        write_res = client.perform_request("write_memory", address=offset + slide, value=patched, block=False)
        if not write_res.is_success:
            log_error("Failed to write memory in debugger: {}".format(write_res))
        # Refresh the Voltron views so they show the patched memory.
        client.perform_request("command", command="voltron update", block=False)

    def data_inserted(self, view, offset, length):
        log_info("data_inserted()")

    def data_removed(self, view, offset, length):
        log_info("data_removed()")
PluginCommand.register("Voltron: Sync", "", sync)
PluginCommand.register("Voltron: Stop syncing", "", stop)
PluginCommand.register_for_address("Voltron: Breakpoint set", "", set_breakpoint)
PluginCommand.register_for_address("Voltron: Breakpoint clear", "", delete_breakpoint)
PluginCommand.register_for_address("Voltron: Slide set", "", set_slide)
PluginCommand.register("Voltron: Slide clear", "", clear_slide)
| snare/binjatron | __init__.py | __init__.py | py | 13,988 | python | en | code | 159 | github-code | 13 |
70696504659 | # Problem Statement:-
# Take age or year of birth as an input from the user. Store the input in one variable. Your program should detect whether the entered input is age or year of birth and tell the user when they will turn 100 years old. (5 points).
# Reference year for all age arithmetic below.
current_year = 2021
# Earliest birth year for someone who has not yet turned 100.
last_year = current_year - 99
def age_calcu(a, b, c):
    """Print when the user will turn 100, validating the raw input first.

    Relies on the module-level globals ``age`` (the raw user input, which may
    be an age or a birth year) and ``current_year``.

    a -- upper bound for a plausible input (max age, or the current year)
    b -- lower bound for a plausible input (0, or the earliest birth year)
    c -- number of digits in the input: 1-2 means an age, 4 means a birth year
    """
    if age > a:
        # Bug fix: the error message was accidentally duplicated.
        print("Have you come from future, Enter correct input")
    elif age <= b:
        print("You are not born, Enter correct input")
    elif c == 4:
        # Input was a birth year: the user turns 100 at birth year + 100.
        print(f"You will be turn 100 in {age + 100}")
    elif c in (1, 2):
        # Bug fix: "c == 2 or 1" was always true; test the digit count properly.
        born_year = current_year - age
        print(f"You will turn to 100 in {born_year+100}")
# Read either an age (1-2 digits) or a birth year (4 digits); the digit
# count of the input decides how it is interpreted.
age = int(input("Enter your age or Enter your birth year\n"))
p = len(str(age))
if p == 2 or p == 1:
    # 1-2 digits: treat the input as an age (valid range 1..98).
    age_calcu(98, 0, p)
elif p == 4:
    # 4 digits: treat the input as a birth year.
    age_calcu(current_year, last_year, p)
elif p == 3:
    # A 3-digit value is neither a plausible age nor a year.
    print("Have you come from future")
# Optionally report the user's age in an arbitrary year.
boool = input("Do you want to see your age in random years\n")
if boool == "yes" or boool == "Yes":
    random_year = int(input("Tell the year you want to see your age\n"))
    born_year = current_year - age
    if p == 2:
        # Input was an age: derive the birth year first.
        print(f"You will turn {random_year - born_year} in {random_year}")
    elif p == 4:
        # Input was a birth year: subtract it directly.
        print(f"You will turn {random_year - age} in {random_year}")
else:
    print("Thanks for visiting my code")
#
# Here are a few instructions that you must have to follow:
#
# Do not use any type of module like DateTime or date utils. (-5 points)
# Users can optionally provide a year, and your program must tell their age in that particular year. (3points)
# Your code should handle all sorts of errors like : (2 points)
# You are not yet born
# You seem to be the oldest person alive
# You can also handle any other errors, if possible!
| anant-harryfan/Python_basic_to_advance | PythonTuts/Python_Practise/Practise1.py | Practise1.py | py | 1,796 | python | en | code | 0 | github-code | 13 |
26790194421 | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
    def sortList(self, head: Optional[ListNode]) -> Optional[ListNode]:
        """Return a new sorted linked list holding the values of ``head``."""
        # Pull every value out of the original linked list.
        values = []
        node = head
        while node:
            values.append(node.val)
            node = node.next
        values.sort()
        # Rebuild a fresh list in sorted order behind a dummy head node.
        anchor = tail = ListNode()
        for v in values:
            tail.next = ListNode(val=v)
            tail = tail.next
        return anchor.next
| forestphilosophy/LeetCode_solutions | Interview Questions/sort_list.py | sort_list.py | py | 640 | python | en | code | 0 | github-code | 13 |
31993141125 | """rainforest URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from .views import *
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Landing page and product listing.
    path('', root_path),
    path('home/', home_page, name="home"),
    # Product detail / create / edit / delete.
    path('home/<int:id>', product_display, name='product_details'),
    path('home/new', new_product_page, name='new_product'),
    path('home/<int:id>/edit/', edit_product, name='edit_product'),
    path('home/<int:id>/delete', delete_product, name='delete_product'),
    # Review create / edit / delete, nested under a product.
    path('home/<int:id>/reviews/create', review_product, name='review_product'),
    path('home/<int:product_id>/reviews/<int:review_id>', edit_review, name='edit_review'),
    path('home/<int:product_id>/reviews/<int:review_id>/delete', delete_review, name='delete_review'),
]
| jessiicacmoore/django-rainforest | rainforest/urls.py | urls.py | py | 1,386 | python | en | code | 0 | github-code | 13 |
class Solution:
    def isValid(self, s: str) -> bool:
        """Return True if every bracket in ``s`` is properly matched and nested.

        A closing bracket must match the most recently opened one; any other
        character is treated as an opener and pushed on the stack.
        """
        pairs = {'}': '{', ']': '[', ')': '('}
        stack = []
        for ch in s:
            if ch in pairs:
                # Closing bracket: the stack top must be its matching opener.
                if not stack or stack.pop() != pairs[ch]:
                    return False
            else:
                stack.append(ch)
        # Valid only if every opener has been consumed.
        return not stack
| yeos60490/algorithm | leetcode/easy/valid_parentheses.py | valid_parentheses.py | py | 732 | python | en | code | 0 | github-code | 13 |
2359221682 | from unittest import TestCase, skip
from src.dynamic.mutator import *
import types
class MutatorTest(TestCase):
    """Unit tests for Mutator's runtime code-generation helpers.

    Fix: use assertEqual instead of the deprecated assertEquals alias,
    which was removed in Python 3.12.
    """

    def setUp(self):
        self.mutator = Mutator()

    def test_add_new_method(self):
        mycode = '''def duration(env):
    while True:
        print('we are at moment %d' % env.now)
        duration = 10
        yield env.timeout(duration)
        '''
        self.mutator.compile_code(mycode)
        context = self.mutator.get_context()
        self.assertIn('duration', context)
        selector = context['duration']
        self.assertTrue(isinstance(selector, types.FunctionType))

    def test_code_signature(self):
        mycode = '''def duration(env):
    while True:
        print('we are at moment %d' % env.now)
        duration = 10
        yield env.timeout(duration)
        '''
        signature = self.mutator.get_signature_string(mycode)
        self.assertEqual('duration', signature)

    def test_add_new_method_and_reset(self):
        mycode = '''def stub(env):
            return 34
        '''
        self.mutator.compile_code(mycode)
        context = self.mutator.get_context()
        self.assertIn('stub', context)
        # Passing True resets the context, dropping previous definitions.
        context = self.mutator.get_context(True)
        self.assertNotIn('stub', context)

    def test_define_method(self):
        mycode = ''' return 34'''
        self.mutator.define_new_method('stub', [], mycode)
        context = self.mutator.get_context()
        self.assertIn('stub', context)
        selector = context['stub']
        self.assertTrue(isinstance(selector, types.FunctionType))
        self.assertEqual(34, selector())

    def test_with_arg_list(self):
        mycode = ''' return param '''
        self.mutator.define_new_method('stub', ['param'], mycode)
        context = self.mutator.get_context()
        self.assertIn('stub', context)
        selector = context['stub']
        param = 24
        self.assertEqual(param, selector(param))

    def test_with_variable_arg_list(self):
        mycode = ''' return args '''
        self.mutator.define_new_method('stub', [], mycode, variable_args=True)
        context = self.mutator.get_context()
        self.assertIn('stub', context)
        selector = context.get_selector('stub')
        param = (34, 21)
        self.assertEqual(param, selector(*param))
        another_param = ('324', 'test', 3)
        self.assertEqual(another_param, selector(*another_param))

    def test_create_class(self):
        class_name = 'TestClass'
        test_class = self.mutator.create_class(class_name)
        self.assertEqual(class_name, test_class.__name__)
        self.assertEqual((object,), test_class.__bases__)

    def test_create_class_with_methods(self):
        mycode = ''' return 134 '''
        self.mutator.define_new_method('test_method', [], mycode, instance_bound=True)
        # Finally create the class
        class_name = 'TestClass'
        test_class = self.mutator.create_class(class_name, bound_context=True)
        self.assertEqual(class_name, test_class.__name__)
        self.assertEqual((object,), test_class.__bases__)
        self.assertEqual(134, test_class().test_method())

    def test_set_method_to_instance(self):
        class A(object):
            pass
        instance = A()
        mycode = ''' return 432 '''
        new_selector = self.mutator.define_instance_method(instance, 'test_method', [], mycode)
        self.assertTrue(isinstance(new_selector, types.FunctionType))
        self.assertTrue(isinstance(instance.test_method, types.FunctionType))
        self.assertEqual(instance.test_method, new_selector)
        self.assertEqual(432, instance.test_method())
| bossiernesto/simulacion-dsl | test/test_mutator.py | test_mutator.py | py | 3,675 | python | en | code | 0 | github-code | 13 |
27396560774 | import matplotlib
from matplotlib import pyplot
matplotlib.use("Qt4Agg", warn=False)
pyplot.rcParams.update({'font.size': 22})
from scipy import special
import numpy
kpc2cm = 3.08568e+21  # one kiloparsec in centimetres
g2msun = 5.02785e-34  # grams to solar masses (presumably 1 / M_sun[g]; unused below)
def p2(a):
    """Square of ``a``."""
    return a * a

def p3(a):
    """Cube of ``a``."""
    return a * a * a

def analytical_mass(r, rho0, rc, rcut, beta):
    """Closed-form enclosed gas mass within radius ``r`` for the cut-off profile.

    NOTE(review): ``beta`` is accepted for signature symmetry with
    analytical_density but is not used in this closed form.
    """
    r2 = p2(r)
    rc2 = p2(rc)
    rcut2 = p2(rcut)
    sqrt2 = numpy.sqrt(2)
    # Logarithmic piece of the integral.
    log_term = (rc2 - rcut2)*(numpy.log(rcut2 - sqrt2*rcut*r + r2) \
        - numpy.log(rcut2 + sqrt2*rcut*r + r2))
    # Arctangent pieces of the integral.
    atan_plus = 2*(rc2 + rcut2)*numpy.arctan(1 + sqrt2*r/rcut)
    atan_minus = 2*(rc2 + rcut2)*numpy.arctan(1 - sqrt2*r/rcut)
    # NB the paper is slightly different: equation 2 does not contain 4*pi*rho
    mass_within_r = (4 * numpy.pi * rho0) *\
        rc2*p3(rcut)/(8*(p2(rcut2)+p2(rc2))) *\
        (sqrt2*(log_term - atan_minus + atan_plus) - 8*rc*rcut*numpy.arctan(r/rc))
    return mass_within_r

def analytical_density(r, rho0, rc, rcut, beta):
    """Shell-weighted (4*pi*r^2) gas density of the cut-off beta profile at ``r``."""
    profile = rho0 * (1 + p2(r/rc))**(-1.5*beta)
    profile /= (1 + p3(r/rcut) * (r/rcut))
    return 4*numpy.pi*p2(r)*profile
# Accumulators for the table before / after the "END OF TABLE" marker.
r_list = []
rho_list = []
r_spline_list = []
rho_spline_list = []
# NOTE(review): the two lists below are never filled or plotted.
Mr_numerical_list = []
Mr_twothirds_list = []
found_spline = False
# Parse lines of the form "...=<r>,...=<rho>"; rows after "END OF TABLE"
# go into the *_spline_* lists.
with open("test.txt", "r") as f:
    for i, line in enumerate(f.readlines()):
        if "END OF TABLE" in line:
            found_spline = True
            continue
        columns = line.split(",")
        r = columns[0].split("=")[-1]
        rho = columns[1].split("=")[-1]
        if not found_spline:
            r_list.append(float(r))
            rho_list.append(float(rho))
        else:
            r_spline_list.append(float(r))
            rho_spline_list.append(float(rho))
# Overlay the parsed curves with the analytical beta-model expressions
# (rho0=1, rc=25, rcut=1200, beta=2/3) on log-log axes.
pyplot.figure(figsize=(12, 9))
pyplot.loglog(r_list, rho_list, label="rho spline")
pyplot.loglog(r_spline_list, rho_spline_list, label="integrated spline mass")
pyplot.loglog(r_spline_list, analytical_mass(numpy.array(r_spline_list), 1, 25, 1200, 1/1.5), label="analytical 2/3 mass")
pyplot.loglog(r_spline_list, analytical_density(numpy.array(r_spline_list), 1, 25, 1200, 1/1.5), label="analytical 2/3 density")
pyplot.legend(loc="best")
pyplot.show()
| tlrh314/CygnusAMerger | plot_test.py | plot_test.py | py | 2,152 | python | en | code | 0 | github-code | 13 |
34787125758 | #!/usr/bin/env python3.6
"""
Provides ability to start a VM (with provided metadata), list available VMs and kill an existing VM.
"""
from __future__ import with_statement
import sys
import os
import logging
import subprocess
import traceback
# openstack source: https://github.com/openstack/openstacksdk/tree/master/openstack/network/v2
import openstack
from novaclient import client
#####################################################
# OpenStack parameters
#####################################################
SNAPSHOT_NAME = "PC_deployment_base"   # image used for new VMs
FLAVOR = "m2.medium"                   # flavor used for new VMs
KEYPAIR_NAME = 'PCMain'
NETWORK_NAME = "TestPC"                # deployment network new VMs attach to
KID_NETWORK_NAME = "Kidnet External"   # network the floating IPs come from
# Servers whose names start with this prefix are skipped by list_servers().
EXCLUDE_SERVER_PREFIX = "PC_deployment"
SECURITY_GROUPS = ["default", "ingress_cidr_local_tcp_8080","ingress_cidr_local_tcp_8090"]
OS_TENANT_NAME="HSC_CCM_PhenoTips"
#####################################################
# script parameters
# Output file for list_servers(); holds a Python-literal dict.
SERVER_LIST_FILE_NAME = "server_list.txt"
DEFAULT_BRANCH_NAME = 'master'
# list of supported projects, and repositories needed to build each project
PROJECTS = { "PhenomeCentral": { "pn": "Patient Network",
                                 "rm": "Remote Matching",
                                 "pc": "PhenomeCentral",
                               },
             "PhenoTips": { "pt": "PhenoTips"
                          }
           }
def script(settings):
    """Entry point: connect to OpenStack and perform the requested action.

    settings.action is one of 'list', 'deploy' or 'delete'; 'deploy' also
    reads settings.project, settings.build_name and per-repo branch names.
    """
    # Initialize and turn on debug openstack logging
    openstack.enable_logging(debug=True)
    logging.info("Initialize and turn on debug openstack logging")
    # Connection
    credentials = get_credentials()
    logging.info("Got OpenStack credentials {0}".format(credentials))
    conn = openstack.connect(**credentials)
    logging.info("Connected to OpenStack")
    if settings.action == 'list':
        list_servers(conn)
        sys.exit(0)
    if settings.action == 'deploy':
        # check if a custom build name should be set (only when deploying)
        all_default_branches = True
        settings.branch_names = {}
        for repo in PROJECTS[settings.project].keys():
            settings.branch_names[repo] = getattr(settings, repo + "_branch_name");
            if settings.branch_names[repo] != DEFAULT_BRANCH_NAME:
                all_default_branches = False
        # Derive a build name from the branch names when none was given.
        if settings.build_name == DEFAULT_BRANCH_NAME and not all_default_branches:
            settings.build_name = "_".join(settings.branch_names.values())
            logging.info("Setting build name to {0}".format(settings.build_name))
    # find if there already exists a VM with the build name
    server = conn.compute.find_server(settings.build_name)
    # if a VM with the same build name already exists - delete it
    if server:
        logging.info("Server for build %s exists, deleting server.........." % settings.build_name)
        conn.compute.delete_server(server, ignore_missing=True, force=True)
        conn.compute.wait_for_delete(server)
        logging.info("Server %s deleted" % settings.build_name)
    # For 'delete', removing the existing VM above is all there is to do.
    if settings.action == 'delete':
        sys.exit(0)
    server = create_server(conn, settings)
    add_floatingip(conn, server)
def add_floatingip(conn, server):
    """Attach an available floating IP to ``server`` via the openstack CLI.

    Exits the process with code -4 when the CLI call fails.
    """
    logging.info("Assigning floating IPs..........")
    fip = get_floating_ip(conn)
    # Shell out to the openstack CLI to associate the address with the server.
    retcode = subprocess.call(['openstack', 'server', 'add', 'floating', 'ip', server.name, fip.floating_ip_address])
    if retcode != 0:
        # Fix: error message previously misspelled "assigning" as "assiging".
        logging.error('Error: assigning floating_ip_address {0} failed'.format(fip.floating_ip_address))
        sys.exit(-4)
    else:
        logging.info("-- FLOATING IP ASSOCIATED: {0}".format(fip))
def create_server(conn, settings):
    """Boot a new VM from the base snapshot with project/branch metadata.

    Returns the ACTIVE server object, or exits the process (code -3) when the
    VM fails to start in time.
    """
    # Resolve the image, flavor, network and keypair configured above.
    image = conn.compute.find_image(SNAPSHOT_NAME)
    flavor = conn.compute.find_flavor(FLAVOR)
    network = conn.network.find_network(NETWORK_NAME)
    keypair = conn.compute.find_keypair(KEYPAIR_NAME)
    sgroups = []
    for group in SECURITY_GROUPS:
        sgroup = conn.network.find_security_group(group)
        if sgroup is not None:
            sgroups.append({"name": sgroup.name})
        else:
            logging.error("Security group {0} not found".format(group))
            # keep going, this is a minor error
    # VM metadata: project ('pr'), build name ('bn') and one key per repo
    # holding its branch name.
    metadatau = {}
    metadatau['pr'] = settings.project
    metadatau['bn'] = settings.build_name
    for repo in PROJECTS[settings.project].keys():
        metadatau[repo] = settings.branch_names[repo]
    logging.info("Setting VM metadata to {0}".format(str(metadatau)))
    logging.info("Creating a new VM..........")
    try:
        server = conn.compute.create_server(
            name=settings.build_name, image_id=image.id, flavor_id=flavor.id,
            networks=[{"uuid": network.id}], security_groups=sgroups,
            key_name=keypair.name, metadata=metadatau)
        # Wait for a server to be in a status='ACTIVE'
        # interval - Number of seconds to wait before to consecutive checks. Default to 2.
        # wait - Maximum number of seconds to wait before the change. Default to 120.
        server = conn.compute.wait_for_server(server, interval=30, wait=1200)
        return server
    except:
        # Boot failed or timed out: log what we can find out, then bail.
        logging.info("-- FAILED TO START A VM (timeout?)")
        server = conn.compute.find_server(settings.build_name)
        if server:
            logging.info("-- STATUS: {0}".format(server.status))
        else:
            logging.info("-- VM with name {0} not found".format(settings.build_name))
        sys.exit(-3)
def list_servers(conn):
    """Collect all deployment VMs plus project quota usage and dump to a file.

    Lists every server on the deployment network (skipping servers that are
    not on NETWORK_NAME and any whose name matches EXCLUDE_SERVER_PREFIX),
    records each server's floating IP and metadata, then queries nova for the
    project's absolute usage limits and the resources one more FLAVOR-sized
    VM would need.

    Side effect: writes the collected dict (Python repr) to
    SERVER_LIST_FILE_NAME, overwriting any previous contents.
    """
    # openstack server list
    servers_list = conn.compute.servers()
    logging.info("List: {0}".format(str(servers_list)))
    data = {'servers' : [], 'usage' : {}}
    for server in servers_list:
        ipf = ''
        if server.status != 'BUILD' and NETWORK_NAME not in server.addresses.keys():
            # exclude servers not in the PC deployment network
            continue
        if server.name.startswith(EXCLUDE_SERVER_PREFIX):
            # exclude the frontend itself, and any other development servers
            continue
        logging.info("Listing server : {0}".format(server.name))
        if NETWORK_NAME in server.addresses.keys():
            # pick the floating address among the server's addresses
            for address in server.addresses[NETWORK_NAME]:
                if address['OS-EXT-IPS:type'] == 'floating':
                    ipf = address['addr']
        else:
            ipf = "not assigned"
        data['servers'].append({'id' : server.id, 'name' : server.name, 'ip' : ipf, 'created' : server.created_at, 'status' : server.vm_state, 'metadata' : server.metadata})
    # Get CPU and memory usage stats via nova
    credentials = get_credentials()
    credentials['version'] = 2
    nova = client.Client(**credentials)
    logging.info("Authorised with nova")
    usage = nova.limits.get("HSC_CCM_PhenoTips").to_dict()
    logging.info("Got usage info")
    logging.info(usage)
    data['usage'] = usage['absolute']
    # nova reports RAM in MB; convert to GB for display
    data['usage']['totalRAMUsed'] = round(data['usage']['totalRAMUsed'] / 1024)
    data['usage']['maxTotalRAMSize'] = round(data['usage']['maxTotalRAMSize'] / 1024)
    # Add flavor required VCPUs number and RAM to spin one more server
    flavor = conn.compute.find_flavor(FLAVOR)
    flavor = nova.flavors.get(flavor.id)
    data['usage']['requiredRAM'] = round(flavor.ram / 1024)
    data['usage']['requiredCores'] = flavor.vcpus
    data['usage']['requiredDisc'] = flavor.disk
    print(data, file=open(SERVER_LIST_FILE_NAME, "w"))
# Retrieves an unassociated floating IP if available (one that doesn't have a fixed IP address), or allocates one from the pool
def get_floating_ip(conn):
    """Return an unassociated floating IP, allocating one from the pool if needed."""
    pool_network = conn.network.find_network(KID_NETWORK_NAME)
    ip = conn.network.find_available_ip()
    if ip:
        # An unassociated floating IP already exists — reuse it.
        logging.info('FLOATING IP: {0}'.format(ip))
        return ip
    # None free: allocate a fresh floating IP from the external network pool.
    ip = conn.network.create_ip(floating_network_id=pool_network.id)
    logging.info("->CREATED FLOATING IP: {0}".format(ip))
    return ip
# get credentials from Environment Variables set by running HSC_CCM_PhenoTips-openrc.sh
def get_credentials():
    """Build a credentials dict from the OS_* environment variables.

    The variables are expected to have been exported by sourcing the
    project's openrc.sh file.  Both 'api_key' and 'password' carry the
    OS_PASSWORD value because different clients expect different key names.
    """
    env = os.environ
    logging.info("Environment variables: OpenStack username: [{0}]".format(env['OS_USERNAME']))
    logging.info("Environment variables: OpenStack URL: [{0}]".format(env['OS_AUTH_URL']))
    return {
        'version': env['OS_IDENTITY_API_VERSION'],
        'username': env['OS_USERNAME'],
        'api_key': env['OS_PASSWORD'],
        'auth_url': env['OS_AUTH_URL'],
        'project_name': env['OS_PROJECT_NAME'],
        'region_name': env['OS_REGION_NAME'],
        'password': env['OS_PASSWORD'],
        'user_domain_name': env['OS_USER_DOMAIN_NAME'],
        'project_domain_name': env['OS_PROJECT_DOMAIN_NAME'],
    }
def setup_logfile(settings):
    """Configure logging to a per-action log file, the console and, for
    deployments, a web-accessible 'latest deploy' log.

    The main log file is named after the action (or the build name for
    'deploy') and is truncated on every run.
    """
    if settings.action != 'deploy':
        logname = settings.action
        web_accessible_log_file = None
    else:
        logname = settings.build_name
        # deployments also mirror the log where the web frontend can read it
        web_accessible_log_file = 'webapps/phenotips/resources/latest_deploy.log'
    main_log_file = 'pc_openstack_{0}.log'.format(logname)
    format_string = '%(levelname)s: %(asctime)s: %(message)s'
    # wipe out existing log files with the same name if exists
    open(main_log_file, 'w').close()
    # setup logging
    logging.basicConfig(filename=main_log_file, level=logging.INFO, format=format_string)
    # clone output to console
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    console.setFormatter(logging.Formatter('[SCRIPT] %(levelname)s: %(message)s'))
    logging.getLogger('').addHandler(console)
    if web_accessible_log_file is not None:
        open(web_accessible_log_file, 'w').close()
        # clone output to "latest log" file
        web_accessible_log = logging.FileHandler(web_accessible_log_file)
        web_accessible_log.setFormatter(logging.Formatter(format_string))
        logging.getLogger('').addHandler(web_accessible_log)
def parse_args(args):
    """Parse the command line for the deployment script.

    Args:
        args: argument list to parse (normally sys.argv[1:]).

    Returns:
        The parsed namespace.  Calls parser.error() (which exits) when
        action == 'deploy' but no project was selected.
    """
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument("--action", dest='action', required=True,
                        help="action that user intended to do: kill a running VM ('delete'), get list of currently running VMs to the 'serever_list.txt' file ('list'), or spin a new one ('deploy')")
    parser.add_argument("--project", dest='project',
                        default=None,
                        choices=PROJECTS.keys(), help="when deploying, the name of the project to be deployed into a VM (required)")
    # one branch-name option per repository of every known project
    for project, branches in PROJECTS.items():
        for repo, repo_name in branches.items():
            parser.add_argument("--" + repo, dest=repo+'_branch_name',
                                default=DEFAULT_BRANCH_NAME,
                                help="branch name for " + repo_name + " repo ('{0}' by default)".format(DEFAULT_BRANCH_NAME))
    parser.add_argument("--build-name", dest='build_name',
                        default=DEFAULT_BRANCH_NAME,
                        help="custom build name (by default '{0}' or '[pn_branch_name]_[rm_branch_name]_[pc_branch_name]') if any of branch names provided)".format(DEFAULT_BRANCH_NAME))
    # the *args* parameter was previously ignored (parser.parse_args() always
    # re-read sys.argv); honour it so callers can inject arguments
    args = parser.parse_args(args)
    if args.action == "deploy" and args.project is None:
        parser.error("Deploy actions requires a project to be selected")
    return args
def main(args=sys.argv[1:]):
    """Script entry point: parse arguments, set up logging, run the action.

    Returns the process exit code via sys.exit on failure (-1 when the
    action raises).  NOTE(review): the default for *args* is captured once
    at import time; harmless for CLI use, but callers should pass an
    explicit list.
    """
    settings = parse_args(args)
    setup_logfile(settings)
    logging.info('Started deployment with arguments: [' + ' '.join(sys.argv[1:]) + ']')
    try:
        script(settings)
    except Exception:
        logging.error('Exception: [{0}]'.format(traceback.format_exc()))
        sys.exit(-1)
if __name__ == '__main__':
    sys.exit(main())
#!/usr/bin/env python3
import argparse
import json
import re
import shutil
import time
from datetime import datetime
from os import chdir, fork, remove, rmdir
from sys import argv
def sub(tag_id, entry, body):
    """Replace every `{- tag_id -}` placeholder in *body* with *entry*.

    The placeholder is matched literally (re.escape) and the replacement is
    inserted verbatim via a callable, so entries containing regex
    metacharacters or backslashes (e.g. Windows paths) no longer raise
    "bad escape" errors or get mangled as group references.
    """
    tag_string = re.escape(f"{{- {tag_id} -}}")
    return re.sub(tag_string, lambda _match: entry, body)
def modify_readme_content(content, args):
    """Render the README template for the requested layout.

    Substitutes the project name and license placeholders, keeps only the
    header block matching the requested image alignment, then strips all
    remaining header template markers.

    Bug fix: `re.sub`'s fourth positional parameter is *count*, not *flags*,
    so `re.DOTALL | re.MULTILINE` was silently being passed as a replacement
    count; the flags are now passed by keyword.
    """
    content = sub("name", args.name, content)
    content = sub("license", args.license, content)
    if args.readme.startswith("cent"):
        content = sub("image_alignment", "center", content)
        # keep only the centered header variant
        content = re.sub(
            r"{- header:center:start -}\n?([\s\S]*?)\n?{- header:center:end -}",
            r"\1",
            content,
            flags=re.DOTALL | re.MULTILINE,
        )
    else:
        content = sub("image_alignment", args.readme, content)
        # keep only the left/right-aligned header variant
        content = re.sub(
            r"{- header:left_or_right:start -}\n?([\s\S]*?)\n?{- header:left_or_right:end -}",
            r"\1",
            content,
            flags=re.DOTALL | re.MULTILINE,
        )
    # drop any header blocks that were not selected above
    content = re.sub(
        r"{- header[\s\S]*? -}[\s\S]*?{- header[\s\S]*? -}",
        "",
        content,
        flags=re.DOTALL | re.MULTILINE,
    )
    return content.lstrip("\n")
def modify_license_content(content, args):
    """Render the LICENSE template for the chosen license.

    Fills in author, current year and program name, keeps only the block for
    the selected license, then strips all remaining license markers.

    Bug fix: `re.sub`'s fourth positional parameter is *count*, not *flags*,
    so `re.DOTALL | re.MULTILINE` was silently being passed as a replacement
    count; the flags are now passed by keyword.
    """
    content = sub("author", args.author, content)
    content = sub("year", str(datetime.now().year), content)
    content = sub("program", args.name, content)
    # keep only the block for the selected license
    content = re.sub(
        fr"{{- license:{args.license}:start -}}\n?([\s\S]*?)\n?{{- license:{args.license}:end -}}",
        r"\1",
        content,
        flags=re.DOTALL | re.MULTILINE,
    )
    # drop the blocks of every other license
    content = re.sub(
        r"{- license[\s\S]*? -}[\s\S]*?{- license[\s\S]*? -}",
        "",
        content,
        flags=re.DOTALL | re.MULTILINE,
    )
    return content.strip("\n")
def print_warning():
    """Print the self-deletion warning followed by a 3-2-1 countdown."""
    print("WARNING: This script will delete itself after the generation!!!")
    # (label, line ending, force-flush) for each countdown tick
    for label, tail, flush_now in (("3", " ", True), ("2", " ", True), ("1...", "\n", False)):
        print(label, end=tail, flush=flush_now)
        time.sleep(1)
def main():
    """Generate README.md and LICENSE from the kickstart templates.

    Reads the template configuration, parses the command line, optionally
    copies the project into a target directory, renders the README and
    LICENSE templates, then removes the templates, CI workflows and this
    generator script itself.
    """
    with open("./config.kickstart", "r") as f:
        config = json.load(f)
    licenses, readmes = config["license"], config["readmes"]
    parser = argparse.ArgumentParser(description="Kickstart your GitHub project!")
    # `-q` is a boolean flag; it previously declared a metavar (and no
    # action) and therefore demanded a value.  `store_true` matches how
    # `args.quiet` is tested below.
    parser.add_argument(
        "-q",
        dest="quiet",
        action="store_true",
        help="suppress warning",
    )
    parser.add_argument(
        "--dir",
        "-d",
        dest="target_dir",
        metavar="path",
        help="target directory",
        required=False,
    )
    parser.add_argument(
        "--name",
        "-n",
        dest="name",
        metavar="name",
        help="your project name",
        required=True,
    )
    parser.add_argument(
        "--author",
        "-a",
        dest="author",
        metavar="author",
        help="your name",
        required=True,
    )
    parser.add_argument(
        "--license",
        "-l",
        dest="license",
        metavar="name",
        help="license to be used",
        required=True,
        choices=licenses,
    )
    parser.add_argument(
        "--readme",
        "-r",
        dest="readme",
        metavar="type",
        help="type of README.md to be used",
        required=True,
        choices=readmes,
    )
    args = parser.parse_args()
    if args.target_dir:
        # work on a copy of the project in the target directory,
        # without the original git history
        shutil.copytree('.', args.target_dir, dirs_exist_ok=True)
        chdir(args.target_dir)
        shutil.rmtree("./.git")
    if not args.quiet:
        print_warning()
    with open("./readme.kickstart", "r") as f:
        readme_content = f.read()
    with open("./license.kickstart", "r") as f:
        license_content = f.read()
    readme_content = modify_readme_content(readme_content, args)
    license_content = modify_license_content(license_content, args)
    with open("./README.md", "w") as f:
        f.write(readme_content)
    with open("./LICENSE", "w") as f:
        f.write(license_content)
    # remove templates, and the generator
    remove("./readme.kickstart")
    remove("./license.kickstart")
    remove("./config.kickstart")
    shutil.rmtree("./.github/workflows")
    shutil.rmtree("./.venv")
    remove(argv[0])
if __name__ == "__main__":
main()
| poyea/github.init | kickstart.py | kickstart.py | py | 4,225 | python | en | code | 3 | github-code | 13 |
2250427239 | """Training DAgger with an interactive policy that queries the user for actions.
Note that this is a toy example that does not lead to training a reasonable policy.
"""
import tempfile
import gymnasium as gym
import numpy as np
from stable_baselines3.common import vec_env
from imitation.algorithms import bc, dagger
from imitation.policies import interactive
if __name__ == "__main__":
    # Seed a dedicated RNG so the demo is reproducible.
    rng = np.random.default_rng(0)
    # Pong episodes are capped at 10 steps to keep this toy example fast.
    env = vec_env.DummyVecEnv([lambda: gym.wrappers.TimeLimit(gym.make("Pong-v4"), 10)])
    env.seed(0)
    # The "expert" is a human: actions are queried interactively from the user.
    expert = interactive.AtariInteractivePolicy(env)
    # Behavioral-cloning learner that DAgger will train on the collected data.
    bc_trainer = bc.BC(
        observation_space=env.observation_space,
        action_space=env.action_space,
        rng=rng,
    )
    # DAgger needs a scratch directory for its demonstration round-trips;
    # use a temporary one that is cleaned up automatically.
    with tempfile.TemporaryDirectory(prefix="dagger_example_") as tmpdir:
        dagger_trainer = dagger.SimpleDAggerTrainer(
            venv=env,
            scratch_dir=tmpdir,
            expert_policy=expert,
            bc_trainer=bc_trainer,
            rng=rng,
        )
        dagger_trainer.train(
            total_timesteps=20,
            rollout_round_min_episodes=1,
            rollout_round_min_timesteps=10,
        )
import os
from flask import Flask, request, send_from_directory
from flask_socketio import SocketIO, emit
import subprocess
from subprocess import PIPE
from engine_communications import read_board_from_engine, read_possible_moves_from_engine, send_move_to_engine
app = Flask(__name__, static_folder="./build")
DEVELOP = False
options = {} if not DEVELOP else {"cors_allowed_origins": "*"}
socketio = SocketIO(app, **options)
state_map = {}
@socketio.on("connect")
def connect():
    # Log each new Socket.IO connection; no per-client state is created yet
    # (the engine subprocess is only spawned in start_game()).
    print("Connecting")
@socketio.event
def start_game():
    """Spawn a fresh chess engine subprocess for this client and emit the
    initial board plus the legal moves.

    The engine process is keyed by the Socket.IO session id in state_map,
    so every connected client plays against its own engine instance.
    """
    print("start_game", request.sid)
    engine = subprocess.Popen(r"./chess_ai/target/release/xo_ai", stdin=PIPE, stdout=PIPE)
    state_map[request.sid] = engine
    board = read_board_from_engine(engine)
    print(board)
    emit("board", board)
    possible_moves = read_possible_moves_from_engine(engine)
    print(possible_moves)
    emit("possible_moves", possible_moves)
    return True
class InvalidMove(Exception):
    """Raised when the engine rejects a requested move."""
@socketio.event
def do_move(json):
    """Forward the client's move to its engine and emit the updated state.

    Raises:
        InvalidMove: if the engine rejects the move.
    """
    print("do_move")
    engine = state_map[request.sid]
    r = json  # `json` is the Socket.IO payload dict here, not the stdlib module
    ok, resp = send_move_to_engine(r["move"], 3, engine)
    if not ok:
        raise InvalidMove(resp)
    emit("board", read_board_from_engine(engine))
    # NOTE(review): the board is read and emitted twice — presumably once for
    # the player's move and once for the engine's reply; confirm against the
    # engine's output protocol.
    emit("board", read_board_from_engine(engine))
    emit("possible_moves", read_possible_moves_from_engine(engine))
    return True
@app.route("/ping")
def ping():
    # Trivial health-check endpoint.
    print("Ping")
    return "pong"
# Serve React App
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def serve(path):
    """Serve the React build: static assets when they exist, otherwise
    fall back to index.html so client-side routing keeps working."""
    print("Serving", path)
    if path != "" and os.path.exists(app.static_folder + '/' + path):
        return send_from_directory(app.static_folder, path)
    else:
        return send_from_directory(app.static_folder, 'index.html')
if __name__ == "__main__":
    # SocketIO.run expects the Flask app as its first argument; host and port
    # are keyword parameters.  Previously the host string was passed where
    # the app belongs.
    socketio.run(app, host="0.0.0.0", port=5000)
import pygame, sys, copy
from pygame.locals import *
pygame.init()
# WICHTIGE VARIABLEN
res = (720,720)
framerate = 30
winCondition = 5 # = Anzahl der eig. Steine, die in gegn. Basis sein müssen, um zu gewinnen
grid_color = (161,115,81)
player1_color = (255,156,27)
player2_color = (106,199,229)
player_color_select = (255,255,255)
player_color_movable = (255,255,255)
player_color_goals = (206,168,120)
player_color_sep = (173,14,115)
player_color_noGoals = (100,100,100)
player1_color_base = (178,120,45)
player2_color_base = (121,122,123)
black = (0,0,0)
grid_color_background = (55,40,32)
# Game states und Spielphasen
stateInitialize, stateControls, stateGame, stateWin = range(4)
gameState = stateInitialize
phaseChoseMovable, phaseChoseGoal, phaseMove = range(3)
# Unwichtig
screen = pygame.display.set_mode(res)
clockObj = pygame.time.Clock()
surf_win = pygame.font.Font(None, 50).render("GEWONNEN!", True, (0,0,0))
player1_controls1_sf = pygame.font.Font(None, 30).render("Steuerung Spieler 1:", True, player1_color)
player1_controls2_sf = pygame.font.Font(None, 25).render("Auswählen: D Bestätigen: S Zurück: E", True, player1_color)
player2_controls1_sf = pygame.font.Font(None, 30).render("Steuerung Spieler 2:", True, player2_color)
player2_controls2_sf = pygame.font.Font(None, 25).render("Auswählen: Pfeil nach rechts", True, player2_color)
player2_controls3_sf = pygame.font.Font(None, 25).render("Bestätigen: Enter", True, player2_color)
player2_controls4_sf = pygame.font.Font(None, 25).render("Zurück: Backspace", True, player2_color)
screen_controls_sf = pygame.font.Font(None, 30).render("'Leertaste' um fortzufahren", True, grid_color)
# KLASSEN DEFINITIONEN
class Game:
    """Board state and rendering: the grid, both bases, phase markers."""

    def __init__(self):
        # All values are tweakable!
        self.squares_amount = 8 # if < 8, adjust player2's start positions
        self.grid_offset = 100
        self.margin = 2
        self.margin2 = 7
        self.square_width = res[0]//(self.squares_amount+4)
        self.gridReset()
        # coordinates just outside the board, used for bounds checks
        self.grid_borders = [-1, self.squares_amount]
    def gridReset(self):
        # Clear all phase markers (movable/goal/selection flags) from the grid.
        self.grid_array = [[0 for column in range(self.squares_amount)] for row in range(self.squares_amount)]
    def draw(self):
        """Draw the board, both bases, both players' worms and the current
        phase markers encoded in grid_array (1=movable, 2/2.3=goal,
        4/4.3=bite-through goal, .3=selected, 5=no goals)."""
        for row in range(self.squares_amount):
            for column in range(self.squares_amount):
                # Grid
                square_posx = self.grid_offset + column*self.square_width + column*self.margin
                square_posy = self.grid_offset + row*self.square_width + row*self.margin
                square = [square_posx, square_posy, self.square_width, self.square_width]
                pygame.draw.rect(screen, grid_color, square)
                # Base Player 1
                if [column, row] in player1.base:
                    pygame.draw.rect(screen, player1_color_base, square)
                # Base Player 2
                if [column, row] in player2.base:
                    pygame.draw.rect(screen, player2_color_base, square)
                # Player 1
                for worm in player1.worms:
                    for pos in worm:
                        if column == pos[0] and row == pos[1]:
                            pygame.draw.ellipse(screen, player1.color, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin])
                # Player 2
                for worm in player2.worms:
                    for pos in worm:
                        if column == pos[0] and row == pos[1]:
                            pygame.draw.ellipse(screen, player2.color, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin])
                # Phase 1: movable positions
                if self.grid_array[row][column] == 1:
                    pygame.draw.ellipse(screen, player_color_movable, [square_posx+self.margin2, square_posy+self.margin2, self.square_width-2*self.margin2, self.square_width-2*self.margin2])
                # Phase 2: NORMAL goal positions
                if self.grid_array[row][column] in [2, 2.3]:
                    pygame.draw.ellipse(screen, player_color_goals, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin])
                # Phase 2: goal position that bites through an opposing worm
                if self.grid_array[row][column] in [4, 4.3]:
                    pygame.draw.ellipse(screen, player_color_sep, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin])
                # Phase 2: currently SELECTED goal position (drawn as outline)
                if self.grid_array[row][column] in [2.3, 4.3]:
                    pygame.draw.ellipse(screen, player_color_select, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin],5)
                # Phase 2: no goal positions available
                if self.grid_array[row][column] == 5:
                    pygame.draw.ellipse(screen, player_color_noGoals, [square_posx+self.margin, square_posy+self.margin, self.square_width-2*self.margin, self.square_width-2*self.margin])
class Worm:
    """A player: one or more worms (chains of stones) plus base and color."""

    def __init__(self, p1,p2,p3,p4,p5,p6, color):
        self.worms = [[p1,p2,p3,p4,p5,p6]]
        # self.worms holds one or more worms made of position pairs:
        # worms[a] = one whole worm (of possibly several); worms[a][b] = a single
        # stone (x and y) of a worm; worms[a][b][c] = the x or y coordinate of a
        # stone of a worm
        self.color = color
        # the six starting squares double as this player's base
        self.base = copy.deepcopy(self.worms)[0]
    def movablePositions(self):
        # = return the movable head and tail stones of all worms (= positions)
        pos = []
        for worm in self.worms:
            # only movable if the worm consists of more than one stone
            if len(worm) > 1:
                pos.extend([worm[0], worm[-1]])
        return pos
    def goalPositions(self, pos_goal):
        # = return the positions the selected end stone of a worm could move to.
        # Returns [index of the selected worm, the worm's other end (the stone
        # that will actually move), all goals (normal + bite-through), normal
        # goals only, squares behind a bitten stone, the bitten opposing stones].
        goals = []
        worm_recent = None
        goals_remove = []
        goals_sep1, goals_sep2 = [], []
        goal_temp = None
        player_recent_pos = []
        [player_recent_pos.extend(worm) for worm in player_recent.worms]
        player_other_pos = []
        [player_other_pos.extend(worm) for worm in player_other.worms]
        # 1. FIND THE WORM THE SELECTED STONE BELONGS TO
        for worm in self.worms:
            for pos in worm:
                if pos == pos_goal:
                    worm_recent = worm
                    break
        worms_withoutRecent = self.getWormsWithoutRecent(worm_recent)
        # 2. FIND THE OTHER END OF THE WORM
        if worm_recent.index(pos_goal) == 0:
            pos_move = worm_recent[-1]
        else:
            pos_move = worm_recent[0]
        # 3. DETERMINE POSSIBLE TARGET POSITIONS
        goals = [[pos_goal[0]+1, pos_goal[1]], [pos_goal[0], pos_goal[1]+1], [pos_goal[0]-1, pos_goal[1]], [pos_goal[0], pos_goal[1]-1]]
        goals_dir = [[1,0], [0,1], [-1,0], [0,-1]]
        for i in range(len(goals)):
            adjacentPositions = self.getAdjacent(goals[i], worms_withoutRecent)
            # position OUTSIDE the board
            if goals[i][0] in game.grid_borders or goals[i][1] in game.grid_borders:
                goals_remove.append(goals[i])
            # the worm would OVERLAP ITSELF
            elif goals[i] in worm_recent:
                goals_remove.append(goals[i])
            # the worm would touch itself in a square shape
            elif player_recent.getAdjacent(goals[i],[worm_recent])[2] >= 2:
                goals_remove.append(goals[i])
            # the worm would overlap an OPPOSING worm
            elif goals[i] in player_other_pos:
                goal_temp = [ goals[i][0] + goals_dir[i][0], goals[i][1] + goals_dir[i][1] ]
                goals_remove.append(goals[i])
                # 1. goal_temp inside the board
                if goal_temp[0] not in game.grid_borders and goal_temp[1] not in game.grid_borders:
                    # 2. goal_temp not occupied by the other player
                    if goal_temp not in player_other_pos:
                        # 3. goal_temp not occupied by oneself
                        if goal_temp not in player_recent_pos:
                            # 4. no own stone laterally adjacent to the stone being bitten through
                            if adjacentPositions[2] == 0:
                                # 5. at most one own stone adjacent to goal_temp, and it is the head or tail of its worm
                                adjacentPositions = self.getAdjacent(goal_temp, worms_withoutRecent)
                                if adjacentPositions[2] <= 1:
                                    if adjacentPositions[0] == None or adjacentPositions[1].index(adjacentPositions[0]) in [0, len(adjacentPositions[1])-1]:
                                        # biting through the opposing worm is possible!
                                        goals_sep1.append(goals[i])
                                        goals_sep2.append(goal_temp)
            # one adjacent own worm that does not touch with one of its ends
            elif adjacentPositions[2] == 1:
                if adjacentPositions[1].index(adjacentPositions[0]) not in [0, len(adjacentPositions[1])-1]:
                    goals_remove.append(goals[i])
            # at least two own worms adjacent
            elif adjacentPositions[2] >= 2:
                goals_remove.append(goals[i])
        # now actually remove the invalid goals
        [goals.remove(pos) for pos in goals_remove]
        return [self.worms.index(worm_recent), pos_move, goals+goals_sep2, goals, goals_sep2, goals_sep1]
    def move(self, worm_recent_i, p_end, p_goal):
        # = move, and if necessary bite through an opposing worm
        # MOVE ONLY
        if p_goal not in goalPositions[4]:
            # 1. insert the target stone at the matching end
            if self.worms[worm_recent_i].index(p_end) == 0:
                # target position at the tail of the current worm
                self.worms[worm_recent_i].append(p_goal)
            else:
                # target position at the head of the current worm
                self.worms[worm_recent_i].insert(0, p_goal)
            # 2. remove the stone that was moved
            self.worms[worm_recent_i].remove(p_end)
        # MOVE + BITE THE OPPOSING WORM
        else:
            goal_sep1_i = goalPositions[4].index(p_goal)
            worm_recent_other_i = None
            pos_other_del_i = None
            # 1. TAKE OVER THE OPPOSING STONE AND PLACE A STONE BEHIND IT
            # target position at the tail of the current worm
            if self.worms[worm_recent_i].index(p_end) == 0:
                self.worms[worm_recent_i].extend ( [goalPositions[5][goal_sep1_i], goalPositions[4][goal_sep1_i]] )
            else:
                # target position at the head of the current worm
                self.worms[worm_recent_i].insert ( 0, goalPositions[5][goal_sep1_i] )
                self.worms[worm_recent_i].insert ( 0, goalPositions[4][goal_sep1_i] )
            # remove own end stone
            self.worms[worm_recent_i].remove(p_end)
            # 2. DELETE THE OPPOSING STONE
            for worm in player_other.worms:
                if goalPositions[5][goal_sep1_i] in worm:
                    worm_recent_other_i = player_other.worms.index(worm)
                    pos_other_del_i = player_other.worms[worm_recent_other_i].index(goalPositions[5][goal_sep1_i])
                    player_other.worms[worm_recent_other_i].remove(goalPositions[5][goal_sep1_i])
            # 3. SPLIT THE OPPOSING WORM
            # only if the bitten stone was not the head or tail of its worm
            if pos_other_del_i not in [0, len(player_other.worms[worm_recent_other_i])]:
                worms_temp = []
                # worms before the split one
                if worm_recent_other_i > 0:
                    worms_temp.extend(player_other.worms[:worm_recent_other_i])
                # the two halves of the split worm
                worms_temp.append(player_other.worms[worm_recent_other_i] [:pos_other_del_i])
                worms_temp.append(player_other.worms[worm_recent_other_i] [pos_other_del_i:])
                # worms after the split one
                if worm_recent_other_i < len(player_other.worms)-1:
                    worms_temp.extend(player_other.worms[worm_recent_other_i+1:])
                player_other.worms = worms_temp
    def getAdjacent(self, pos, worms):
        # = return the last found adjacent stone/worm of *worms* next to *pos*
        # and how many adjacencies there are: [stone, its worm, count]
        goals_dir = [[1,0], [0,1], [-1,0], [0,-1]]
        pos_adjacent, worm_adjacent = None, None
        touching = 0
        for direction in goals_dir:
            for worm in worms:
                if [pos[0]+direction[0], pos[1]+direction[1]] in worm:
                    pos_adjacent = [pos[0]+direction[0], pos[1]+direction[1]]
                    worm_adjacent = worm
                    touching += 1
        return [pos_adjacent, worm_adjacent, touching]
    def getWormsWithoutRecent(self,worm_recent):
        # all of this player's worms except the one currently being moved
        worms_withoutRecent = []
        for worm in self.worms:
            if worm != worm_recent: worms_withoutRecent.append(worm)
        return worms_withoutRecent
    def connect(self, pos_new, worm_recent):
        # = join own worms when they touch
        worms_withoutRecent = self.getWormsWithoutRecent(worm_recent)
        adjacentPositions = self.getAdjacent(pos_new, worms_withoutRecent)
        # 1. is the newly placed stone adjacent to another own worm?
        if adjacentPositions[2] == 1:
            # 2. is the newly placed stone the head or the tail of its worm?
            if self.worms[self.worms.index(worm_recent)].index(pos_new) == 0:
                # head
                worm1_order = -1
            else:
                # tail
                worm1_order = 1
            # 3. is the adjacent own stone the head or the tail of the other own worm?
            if self.worms[self.worms.index(adjacentPositions[1])].index(adjacentPositions[0]) == 0:
                # head, so keep the order
                worm2_order = 1
            else:
                # tail, so reverse the order
                worm2_order = -1
            # 4. append the other own worm in the matching orientation
            worm_temp = worm_recent[::worm1_order] + adjacentPositions[1][::worm2_order]
            worm_temp = worm_temp[::worm1_order]
            self.worms.remove(adjacentPositions[1])
            self.worms[self.worms.index(worm_recent)] = worm_temp
    def hasWon(self):
        # the game is won once at least winCondition own stones occupy
        # squares of the opponent's base
        baseWorms = 0
        for worm in self.worms:
            for pos in worm:
                if pos in player_other.base:
                    baseWorms += 1
                    if baseWorms >= winCondition:
                        return True
# TO DO:
# (?) Bei zweitem Wurm kann man nicht den gleichen Wurm nochmal steuern
# (?) Design Fehler Till
# Farben
# INITIALISIERUNGEN
# Top-level game loop: cycles through the four game states
# (initialize -> controls screen -> game -> win screen).
while True:
    # GAME STATE 1: INITIALIZE
    if gameState == stateInitialize:
        game = Game() # must be created first!
        player1 = Worm( [0,2],[0,1],[0,0],[1,0],[2,0],[3,0], player1_color ) # argument order matters! (stones listed adjacently)
        player2 = Worm( [7,5],[7,6],[7,7],[6,7],[5,7],[4,7], player2_color )
        player_recent = player1
        player_other = player2
        # key bindings
        select, confirm, back = range(3)
        player_keys = {
            player1:{
                select : pygame.locals.K_RIGHT,
                confirm : pygame.locals.K_RETURN,
                back : pygame.locals.K_BACKSPACE},
            player2:{
                select : pygame.locals.K_d,
                confirm : pygame.locals.K_s,
                back : pygame.locals.K_e}}
        # miscellaneous
        movablePositions = None
        goalPositions = None
        player_turns = 0
        counter = -1
        counter_last = None
        worms_amount = 0
        # move on to the controls screen
        gameState = stateControls
        gamePhase = phaseChoseMovable
    # GAME STATE 2: CONTROLS SCREEN
    if gameState == stateControls:
        clockObj.tick(framerate)
        screen.fill(grid_color_background)
        screen.blit(player1_controls1_sf, [res[0]/8, res[1]/4])
        screen.blit(player1_controls2_sf, [res[0]/20, res[1]/3])
        screen.blit(player2_controls1_sf, [res[0]/8*5, res[1]/4])
        screen.blit(player2_controls2_sf, [res[0]/20*12, res[1]/3])
        screen.blit(player2_controls3_sf, [res[0]/20*12, res[1]/30*11])
        screen.blit(player2_controls4_sf, [res[0]/20*12, res[1]/30*12])
        screen.blit(screen_controls_sf, [res[0]/3, res[1]/4*3])
        pygame.display.update()
        for event in pygame.event.get():
            if event.type == KEYDOWN and event.key == K_SPACE:
                gameState = stateGame
    # MAIN-LOOP
    while gameState == stateGame:
        # GAME STATE 3: GAME
        #Framerate
        clockObj.tick(framerate)
        # events -> does not work here, because a second event loop runs later
        '''for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()'''
        # GAME PHASES
        # PHASE 1: SELECT THE STONE TO MOVE
        if gamePhase == phaseChoseMovable:
            if movablePositions == None:
                movablePositions = player_recent.movablePositions()
                # mark the movable positions in the grid array
                if movablePositions != []:
                    game.grid_array[movablePositions[counter][1]][movablePositions[counter][0]] = 1
                else:
                    # no movable stones left: the current player is stuck
                    gameState = stateWin
                # determine the number of moves for this turn
                player_turns_max = len(movablePositions)//2
            for event in pygame.event.get():
                # 1. select a worm end
                if event.type == KEYDOWN and event.key == player_keys[player_recent][select]:
                    counter = (counter + 1)%len(movablePositions)
                    game.gridReset()
                    game.grid_array [movablePositions[counter][1]] [movablePositions[counter][0]] = 1
                # 2. confirm
                if event.type == KEYDOWN and event.key == player_keys[player_recent][confirm] and counter > -1:
                    gamePhase = phaseChoseGoal
                    break
        # PHASE 2: SELECT THE TARGET POSITION
        if gamePhase == phaseChoseGoal:
            if goalPositions == None:
                goalPositions = player_recent.goalPositions(movablePositions[counter])
                game.gridReset()
                # mark the target positions in the grid
                for pos in goalPositions[2]:
                    if pos in goalPositions[3]:
                        # normal target square next to the own worm
                        game.grid_array[pos[1]][pos[0]] = 2
                    else:
                        # target square behind an opposing worm (bite-through)
                        game.grid_array[pos[1]][pos[0]] = 4
                counter_last = counter
                if len(goalPositions[2]) != 0: counter = (counter + 1)%len(goalPositions[2])
            for event in pygame.event.get():
                # no target positions available: this stone cannot move
                if len(goalPositions[2]) == 0:
                    game.grid_array[movablePositions[counter_last][1]][movablePositions[counter_last][0]] = 5
                else:
                    # 1. select a target position
                    if event.type == KEYDOWN and event.key == player_keys[player_recent][select]:
                        counter = (counter + 1)%len(goalPositions[2])
                        if goalPositions[2][counter] in goalPositions[3]:
                            game.grid_array[goalPositions[2][counter][1]][goalPositions[2][counter][0]] = 2.3
                        else:
                            game.grid_array[goalPositions[2][counter][1]][goalPositions[2][counter][0]] = 4.3
                        if len(goalPositions[2]) > 1:
                            if goalPositions[2][counter-1] in goalPositions[3]:
                                game.grid_array[goalPositions[2][counter-1][1]][goalPositions[2][counter-1][0]] = 2
                            else:
                                game.grid_array[goalPositions[2][counter-1][1]][goalPositions[2][counter-1][0]] = 4
                    # 2. confirm
                    if event.type == KEYDOWN and event.key == player_keys[player_recent][confirm]:
                        gamePhase = phaseMove
                # 3. go one step back
                if event.type == KEYDOWN and event.key == player_keys[player_recent][back]:
                    game.gridReset()
                    gamePhase = phaseChoseMovable
                    movablePositions, goalPositions = None, None
                    counter = counter_last
        # PHASE 3: MOVE THE WORM
        if gamePhase == phaseMove:
            # move
            player_recent.move(goalPositions[0],goalPositions[1],goalPositions[2][counter])
            # join own worms that now touch
            player_recent.connect(goalPositions[2][counter], player_recent.worms[goalPositions[0]])
            # possibly reduce the remaining moves of this turn
            for worm in player_recent.worms:
                if len(worm) > 1:
                    worms_amount += 1
            if worms_amount < player_turns_max:
                player_turns_max -= 1
            # next move if the game has not been won
            if not player_recent.hasWon():
                # reset everything
                game.gridReset()
                movablePositions, goalPositions, counter, worms_amount = None, None, 0, 0
                # next move
                player_turns += 1
                gamePhase = phaseChoseMovable
            else:
                gameState = stateWin
    # GAME STATE 4: GAME WON
    if gameState == stateWin:
        screen.blit(surf_win, [res[0]/2-surf_win.get_width()/2, res[1]/2-surf_win.get_height()/2])
        pygame.display.update()
        pygame.time.wait(5000)
        # new game
        gameState = stateInitialize
    # switch players once the current player has used up all moves
    if player_turns >= player_turns_max:
        player_turns = 0
        if player_recent == player1:
            player_recent = player2
            player_other = player1
        else:
            player_recent = player1
            player_other = player2
    # draw everything
    screen.fill(grid_color_background)
    game.draw()
    pygame.display.update()
# FEHLER DOKUMENTATION:
# Verschachtelter Index falsch
# Variable an falscher Stelle initiiert
# Werte der List Comprehension nicht übergeben
# Liste 4 Elemente zugefügt, dann 4er-for-Schleife, die den Index der Liste abfragt und in der Elemente der Liste gelöscht werden -> Fehlermeldung, weil Liste verkürzt und Indizierung nicht mehr möglich
# 2 Eventschleifen hintereinander klappen nicht; variable für events.get() speichert events und kickt sie nicht wieder aus der queue
# 2x vergessen Variablen zu resetten
# Variable nicht zurückgesetzt (player_turns)
# Versucht Attribut von Instanz zu iterieren, dabei Instanz iteriert und Attribut vergessen
# "list index out of range"-Meldung: Komma bei Liste vergessen
# bei verschachtelter Liste zu viele Klammern
# bei x-y-Liste Zahlen verdreht
# Klammer vergessen; Methode nicht auf Instanz angewendet
# Logik-Fehler: den gleichen Stein zwei mal removed, dadurch später Fehlermeldung
# Code-Zeile übersehen, die völligen Humbug berechnet hat
# Probleme mit Bearbeitung von verschachtelten Listen
# Klammer-Fehler bei len()-1
# zur Indizierung Variable des falschen Spielers verwendet
# for-Schleife mit if-Bedingung falsch verwendet
# vergessen beim Durchfressen eig. Endstein zu löschen
# in einer Zeile Liste erstellen ([a,b]), Slicen und Extenden klappt nicht (?)
# Werte bei Methodenaufrufe nicht übergeben
# Methode logisch falsch
# pygame.event.get() verwendet lokale Variablen (z.B. K_RETURN), die nur innerhalb der Event-Schleife gültig sind; Dictionary mit z.B. K_RETURN als Value klappt somit nicht, benötigt pygame.locals.K_RETURN
# Schreibfehler (K_KETURN)
# Methode zu weit eingerückt, Instanz hatte somit kein "hasWon-Attribut"
# Code eingefügt, der kurz danach überschrieben wurde
# Klammer-Fehler bei Indizierung von len() -> "int-Obj not substribtible"
# DURCH PROGRAMMIERUNG DIESES SPIELS TRAINIERTE BEREICHE:
# Logisches Denken
# verschachtelte Listen / Indizierungen
# for-Schleifen
# Prozesse formalisieren
# NÄCHSTES MAL ANDERS MACHEN:
# Übergabewerte von Methoden übersichtlicher sortieren, vllt weniger pro Methode
import json
import requests
import os
from PIL import Image
# Download every image listed in clipsubset.json, round-trip each one through
# a temporary file to validate it, then store the verified copy under hd/.
input_file = "clipsubset.json"
with open(input_file, encoding="utf8") as f:
    data = json.load(f)
count = 0
testpath = "tst.jpg"
for row in data:
    image_url = row['url']
    try:
        response = requests.get(image_url, stream = True)
        if response.status_code == 200:
            try:
                img = Image.open(response.raw)
                path = f'{count}.{img.format}'
                img.save(os.path.join(testpath))
                # Image.verify() invalidates the Image object, so the file is
                # reopened once to verify and once more to save the final copy.
                img = Image.open(testpath)
                img.verify()
                img = Image.open(testpath)
                img.save(os.path.join('hd', path))
                os.remove(testpath)
                print('Image sucessfully Downloaded')
                count+=1
            except Exception as e:
                # "seek" errors indicate a non-seekable response stream; those
                # are re-raised instead of being logged and skipped.
                if "seek" in str(e):
                    raise(e)
                print("encountered exception when reading image data from url", e)
        else:
            print('Image Couldn\'t be retreived')
    except Exception as e:
print("encountered connection error", e) | Lewington-pitsos/clip-download | download.py | download.py | py | 1,131 | python | en | code | 7 | github-code | 13 |
28065695396 | import requests
import os
# Batch-generate sports-meeting announcement drafts via the OpenAI chat API
# and write each response to output/<name>院_<project>_<i>.txt.
# Your OpenAI API Keys - replace with your own keys (placeholders only; never
# commit real keys to source control).
api_keys = ["your-api-key1", "your-api-key2", "your-api-key3", "your-api-key4"]
# Define the name and project for your prompt
name = "人文学"
project = "接力赛跑"
# The text prompt you want to generate a response
prompt = f"你是一名阳光积极向上的{name}院学生,你的学校即将要开运动会,请书写一份通讯稿,为{name}运动员加油。项目:{project}。字数:120字左右。要求:积极上进,富有文采,带有运动项目特色。参考样例:(输出请勿与样例重复)"
# Define the URL for the OpenAI API
url = "https://api.openai.com/v1/chat/completions"
# Define the data to be sent to the API
data = {
    "model": "gpt-3.5-turbo",
    "messages": [{"role": "user", "content": prompt}],
    "max_tokens": 1200,
    "temperature": 0.5,
    "frequency_penalty": 0,
    "presence_penalty": 0
}
# Ensure the output directory exists
os.makedirs('output', exist_ok=True)
# Loop to generate responses.  NOTE: range(1, 2) yields a single iteration
# (i == 1); widen the range to generate more drafts.
for i in range(1, 2):
    # Select an API key (round-robin over the key list via modulo)
    api_key = api_keys[i % len(api_keys)]
    # The headers for the API request
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}"
    }
    # Make the API request
    response = requests.post(url, headers=headers, json=data)
    # Check if the request was successful
    if response.status_code == 200:
        # Extract the generated text from the response
        generated_text = response.json()['choices'][0]['message']['content']
        print(str(i) + ' request complete')
        # Save the text to a file with a numbered name
        with open(f'output/{name}院_{project}_{i}.txt', 'w', encoding='utf-8') as file:
            file.write(generated_text)
    else:
        # Handle the error
        print(f"Request failed with status code {response.status_code} for iteration {i}")
| RwandanMtGorilla/txtMP | main.py | main.py | py | 1,941 | python | en | code | 6 | github-code | 13 |
27837548418 | from typing import Dict
import cv2 as cv
import logging
import numpy as np
from matplotlib.image import AxesImage
from ghostwriter.paths import DATA_DIR
from ghostwriter.utils import default_arguments, set_up_logging
from ghostwriter.camera.gamma import GammaCorrector
from ghostwriter.camera.keymap import KEY_UP, KEY_DOWN, KEY_LEFT, KEY_RIGHT, QUIT
import matplotlib
from matplotlib import pyplot as plt
from scripts.python_info import is_m1_mac
_WINDOWS: Dict[str, AxesImage] = {}
def on_close(event):
    """Mark the event's figure as closed so imshow() recreates its window."""
    figure = event.canvas.figure
    figure.has_been_closed = True
def imshow(window_name: str, opencv_image: np.ndarray):
    """Display a BGR OpenCV frame via matplotlib, reusing the named window.

    Converts BGR -> RGB, then either creates a fresh AxesImage (first call,
    or after the user closed the window) or updates the existing image data.
    The AxesImage is cached per window name in the module-level _WINDOWS dict.
    """
    rgb_image = cv.cvtColor(opencv_image, cv.COLOR_BGR2RGB)
    window = _WINDOWS.get(window_name)
    if window is None or window.figure.has_been_closed:
        # (Re)create the image; on_close flips has_been_closed when the
        # figure window is closed, triggering this branch next time.
        window = plt.imshow(rgb_image)
        window.figure.has_been_closed = False
        window.figure.canvas.mpl_connect("close_event", on_close)
    else:
        window.set_data(rgb_image)
    _WINDOWS[window_name] = window
    print("Drawing")
    plt.gcf().canvas.draw_idle()
    # The short pause lets the GUI event loop actually process the redraw.
    plt.pause(0.0001)
def update_gamma_from_key_press(gamma, key, factor=1.1):
    """Return a new gamma value adjusted by *factor* according to *key*.

    KEY_UP divides gamma by the factor (brightens), KEY_DOWN multiplies
    (darkens); any other key leaves gamma unchanged.
    """
    logger = logging.getLogger(__name__)
    if key == KEY_UP:
        updated = gamma / factor
    elif key == KEY_DOWN:
        updated = gamma * factor
    else:
        updated = gamma
    # Matches the original: the *previous* gamma is what gets logged.
    logger.debug("gamma = %s", gamma)
    return updated
def detect_and_display(frame, face_cascade, eyes_cascade, smile_cascade):
    """Run face/eye/smile cascades on *frame*, draw the detections, display.

    Faces get a magenta ellipse; within each face ROI, eyes get blue circles
    and smiles get cyan rectangles.  The annotated frame is shown via the
    module's matplotlib-backed imshow().
    """
    logger = logging.getLogger(__name__)
    frame_gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
    # Histogram equalization improves cascade detection under uneven lighting.
    frame_gray = cv.equalizeHist(frame_gray)
    # -- Detect faces
    faces = face_cascade.detectMultiScale(frame_gray)
    for (x, y, w, h) in faces:
        center = (x + w // 2, y + h // 2)
        frame = cv.ellipse(frame, center, (w // 2, h // 2), 0, 0, 360, (255, 0, 255), 4)
        faceROI = frame_gray[y : y + h, x : x + w]
        # -- In each face, detect eyes and smiles (ROI coords are face-relative)
        eyes = eyes_cascade.detectMultiScale(faceROI)
        smiles = smile_cascade.detectMultiScale(faceROI, 1.8, 20)
        for (x2, y2, w2, h2) in eyes:
            eye_center = (x + x2 + w2 // 2, y + y2 + h2 // 2)
            radius = int(round((w2 + h2) * 0.25))
            frame = cv.circle(frame, eye_center, radius, (255, 0, 0), 4)
        for (x2, y2, w2, h2) in smiles:
            pt1 = (x + x2, y + y2)
            pt2 = (x + x2 + w2, y + y2 + h2)
            logger.debug("Smile detected!")
            frame = cv.rectangle(frame, pt1, pt2, color=(255, 255, 0))
    # cv.imshow("Capture - Face detection", frame)
    imshow("Capture - Face detection", frame)
def main():
    """Entry point: load the Haar cascades and run detection on the camera feed.

    Parses cascade-path arguments, loads the three classifiers, then loops
    over captured frames, gamma-correcting each before detection.
    """
    parser = default_arguments(description="Code for Cascade Classifier tutorial.")
    parser.add_argument(
        "--face_cascade",
        help="Path to face cascade.",
        default="{}/haarcascades/haarcascade_frontalface_alt.xml".format(DATA_DIR),
    )
    parser.add_argument(
        "--smile_cascade",
        help="Path to face cascade.",
        default="{}/haarcascades/haarcascade_smile.xml".format(DATA_DIR),
    )
    parser.add_argument(
        "--eyes_cascade",
        help="Path to eyes cascade.",
        default="{}/haarcascades/haarcascade_eye_tree_eyeglasses.xml".format(DATA_DIR),
    )
    # Apple-silicon Macs need the native backend for interactive windows.
    if is_m1_mac():
        matplotlib.use("MacOSX")
    args = parser.parse_args()
    set_up_logging(args.verbose)
    logger = logging.getLogger(__name__)
    gamma_corrector = GammaCorrector()
    face_cascade_name = args.face_cascade
    eyes_cascade_name = args.eyes_cascade
    smile_cascade_name = args.smile_cascade
    face_cascade = cv.CascadeClassifier()
    eyes_cascade = cv.CascadeClassifier()
    smile_cascade = cv.CascadeClassifier()
    # -- 1. Load the cascades
    if not face_cascade.load(cv.samples.findFile(face_cascade_name)):
        logger.error("Error loading face cascade")
        return
    if not eyes_cascade.load(cv.samples.findFile(eyes_cascade_name)):
        logger.error("Error loading eyes cascade")
        return
    if not smile_cascade.load(cv.samples.findFile(smile_cascade_name)):
        logger.error("Error loading smile cascade")
        return
    camera_device = args.camera
    # -- 2. Read the video stream
    cap = cv.VideoCapture(camera_device)
    # BUGFIX: isOpened is a method; the original `if not cap.isOpened:` tested
    # the bound-method object (always truthy), so the error branch never ran.
    if not cap.isOpened():
        logger.error("Error opening video capture")
        return
    # NOTE(review): gamma is never updated in this loop — the
    # update_gamma_from_key_press helper is defined but unused here.
    gamma = 1.0
    while True:
        ret, frame = cap.read()
        if frame is None:
            logger.error("No captured frame; is your camera available?")
            break
        detect_and_display(
            gamma_corrector.correct(frame, gamma),
            face_cascade=face_cascade,
            eyes_cascade=eyes_cascade,
            smile_cascade=smile_cascade,
        )
if __name__ == "__main__":
main()
| mbmccoy/ghostwriter | ghostwriter/camera/examples/smile_detector.py | smile_detector.py | py | 4,803 | python | en | code | 1 | github-code | 13 |
10108907048 | import sys
from unittest import TestCase
from vending_machine import VendingMachine
from io import StringIO
import pytest
class TestVendingMachine(object):
    """pytest-style tests for VendingMachine.insert().

    Valid coins should report 'changes: 0'; invalid coins are returned in
    full as change.
    """
    def setup_method(self, method):
        # Redirect stdout into a StringIO so printed output can be inspected.
        # NOTE(review): the tests below actually use pytest's capsys fixture,
        # not self.captor — confirm the redirect is still needed.
        print('method{}'.format(method.__name__))
        self.vm = VendingMachine()
        self.captor = StringIO()
        sys.stdout = self.captor
    def teardown_method(self, method):
        # Restore the real stdout before logging the finished method name.
        sys.stdout = sys.__stdout__
        print('method{}:'.format(method.__name__))
        del self.vm
    @pytest.mark.parametrize('money', [10, 50, 100, 500, 1000])
    def test_insert_available_money(self, capsys, money):
        # Accepted denominations are swallowed: no change is returned.
        self.vm.insert(money)
        assert capsys.readouterr().out == 'changes: 0\n'
    @pytest.mark.parametrize('money', [-1, 3, 11])
    def test_insert_unavailable_money(self, capsys, money):
        # Unsupported amounts are rejected and returned as change verbatim.
        self.vm.insert(money)
        assert capsys.readouterr().out == 'changes: {}\n'.format(money)
| ki4070ma/vending-machine | test_vending_machine.py | test_vending_machine.py | py | 916 | python | en | code | 0 | github-code | 13 |
74667749457 | from PIL import Image
from IPython.display import display
filepath = ''
# load an image
img = Image.open(filepath)
img = img.convert('L')
w, h = img.size
# create the intensity matrix
img_data = img.getdata()
Img = [[img_data[x + w * y] / 255.0 for x in range(w)] for y in range(h)]
# matrices convolution
def convolution(source, kernel, s_size, k_size):
    """Convolve a square ``s_size`` x ``s_size`` matrix with a ``k_size`` kernel.

    Cells closer than ``k_size // 2`` to any edge are copied through
    unchanged; interior cells receive the kernel-weighted sum of their
    neighbourhood.

    :param source: 2-D list indexed as ``source[row][col]``
    :param kernel: 2-D list of kernel weights (assumed ``k_size`` square, odd)
    :param s_size: width/height of the (assumed square) source matrix
    :param k_size: width/height of the kernel
    :return: new 2-D list ``C`` with ``C[row][col]`` convolved
    """
    def _conv(y, x, half):
        if x < half or y < half or x >= s_size - half or y >= s_size - half:
            # Border: pass the pixel through.  BUGFIX: the original returned
            # the transposed source[x][y], i.e. the wrong pixel whenever the
            # image is not symmetric about its diagonal.
            return source[y][x]
        return sum(
            source[y + ky - half][x + kx - half] * kernel[ky][kx]
            for kx in range(half * 2 + 1)
            for ky in range(half * 2 + 1)
        )
    half = k_size // 2
    return [[_conv(y, x, half) for x in range(s_size)] for y in range(s_size)]
def convolutiontest(Img, n = 1):
    """Convolve *Img* with the selected 3x3 kernel, show it, and recurse n times.

    Relies on the module-level image width ``w`` and on showimage().
    Uncomment exactly one kernel K below to switch effects.
    """
    KSize = 3
    #K = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]        # identity
    K = [[1, 2, 1], [2, 4, 2], [1, 2, 1]]         # Gaussian blur
    #K = [[0, -1, 0], [-1, 5, -1], [0, -1, 0]]    # sharpen
    #K = [[1, 0, -1], [0, 0, 0], [-1, 0, 1]]      # edge (diagonal gradient)
    #K = [[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]]  # edge (Laplacian)
    # normalize kernel so its weights sum to 1 (skip zero-sum edge kernels)
    s = sum([sum(K[i]) for i in range(KSize)])
    if (s != 0):
        K = [[K[y][x] / s for x in range(KSize)] for y in range(KSize)]
    # NOTE(review): uses the global width w for both dimensions — assumes a
    # square image; confirm h == w for the loaded file.
    c = convolution(Img, K, w, KSize)
    showimage(c, w)
    if n > 1:
        # Apply the same kernel repeatedly to the previous result.
        convolutiontest(c, n - 1)
def showimage(array2d, size):
    """Render a [0,1]-valued 2-D intensity matrix inline via IPython display.

    Flattens *array2d* row-major, rescales to 0-255 greyscale, and shows it
    as an RGB image.  NOTE(review): the output image uses the module-level
    (w, h) while the data length is size**2 — this only lines up when
    size == w == h (square image); confirm for non-square inputs.
    """
    cdata = [255 * array2d[x // size][x % size] for x in range(size ** 2)]
    cimage = Image.new("L", (w, h))
    cimage.putdata(cdata)
    cimage = cimage.convert('RGB')
    display(cimage)
display(img)
convolutiontest(Img, 1) | dananas/Convolutions | convolution.py | convolution.py | py | 1,681 | python | en | code | 0 | github-code | 13 |
8841820208 | import os
from decimal import Decimal
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.files.storage import default_storage
from django.test import override_settings
from django.test import TestCase
from helpers.seed import get_or_create_default_image
from product.models.category import ProductCategory
from product.models.product import Product
from vat.models import Vat
languages = [lang["code"] for lang in settings.PARLER_LANGUAGES[settings.SITE_ID]]
default_language = settings.PARLER_DEFAULT_LANGUAGE_CODE
User = get_user_model()
@override_settings(
    STORAGES={
        "default": {
            "BACKEND": "django.core.files.storage.memory.InMemoryStorage",
        },
    }
)
class CategoryModelTestCase(TestCase):
    """Integration tests for the ProductCategory model.

    Builds a parent/child category pair (with translations in every
    configured language and in-memory image storage) and exercises fields,
    translations, string representations, ordering querysets, recursive
    product counts, and URL/filename properties.
    """
    category = None
    sub_category = None
    user = None
    vat = None
    default_image = None
    def setUp(self):
        # One user, one VAT rate, one shared default image, and a
        # category -> sub_category pair translated into every language.
        self.user = User.objects.create_user(
            email="test@test.com", password="test12345@!"
        )
        self.vat = Vat.objects.create(
            value=Decimal("24.0"),
        )
        # Create or retrieve the default image instance
        self.default_image = get_or_create_default_image(
            "uploads/categories/no_photo.jpg"
        )
        self.category = ProductCategory.objects.create(
            slug="sample-category",
            menu_image_one=self.default_image,
            menu_image_two=self.default_image,
            menu_main_banner=self.default_image,
        )
        for language in languages:
            self.category.set_current_language(language)
            self.category.name = f"Sample Category {language}"
            self.category.description = f"Sample Category Description {language}"
            self.category.save()
        self.category.set_current_language(default_language)
        self.sub_category = ProductCategory.objects.create(
            slug="sample-sub-category",
            parent=self.category,
            menu_image_one=self.default_image,
            menu_image_two=self.default_image,
            menu_main_banner=self.default_image,
        )
        for language in languages:
            self.sub_category.set_current_language(language)
            self.sub_category.name = f"Sample Sub Category {language}"
            self.sub_category.description = (
                f"Sample Sub Category Description {language}"
            )
            self.sub_category.save()
        self.sub_category.set_current_language(default_language)
    def test_fields(self):
        # Test if the fields are saved correctly
        self.assertEqual(self.category.slug, "sample-category")
        self.assertTrue(default_storage.exists(self.category.menu_image_one.path))
        self.assertTrue(default_storage.exists(self.category.menu_image_two.path))
        self.assertTrue(default_storage.exists(self.category.menu_main_banner.path))
    def test_verbose_names(self):
        # Test verbose names for fields
        self.assertEqual(self.category._meta.get_field("slug").verbose_name, "Slug")
        self.assertEqual(
            self.category._meta.get_field("menu_image_one").verbose_name,
            "Menu Image One",
        )
        self.assertEqual(
            self.category._meta.get_field("menu_image_two").verbose_name,
            "Menu Image Two",
        )
        self.assertEqual(
            self.category._meta.get_field("menu_main_banner").verbose_name,
            "Menu Main Banner",
        )
    def test_meta_verbose_names(self):
        # Test verbose names from the Meta class
        self.assertEqual(ProductCategory._meta.verbose_name, "Product Category")
        self.assertEqual(
            ProductCategory._meta.verbose_name_plural, "Product Categories"
        )
    def test_unicode_representation(self):
        # Test the __unicode__ method returns the translated name
        self.assertEqual(
            self.category.__unicode__(),
            self.category.safe_translation_getter("name"),
        )
    def test_translations(self):
        # Test if translations are saved correctly
        for language in languages:
            self.category.set_current_language(language)
            self.assertEqual(self.category.name, f"Sample Category {language}")
            self.assertEqual(
                self.category.description, f"Sample Category Description {language}"
            )
    def test_str_representation_no_parent(self):
        self.assertEqual(
            str(self.category), self.category.safe_translation_getter("name")
        )
    def test_str_representation_with_parent(self):
        # Child categories render as "Parent / Child".
        category_name = self.category.safe_translation_getter("name")
        expected_str = f"{category_name} / Sample Sub Category" f" {default_language}"
        self.assertEqual(str(self.sub_category), expected_str)
    def test_str_representation_with_grandparent(self):
        # NOTE(review): __str__ appears to show only the immediate parent,
        # not the full ancestor chain — confirm against the model.
        grandparent = ProductCategory.objects.create(
            slug="grandparent-category",
        )
        for language in languages:
            grandparent.set_current_language(language)
            grandparent.name = f"Grandparent Category {language}"
            grandparent.description = f"Grandparent Category Description {language}"
            grandparent.save()
        grandparent.set_current_language(default_language)
        self.sub_category.parent = grandparent
        self.sub_category.save()
        expected_str = (
            f"Grandparent Category {default_language} / Sample Sub Category"
            f" {default_language}"
        )
        self.assertEqual(str(self.sub_category), expected_str)
    def test_get_ordering_queryset_with_parent(self):
        # Verify the order of execution and data setup
        self.assertEqual(
            ProductCategory.objects.count(), 2
        )  # Make sure you have 2 categories
        self.assertIn(self.sub_category, ProductCategory.objects.all())
        self.assertIn(self.category, ProductCategory.objects.all())
        # Retrieve the parent queryset
        parent_queryset = self.category.get_ordering_queryset()
        # Check if the sub-category is present in the parent_queryset
        self.assertIn(self.sub_category, parent_queryset)
        # Check if the parent category is also present in the parent_queryset
        self.assertIn(self.category, parent_queryset)
    def test_get_ordering_queryset_without_parent(self):
        # Create a new category without a parent
        no_parent_category = ProductCategory.objects.create(
            slug="no-parent-category",
        )
        # Retrieve the parent queryset for the category without a parent
        parent_queryset = no_parent_category.get_ordering_queryset()
        # Check if the parent category is present in the parent_queryset
        self.assertIn(no_parent_category, parent_queryset)
        # Check if other categories are present in the parent_queryset
        self.assertIn(self.sub_category, parent_queryset)
        self.assertIn(self.category, parent_queryset)
        # Now test if the descendant categories are also present
        for descendant in self.category.get_descendants(include_self=True):
            self.assertIn(descendant, parent_queryset)
    def test_recursive_product_count_no_products(self):
        count = self.category.recursive_product_count
        self.assertEqual(count, 0)
    def test_recursive_product_count_one_product(self):
        Product.objects.create(
            product_code="P123",
            category=self.category,
            slug="product-1",
            price=Decimal("100.0"),
            active=True,
            stock=10,
            discount_percent=Decimal("0.0"),
            vat=Vat.objects.create(value=Decimal("18.0")),
            hits=10,
            weight=Decimal("1.0"),
        )
        count = self.category.recursive_product_count
        self.assertEqual(count, 1)
    def test_recursive_product_count_multiple_products(self):
        # Products attached to descendants must count toward the ancestor.
        Product.objects.create(
            product_code="P123",
            category=self.category,
            slug="product-1",
            price=Decimal("100.0"),
            active=True,
            stock=10,
            discount_percent=Decimal("0.0"),
            vat=Vat.objects.create(value=Decimal("18.0")),
            hits=10,
            weight=Decimal("1.0"),
        )
        Product.objects.create(
            product_code="P124",
            category=self.sub_category,
            slug="product-2",
            price=Decimal("150.0"),
            active=True,
            stock=8,
            discount_percent=Decimal("0.0"),
            vat=Vat.objects.create(value=Decimal("18.0")),
            hits=8,
            weight=Decimal("1.5"),
        )
        count = self.category.recursive_product_count
        self.assertEqual(count, 2)
    def test_absolute_url_no_parent(self):
        url = self.category.absolute_url
        expected_url = f"/{self.category.slug}"
        self.assertEqual(url, expected_url)
    def test_absolute_url_with_parent(self):
        url = self.sub_category.absolute_url
        expected_url = f"/{self.category.slug}/{self.sub_category.slug}"
        self.assertEqual(url, expected_url)
    def test_category_menu_image_one_absolute_url(self):
        # Test category menu image absolute URL
        expected_url = settings.APP_BASE_URL + self.category.menu_image_one.url
        self.assertEqual(
            self.category.category_menu_image_one_absolute_url, expected_url
        )
    # NOTE(review): missing "test_" prefix — pytest/unittest never runs this.
    def category_menu_image_one_filename(self):
        # Test category menu main image filename
        expected_filename = os.path.basename(self.category.menu_image_one.name)
        self.assertEqual(
            self.category.category_menu_image_one_filename, expected_filename
        )
    def test_category_menu_image_two_absolute_url(self):
        # Test category menu image absolute URL
        expected_url = settings.APP_BASE_URL + self.category.menu_image_two.url
        self.assertEqual(
            self.category.category_menu_image_two_absolute_url, expected_url
        )
    # NOTE(review): missing "test_" prefix — pytest/unittest never runs this.
    def category_menu_image_two_filename(self):
        # Test category menu main image filename
        expected_filename = os.path.basename(self.category.menu_image_two.name)
        self.assertEqual(
            self.category.category_menu_image_two_filename, expected_filename
        )
    def test_category_menu_main_banner_absolute_url(self):
        # Test category menu image absolute URL
        expected_url = settings.APP_BASE_URL + self.category.menu_main_banner.url
        self.assertEqual(
            self.category.category_menu_main_banner_absolute_url, expected_url
        )
    # NOTE(review): missing "test_" prefix — pytest/unittest never runs this.
    def category_menu_main_banner_filename(self):
        # Test category menu main image filename
        expected_filename = os.path.basename(self.category.menu_main_banner.name)
        self.assertEqual(
            self.category.category_menu_main_banner_filename, expected_filename
        )
    def tearDown(self) -> None:
        super().tearDown()
        self.user.delete()
        self.vat.delete()
        self.sub_category.delete()
        self.category.delete()
| vasilistotskas/grooveshop-django-api | tests/integration/product/category/test_model_product_category.py | test_model_product_category.py | py | 11,197 | python | en | code | 4 | github-code | 13 |
74134928019 | from copy import deepcopy
from profession import PROFESSION_LIST
import json
class Player:
    """A single participant in the game session.

    Tracks the chosen profession, live attributes (hp/ap/mana), active
    status effects, and lobby readiness.  ``attributes`` stays ``None``
    until set_profession() is called.
    """
    def __init__(self, name):
        self.name = name
        self.profession = PROFESSION_LIST["None"]
        self.attributes = None
        self.ready = False
        self.statuses = []
        self.is_alive = True
        self.actions = []
    def set_profession(self, profession):
        """Assign a profession by name and seed attributes from its base stats."""
        self.profession = PROFESSION_LIST[profession]
        base = self.profession.base_attributes
        # Current and max values start equal to the profession's base stats.
        self.attributes = {
            "hp": base["base_hp"],
            "max_hp": base["base_hp"],
            "ap": base["base_ap"],
            "max_ap": base["base_ap"],
            "mana": base["base_mana"],
            "max_mana": base["base_mana"],
        }
        self.actions = self.profession.actions
    def set_ready(self, ready):
        """Set the lobby 'ready' flag."""
        self.ready = ready
    def process_statuses(self):
        """Apply one tick of every active status effect.

        Each tick subtracts the modifier's ``change`` from the affected
        attribute (clamped at 0), decays ``change`` by ``duration_delta``
        (clamped at 0), counts ``duration`` down by one, and removes
        expired statuses.  The player dies when hp reaches 0.

        Fixes vs. the original implementation:
        - iterate over a snapshot so removing an expired status does not
          skip the next one (removing while iterating);
        - persist the decremented duration and decayed change back onto
          the status/modifier — the original computed them into locals and
          discarded them, so effects never ticked down unless duration == 1.
        """
        for status in list(self.statuses):  # snapshot: safe removal while iterating
            modifier = status.modifier
            attribute = modifier.attribute
            change = modifier.change
            duration = status.duration
            duration_delta = status.duration_delta
            self.attributes[attribute] = self.attributes[attribute] - change
            if self.attributes[attribute] < 0:
                self.attributes[attribute] = 0
            duration -= 1
            change -= duration_delta
            if change < 0:
                change = 0
            # Persist the per-tick decay so the effect actually winds down.
            status.duration = duration
            modifier.change = change
            if duration == 0:
                self.statuses.remove(status)
        if self.attributes["hp"] <= 0:
            self.is_alive = False
    def lobby_dict(self):
        """Return the JSON-serialisable lobby view of this player."""
        return {
            "name": self.name,
            "profession": self.profession.name,
            "profession_description": self.profession.description,
            "ready": self.ready,
        }
    def game_dict(self):
        """Return the JSON-serialisable in-game view of this player."""
        return {
            "name": self.name,
            "profession": self.profession.name,
            "hp": [self.attributes["hp"], self.attributes["max_hp"]],
            "ap": [self.attributes["ap"], self.attributes["max_ap"]],
            "mana": [self.attributes["mana"], self.attributes["max_mana"]],
        }
def get_empty_lobby_dict():
    """Return a placeholder lobby entry for an unoccupied player slot."""
    return {
        "name": "",
        "profession": "",
        "profession_description": "",
        "ready": True,
    }
def get_empty_game_dict():
    """Return a placeholder in-game entry for an unoccupied player slot."""
    return {
        "name": "",
        "profession": "",
        "hp": [0, 0],
        "ap": [0, 0],
        "mana": [0, 0],
    }
| forsytheda/tiny-pyrpg | src/server/player.py | player.py | py | 2,896 | python | en | code | 1 | github-code | 13 |
38910422425 | command = input()
# Count coffees needed: each relaxing activity read from stdin costs one
# coffee, or two when typed in ALL CAPS; stop on 'END'.  (`command` is read
# from input() just above this block.)
coffees = 0
actions = ['coding', 'dog', 'cat', 'movie']
while command != 'END':
    command_low = command.lower()
    if command_low in actions:
        if command.isupper():
            # Shouting the activity doubles the caffeine requirement.
            coffees += 2
        else:
            coffees +=1
    command = input()
# More than 5 coffees means coffee won't cut it anymore.
if coffees <= 5:
    print(coffees)
else:
    print('You need extra sleep')
| chomarliga/python-fundamentals | Python_Fundamentals/01_Basic_syntax_ex/coffee_need.py | coffee_need.py | py | 355 | python | en | code | 0 | github-code | 13 |
74324377616 |
import requests
from bs4 import BeautifulSoup
import xml.etree.ElementTree as ET
def _make_request(market: str, symbol: str, page_size=10, start=0):
    """Fetch the Google Finance company-news RSS feed for ``market:symbol``.

    :param market: exchange code, e.g. 'NASDAQ'
    :param symbol: ticker symbol
    :param page_size: number of items requested per page
    :param start: zero-based offset of the first item
    :return: the raw ``requests.Response``
    """
    endpoint = 'https://www.google.com/finance/company_news'
    query = {
        'q': '{market}:{symbol}'.format(market=market, symbol=symbol),
        'num': page_size,
        'start': start,
        'output': 'rss',
    }
    return requests.get(endpoint, params=query)
def _format_links(links):
return [{'url': link.attrs.get('href'), 'text': link.get_text()} for link in links]
def _parse_item(item):
    """Extract one RSS <item> element into a flat dict.

    The description's HTML is stripped to plain text ('content') with its
    anchors collected separately ('content_links'); title/guid/link/pubDate
    are copied through as stripped text.
    """
    parsed = {}
    description = item.find('description')
    for raw in description.itertext():
        soup = BeautifulSoup(raw, 'html.parser')
        parsed['content'] = soup.get_text().strip()
        parsed['content_links'] = _format_links(soup.find_all('a'))
    # Simple text fields: last itertext() chunk wins, as in the original.
    for tag, key in (('title', 'title'), ('guid', 'guid'),
                     ('link', 'link'), ('pubDate', 'published')):
        node = item.find(tag)
        for raw in node.itertext():
            parsed[key] = raw.strip()
    return parsed
def _parse_response(response_string):
    """Parse an RSS XML payload into a list of item dicts via _parse_item()."""
    channel = ET.fromstring(response_string).find('channel')
    return [_parse_item(entry) for entry in channel.findall('item')]
| bjornstromeqt/lambda-finance | src/google_finance/company_news.py | company_news.py | py | 1,527 | python | en | code | 0 | github-code | 13 |
39750635854 | #!/usr/bin/env python3
import os
import socket
import threading
import json
import jigly.cache
# Minimal UDP chunk server: answers JSON {"cmd": "getchunk", "id": ..,
# "chunk": ..} datagrams with a chunksize-byte slice of the requested file.
# NOTE(review): testlist appears unused here — confirm before removing.
testlist = {
    "nier": [
        "../expers/nier1.mp3",
        "../expers/nier2.mp3",
    ]
}
# Files addressable by integer id in getchunk requests.
files = [
    "expers/nier1.mp3",
    "expers/nier2.mp3",
]
chunksize = 2048
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(("127.0.0.1", 34567))
# Background thread that closes idle cached file handles.
closser_thread = threading.Thread(target=jigly.cache.closser, args=())
closser_thread.start()
while 1:
    (dgramm, address) = sock.recvfrom(2048)
    print("Received from {}: {}".format(address, dgramm))
    trent = json.loads(dgramm)
    print(trent)
    if trent["cmd"] == "getchunk":
        fileid = trent["id"]
        chunk = trent["chunk"]
        print("fileid", fileid)
        print("chunk", chunk)
        filepath = files[fileid]
        # Cached handle; seek to the requested chunk offset and read one chunk.
        file = jigly.cache.get_opened_file(filepath)
        file.seek(chunksize * chunk)
        data = file.read(chunksize)
        print(data)
| mirmik/jigly | jigly/__main__.py | __main__.py | py | 868 | python | en | code | 0 | github-code | 13 |
73754997459 | # -problem3_6.py *- coding: utf-8 -*-
import sys

# For each line of the input file, write that line's length (without its
# surrounding newlines) to the output file, one count per line.
txtfile = sys.argv[1]
ctfile = sys.argv[2]
# Context managers guarantee both files are closed even if an error occurs
# mid-write (the original leaked both handles on any exception).
with open(txtfile) as text, open(ctfile, 'w') as ct:
    for line in text:
        ct.write(str(len(line.strip("\n"))) + "\n")
| xianan2/Python-Programming-A-Concise-Intro | Week 3/problem3_6.py | problem3_6.py | py | 249 | python | en | code | 0 | github-code | 13 |
40999129415 |
# Plotting Function -----------------------------------------------------------------------
# Plotting Function -----------------------------------------------------------------------
def clustering_plot(X, labels=[], centers=[], title=None, figsize='auto', alpha=1,
                    xscale='linear', yscale='linear'):
    """Scatter-plot every pairwise feature combination of X, colored by cluster.

    One subplot per 2-column combination of X's columns; points are colored
    by `labels` when given, and cluster `centers` are overlaid as red stars.

    NOTE(review): the grid-shape branches below only cover len_comb in
    {1, 2, 3} or divisible by 3 or 2 — other counts (e.g. C(11,2)=55) leave
    dim/nrows/ncols undefined and raise NameError.  Also, `ci//ncols-1`
    parses as (ci//ncols)-1, so the first subplot row is index -1 (wraps to
    the last row); every axis is still used, but row order is rotated —
    confirm whether that is intentional.
    """
    import itertools
    combination_set = list(itertools.combinations(X.columns,2))
    len_comb = len(combination_set)
    # Choose a subplot grid shape from the number of column pairs.
    if len_comb == 1:
        dim = 1; nrows = 1; ncols = 1;
    elif len_comb == 2:
        dim = 1; nrows = 1; ncols = 2;
    elif len_comb == 3:
        dim = 1; nrows = 1; ncols = 3;
    elif len_comb % 3 == 0:
        dim = 2; nrows = len_comb // 3; ncols = 3;
    elif len_comb % 2 == 0:
        dim = 2; nrows = len_comb // 2; ncols = 2;
    # print(dim, nrows, ncols)
    if list(labels):    # labels provided: plot one scatter per cluster
        unique_label = np.unique(labels)
    if list(centers):   # centers provided: build a frame for star markers
        center_frame = pd.DataFrame(centers, columns=X.columns)
    if figsize == 'auto':
        figsize = (ncols*5, nrows*4)
    fig, axe = plt.subplots(nrows=nrows, ncols=ncols, figsize=figsize)
    for ci, (x1, x2) in enumerate(combination_set):
        # print(ci, x1, x2)
        if nrows==1 and ncols==1:
            # Single axis: plt.subplots returned a bare Axes object.
            if list(labels):
                for l in unique_label:
                    cluster_mask = (labels == l)
                    X_mask = X.iloc[cluster_mask, :]
                    axe.scatter(X_mask[x1], X_mask[x2], label=str(l), edgecolors='white', alpha=alpha)
                axe.legend()
            else:
                axe.scatter(X[x1], X[x2], c='skyblue', edgecolors='white', alpha=alpha)
            axe.set_xlabel(x1)
            axe.set_ylabel(x2)
            axe.set_xscale(xscale)
            axe.set_yscale(yscale)
            if list(centers):
                axe.scatter(center_frame[x1], center_frame[x2], marker='*', c='r', s=200, edgecolors='white')
        elif dim == 1:
            # One row of axes: axe is a 1-D array.
            if list(labels):
                for l in unique_label:
                    cluster_mask = (labels == l)
                    X_mask = X.iloc[cluster_mask, :]
                    axe[ci].scatter(X_mask[x1], X_mask[x2], label=str(l), edgecolors='white', alpha=alpha)
                axe[ci].legend()
            else:
                axe[ci].scatter(X[x1], X[x2], c='skyblue', edgecolors='white', alpha=alpha)
            axe[ci].set_xlabel(x1)
            axe[ci].set_ylabel(x2)
            axe[ci].set_xscale(xscale)
            axe[ci].set_yscale(yscale)
            if list(centers):
                axe[ci].scatter(center_frame[x1], center_frame[x2], marker='*', c='r', s=200, edgecolors='white')
        else: # dim == 2:
            # 2-D grid of axes (see NOTE above about the row index wrap).
            if list(labels):
                for l in unique_label:
                    cluster_mask = (labels == l)
                    X_mask = X.iloc[cluster_mask, :]
                    axe[ci//ncols-1][ci%ncols].scatter(X_mask[x1], X_mask[x2], label=str(l), edgecolors='white', alpha=alpha)
                axe[ci//ncols-1][ci%ncols].legend()
            else:
                axe[ci//ncols-1][ci%ncols].scatter(X[x1], X[x2], c='skyblue', edgecolors='white', alpha=alpha)
            axe[ci//ncols-1][ci%ncols].set_xlabel(x1)
            axe[ci//ncols-1][ci%ncols].set_ylabel(x2)
            axe[ci//ncols-1][ci%ncols].set_xscale(xscale)
            axe[ci//ncols-1][ci%ncols].set_yscale(yscale)
            if list(centers):
                axe[ci//ncols-1][ci%ncols].scatter(center_frame[x1], center_frame[x2], marker='*', c='r', s=200, edgecolors='white')
    if title:
        fig.suptitle(title, fontsize=15)
    plt.show()
# -----------------------------------------------------------------------------------------------------
| kimds929/DS_Library | DS_Clustering.py | DS_Clustering.py | py | 3,710 | python | en | code | 0 | github-code | 13 |
11665895191 | import json
import logging
import os
from queue import Queue
import pymssql
import pandas as pd
import numpy as np
import requests
from src.config.config import *
from src.model.head import HeadModel
from src.model.lr_model import LRModel
from src.model.tail import TailModel
from src.utils.util import read_config, save_config
def load_all_model_dir() -> list:
    """Return every entry under MODEL_SAVE_DIR, newest (reverse-sorted) first."""
    entries = os.listdir(MODEL_SAVE_DIR)
    entries.sort(reverse=True)
    return entries
def load_latest_model_dir() -> str:
    """Return the newest model directory name (first of the reverse-sorted listing)."""
    all_dirs = load_all_model_dir()
    return all_dirs[0]
def load_current_model(param: str) -> str:
    """Return '<latest_dir>/<model_file_stem>' for the requested model kind.

    :param param: one of 'produce', 'transition', 'head', 'one-hot-brands'
    :raises Exception: for any other value
    NOTE(review): for both 'produce' and 'transition' this matches the file
    tagged '#produce' — the two models seem to share one timestamped file
    name; confirm.  Also, if no matching file exists the function falls
    through and implicitly returns None.
    """
    current_dir = load_latest_model_dir()
    if param in ['produce', 'transition']:
        for file in os.listdir(MODEL_SAVE_DIR + current_dir):
            # macOS may leave .DS_Store entries; skip all hidden files
            if file.startswith('.'):
                continue
            if os.path.splitext(file)[0].split('#')[1] == 'produce':
                return current_dir + "/" + os.path.splitext(file)[0]
    elif param in ['head', 'one-hot-brands']:
        # head / one-hot files are named '<dir>#<param>' inside the dir.
        return current_dir + "/" + current_dir + '#' + param
    else:
        raise Exception('param MUST in [produce, transition, head, one-hot-brands], now is ' + param)
def humid_stable(original_humid: list, setting: float) -> bool:
    """Return True when the outlet moisture is considered stable.

    Stable means the last 10 readings all deviate from the setpoint by at
    most 0.1; fewer than 10 readings (or any parsing error) yields False.

    :param original_humid: outlet-moisture readings (numeric or numeric strings)
    :param setting: outlet-moisture setpoint
    """
    try:
        if len(original_humid) < 10:
            return False
        window = [abs(float(reading) - setting) for reading in original_humid[-10:]]
        return not any(deviation > 0.1 for deviation in window)
    except Exception as e:
        logging.error('humid_stable error: {}'.format(e))
        return False
class Determiner:
    def __init__(self) -> None:
        """Initialise an empty determiner; models are built per batch by init_model()."""
        super().__init__()
        self.head_model = None
        self.tail_model = None
        self.transition_model = None
        self.produce_model = None
        # Next-batch preheat ranges are now passed to TailModel in init_model()
        # self.next_range_1 = next_range_1
        # self.next_range_2 = next_range_2
        # State for the 5000 cut-tobacco line buffer-cabinet half-full window
        self.humid_after_cut = []
        self.cut_half_full_counter = 0
        # Head (start-of-batch) stage flag
        self.head_flag = False
        # Transition stage flag
        self.transition_flag = False
        # Steady-production stage flag
        self.produce_flag = False
        # Tail (end-of-batch) stage flag
        self.tail_flag = False
        # Rolling window of pre-drying humidity readings
        self.q = Queue()
        self.adjust_params = {}
        self.counter = 0
self.counter = 0
    def read_adjust_params(self, brand):
        """Load feedback-control parameters for *brand* from SQL Server.

        Falls back to hard-coded defaults when the query fails or returns
        no rows.  Results are stored in self.adjust_params.
        NOTE(review): the SQL is built with str.format — safe only while
        `brand` comes from trusted PLC data; parameterize if that changes.
        """
        sql = """
        SELECT FeedbackN, FeedbackM, FeedbackK, FeedbackS, Tagcode, Min, Max
        FROM ML.dbo.FeedbackValue WHERE Process = 'LD5' AND Batch = '{}'
        """.format(brand)
        # default values
        n, m, k, s, min_1, max_1, min_2, max_2 = 1, 30, 10, 0.7, 130, 140, 130, 140
        try:
            # server, user, password, database
            # NOTE(review): credentials are hard-coded; move to configuration.
            conn = pymssql.connect(server='10.100.100.114',
                                   user='sa',
                                   password='Password01!',
                                   database='ML')
            cursor = conn.cursor()
            cursor.execute(sql)
            rows = cursor.fetchall()
            for row in rows:
                n = int(row[0])
                m = int(row[1])
                k = float(row[2])
                s = float(row[3])
                # Min/Max limits are per temperature tag
                if row[4] == STANDARD_TEMP_1:
                    min_1 = int(row[5])
                    max_1 = int(row[6])
                if row[4] == STANDARD_TEMP_2:
                    min_2 = int(row[5])
                    max_2 = int(row[6])
        except Exception as e:
            logging.error(e)
        self.adjust_params = {
            "n": n,
            "m": m,
            "k": k,
            "s": s,
            "max_1": max_1,
            "min_1": min_1,
            "max_2": max_2,
            "min_2": min_2,
        }
    @classmethod
    def read_standard(cls, brand, default_1, default_2):
        """Fetch the two drying-temperature setpoints for *brand* from CONFIG_URL.

        Returns (error, result): on success (None, {'standard_1', 'standard_2'})
        where each setpoint is the configured value minus 3; on failure the
        error text and None.
        NOTE(review): `(status_code / 100) == 2` uses true division, so only
        exactly 200 is accepted — other 2xx responses fall into the error
        branch; confirm whether all 2xx should pass.
        """
        try:
            # Request the preheat configuration for this brand/workstage
            body = {
                "BrandCode": brand,
                "WorkstageCode": "LD5",
                "TagReads": [
                    STANDARD_TEMP_1,
                    STANDARD_TEMP_2
                ]
            }
            res = requests.post(CONFIG_URL, json=body)
            logging.info('Standard: {} {}'.format(res.status_code, res.text))
            if (res.status_code / 100) == 2:
                json_obj = json.loads(res.text)
                rows = json_obj.get('data').get('Rows')
                standard_1 = default_1
                standard_2 = default_2
                for row in rows:
                    if row.get('TagRead') == STANDARD_TEMP_1:
                        standard_1 = float(row.get('ParmSet')) - 3
                    if row.get('TagRead') == STANDARD_TEMP_2:
                        standard_2 = float(row.get('ParmSet')) - 3
                return None, {
                    'standard_1': standard_1,
                    'standard_2': standard_2
                }
            else:
                return res.text, None
        except Exception as e:
            return str(e), None
    def init_model(self, next_range_1: int, next_range_2: int):
        """Rebuild all stage models and reset per-batch state.

        :param next_range_1: preheat setpoint 1 for the next batch (TailModel)
        :param next_range_2: preheat setpoint 2 for the next batch (TailModel)
        """
        self.head_model = HeadModel()
        self.tail_model = TailModel(next_range_1, next_range_2)
        self.produce_model = LRModel()
        self.transition_model = LRModel()
        # Load the most recent persisted weights for each stage model.
        self.head_model.load(MODEL_SAVE_DIR + load_current_model('head'))
        self.produce_model.load(MODEL_SAVE_DIR + load_current_model('produce'))
        self.transition_model.load(MODEL_SAVE_DIR + load_current_model('transition'))
        # Head (start-of-batch) stage flag
        self.head_flag = False
        # Transition stage flag
        self.transition_flag = False
        # Steady-production stage flag
        self.produce_flag = False
        # Tail (end-of-batch) stage flag
        self.tail_flag = False
        self.humid_after_cut = []  # reset per-batch humidity window
        self.cut_half_full_counter = 0
        self.q = Queue()
        self.counter = 0
    def dispatch(self, df: pd.DataFrame, features: np.array) -> list:
        """Route the current sample to the right stage model and predict.

        Core decision method: inspects the newest row of ``df`` to decide
        which production stage the line is in (head / transition / produce /
        tail), keeps per-batch state in sync, and returns the predicted
        temperatures.

        :param df: one window of samples; the last row is the current moment.
        :param features: feature vector -- only consumed by the produce model.
        :return: list with the two predicted temperatures.
        """
        len_ = len(df)
        if len_ < MIN_DATA_NUM:
            raise Exception('len(originals) MUST >= {}'.format(MIN_DATA_NUM))
        current_data = df.iloc[len_ - 1]  # newest sample
        last_data = df.iloc[len_ - 2]  # sample from the previous second
        current_batch = read_config('current_batch')
        logging.info('Load current batch: {}'.format(current_batch))
        current_brand = current_data[BRADN]
        try:
            # Post-cut moisture: only average samples from the window that
            # starts once input moisture exceeds 17.5 and lasts until the
            # half-full counter reaches 180 (i.e. ~2 min after half-full).
            # Tag: 5H.5H.LD5_KL2226_InputMoisture
            # TODO: rework with a queue
            if float(current_data[HUMID_AFTER_CUT]) > 17.5 and self.cut_half_full_counter < 180:
                self.humid_after_cut.append(float(current_data[HUMID_AFTER_CUT]))
            if current_data[CUT_HALF_FULL]:
                self.cut_half_full_counter += 1
            # Rolling window of pre-drying humidity readings.
            self.q.put(float(current_data[HUMID_BEFORE_DRYING]))
            if self.q.qsize() > MAX_BEFORE_HUMID_SIZE:
                self.q.get()
            # Start of a new batch: refresh standards, models and feedback
            # parameters.
            if not current_batch or current_batch != current_data[BATCH]:
                current_batch = current_data[BATCH]
                save_config('current_batch', current_batch)
                err, standard_obj = self.read_standard(current_brand, DEFAULT_STANDARD_1, DEFAULT_STANDARD_2)
                if not err:
                    logging.info('Get standard success: {}'.format(standard_obj))
                    self.init_model(standard_obj.get('standard_1'), standard_obj.get('standard_2'))
                else:
                    logging.error('Get standard error: {}'.format(err))
                    self.init_model(DEFAULT_STANDARD_1, DEFAULT_STANDARD_2)
                # Read the feedback-control parameters at each batch start.
                self.read_adjust_params(brand=current_brand)
            logging.info('Checkpoint 1 --- Check Stage')
            # Flow just rose past the limit at this sample --> HeadModel
            if float(last_data[FLOW]) < FLOW_LIMIT < float(current_data[FLOW]):
                self.head_flag = True
                self.transition_flag = False
                self.produce_flag = False
                self.tail_flag = False
            # Outlet humidity appeared while still in head stage --> TransitionModel
            if float(current_data[HUMID_AFTER_DRYING]) > HUMID_EPSILON and self.head_flag:
                self.head_flag = False
                self.transition_flag = True
                self.produce_flag = False
                self.tail_flag = False
            # Already producing, or outlet humidity has stabilised --> ProductModel
            if self.produce_flag is True or humid_stable(list(df[HUMID_AFTER_DRYING].values),
                                                         float(criterion[current_brand])):
                self.head_flag = False
                self.transition_flag = False
                self.produce_flag = True
                self.tail_flag = False
            # Flow dropped below the limit while producing --> TailModel
            if FLOW_LIMIT > float(current_data[FLOW]) and self.produce_flag:
                self.head_flag = False
                self.transition_flag = False
                self.produce_flag = False
                self.tail_flag = True
            logging.info('Checkpoint 2 --- Check Stage Default')
            # Fallback: derive the stage from the PLC work-status word when
            # none of the heuristics above fired.
            if not self.head_flag and not self.produce_flag and not self.tail_flag and not self.transition_flag:
                if int(current_data[WORK_STATUS1]) == 32:
                    self.head_flag = False
                    self.transition_flag = False
                    self.produce_flag = True
                    self.tail_flag = False
                elif int(current_data[WORK_STATUS1]) == 16 or int(current_data[WORK_STATUS1]) == 8:
                    self.head_flag = True
                    self.transition_flag = False
                    self.produce_flag = False
                    self.tail_flag = False
                # NOTE(review): the `== 16` test below is unreachable -- 16
                # is already consumed by the branch above. Possibly a
                # different status code was intended here; confirm against
                # the PLC status-word specification.
                elif int(current_data[WORK_STATUS1]) == 16 or int(current_data[WORK_STATUS1]) == 64:
                    self.head_flag = False
                    self.transition_flag = False
                    self.produce_flag = False
                    self.tail_flag = True
                else:
                    raise Exception('Invalid work status. So we will use last 2 temp as current temp. FLOW: {}'.format(
                        current_data[FLOW]))
            logging.info('Checkpoint 3 --- Check Stage Finish')
            if self.head_flag:
                logging.info('Current in Head Model.')
                try:
                    humid_after_cut = sum(self.humid_after_cut) / len(self.humid_after_cut)
                except ZeroDivisionError as e:
                    # No post-cut samples collected yet; fall back to 17.
                    logging.info(
                        'ZeroDivisionError: {}, {}'.format(sum(self.humid_after_cut), len(self.humid_after_cut)))
                    humid_after_cut = 17
                pred = self.head_model.predict(brand=current_data[BRADN],
                                               flow=float(current_data[FLOW]),
                                               humid_after_cut=humid_after_cut,
                                               standard_temp_2=float(current_data[STANDARD_TEMP_2]),
                                               standard_temp_1=float(current_data[STANDARD_TEMP_1]),
                                               last_temp_1=float(current_data[TEMP1]),
                                               last_temp_2=float(current_data[TEMP2]))
                logging.info('Head timer: {}'.format(self.head_model.timer))
                return list(pred)
            if self.transition_flag:
                logging.info('Current in Transition Model.')
                brand = current_data[BRADN]
                input_humid = list(self.q.queue)
                try:
                    input_humid = sum(input_humid) / len(input_humid)
                except ZeroDivisionError as e:
                    logging.info('ZeroDivisionError: {}, {}'.format(sum(input_humid), len(input_humid)))
                    input_humid = 17
                # Temporarily reuse the head model coefficients with an
                # extra 1.15 penalty factor on the humidity term.
                last_temp_1 = float(
                    self.head_model.stable_per_brand[brand][0] + self.head_model.ratio[brand][0]
                    * input_humid * 1.15 + float(current_data[STANDARD_TEMP_1]))
                last_temp_2 = float(
                    self.head_model.stable_per_brand[brand][1] + self.head_model.ratio[brand][1]
                    * input_humid * 1.15 + float(current_data[STANDARD_TEMP_2]))
                return [last_temp_1, last_temp_2]
            if self.produce_flag:
                logging.info('Current in Produce Model.')
                self.counter += 1
                pred = self.produce_model.predict(features)
                return list(pred.ravel())
            if self.tail_flag:
                logging.info('Current in Tail Model.')
                finish, pred = self.tail_model.predict(flow=float(current_data[FLOW]),
                                                       last_temp_1=float(current_data[TEMP1]),
                                                       last_temp_2=float(current_data[TEMP2]))
                # TODO: this end-of-batch logic still needs rework.
                # if finish:
                #     save_config('current_batch', None)
                logging.info('Tail timer: {}, is_finish: {}'.format(self.tail_model.timer, finish))
                return list(pred)
        except Exception as e:
            # Fail-safe: on any error reset the batch marker and echo the
            # last measured temperatures back as the "prediction".
            logging.error(e)
            save_config('current_batch', None)
            return [float(current_data[TEMP1]), float(current_data[TEMP2])]
| Ferrair/qingdao | src/manager/model_manager.py | model_manager.py | py | 14,618 | python | en | code | 0 | github-code | 13 |
14819761483 | import gradio as gr
import numpy as np
import torch
import transformers
from diffusers import StableDiffusionInpaintPipeline
from PIL import Image
from segment_anything import sam_model_registry, SamPredictor
import matplotlib.pyplot as plt
from datetime import datetime
import os
import json
# --- Model setup -------------------------------------------------------------
# Segment Anything (SAM) for interactive, click-driven mask generation.
sam_checkpoint = "sam_vit_h_4b8939.pth"
model_type = "vit_h"
device = "cuda"
sam = sam_model_registry[model_type](checkpoint=sam_checkpoint)
sam.to(device)
predictor = SamPredictor(sam)
# Stable Diffusion 2 inpainting pipeline, loaded in fp16 from a local snapshot.
pipe = StableDiffusionInpaintPipeline.from_pretrained(
    "models--stabilityai--stable-diffusion-2-inpainting/snapshots/6ba40839c3c171123b2b863d16caf023e297abb9", torch_dtype=torch.float16,)
pipe = pipe.to(device)
def plot_points(coords, labels, ax, marker_size=375):
    """Scatter SAM prompt points on *ax*: positives in green, negatives in red.

    :param coords: (N, 2) array of pixel coordinates.
    :param labels: (N,) array of point labels (1 = positive, 0 = negative).
    :param ax: matplotlib axes to draw on.
    :param marker_size: scatter marker size.
    """
    print(f'coords shape: {coords.shape}')
    print(f'labels shape: {labels.shape}')
    positives = coords[labels == 1]
    negatives = coords[labels == 0]
    print(f'pos_points shape: {positives.shape}')
    print(f'neg_points shape: {negatives.shape}')
    for points, point_color in ((positives, 'green'), (negatives, 'red')):
        ax.scatter(points[:, 0], points[:, 1], color=point_color, marker='.',
                   s=marker_size, edgecolor='white', linewidth=1.25)
# Click-prompt state shared by the UI callbacks below.
selected_pixels = []  # (x, y) pixel coordinates of clicks made so far
selected_labels = []  # SAM point labels: 1 = positive/add, 0 = negative/remove
current_label = 1  # label assigned to the next click
# --- UI layout ---------------------------------------------------------------
with gr.Blocks() as demo:
    with gr.Row():
        gr.Markdown("## <center>ReCityGen</center>")
    with gr.Row():
        user_name = gr.Textbox(lines=1, label="User Name",
                               placeholder="Enter User ID")
    with gr.Row():
        input_img_o = gr.Image(label="Input")
    with gr.Row():
        upload_button = gr.Button(label="Upload", value="Upload")
    with gr.Row():
        # Resized input, sketchable mask and the point-overlay plot.
        input_img = gr.Image(label="Resized")
        mask_img = gr.Image(label="Mask", source="upload", tool="sketch")
        mask_img_with_label = gr.Plot()
    with gr.Blocks():
        gr.Markdown("#### Edit Masks")
        with gr.Row():
            add_button = gr.Button(label="Add Mask", value=" + ")
            remove_button = gr.Button(label="Remove Area", value=" - ")
        with gr.Row():
            reset_button = gr.Button(label="Reset", value="Reset")
    with gr.Row():
        output_img = gr.Image(label="Output")
    with gr.Blocks():
        prompt_text = gr.Textbox(lines=1, label="Prompt")
    with gr.Row():
        generateBtn = gr.Button("生成")
    with gr.Row():
        sumbit = gr.Button("提交")
    with gr.Row():
        survey = gr.Textbox(
            label="Survey", placeholder="Survey link: https://www.google.com", disabled=True)
def on_upload_button_clicked(image):
# Convert np.array image to PIL Image
image = Image.fromarray(image)
width, height = image.size
min_dim = min(width, height)
left = (width - min_dim)/2
top = (height - min_dim)/2
right = (width + min_dim)/2
bottom = (height + min_dim)/2
image = image.crop((left, top, right, bottom))
image_resized = image.resize((512, 512),Image.ANTIALIAS)
return image_resized
def on_add_button_clicked():
#print('Add Button Clicked!')
global current_label
current_label = 1
#print(current_label)
def on_remove_button_clicked():
#print('Remove Button Clicked!')
global current_label
current_label = 0
#print(current_label)
def reset_mask(mask_img, output_img):
global selected_pixels, selected_labels
selected_pixels = []
selected_labels = []
mask_img = None
output_img = None
return mask_img, output_img
    def generate_mask(image, evt: gr.SelectData):
        """Record the clicked pixel and re-run SAM over all prompts so far.

        :param image: the displayed image (numpy array).
        :param evt: Gradio select event; ``evt.index`` is the clicked (x, y).
        :return: PIL image of the predicted boolean mask.
        """
        selected_pixels.append(evt.index)
        selected_labels.append(current_label)
        predictor.set_image(image)
        input_points = np.array(selected_pixels)
        input_labels = np.array(selected_labels)
        mask, _, _ = predictor.predict(
            point_coords=input_points,
            point_labels=input_labels,
            multimask_output=False
        )
        # Single-mask output: take the first (only) mask plane.
        mask = Image.fromarray(mask[0, :, :])
        return mask
    def show_points(image):
        """Render *image* with the current click prompts drawn on top.

        :param image: the displayed image.
        :return: matplotlib figure for the gr.Plot component.
        """
        global selected_pixels
        global selected_labels
        input_points = np.array(selected_pixels)
        input_labels = np.array(selected_labels)
        img = np.asarray(image)
        # Set Canvas Size equal to UI Component boundary
        fig = plt.figure(figsize=(5,5))
        plt.imshow(img)
        # Hide Axis
        plt.axis('off')
        plot_points(input_points, input_labels, plt.gca())
        return fig
    def inpaint(image, mask, prompt):
        """Run Stable Diffusion inpainting on the masked region.

        :param image: source image (numpy array).
        :param mask: Gradio sketch dict; the strokes under "image" mark the
            region to repaint.
        :param prompt: text prompt guiding the inpainting.
        :return: the first generated PIL image.
        """
        image = Image.fromarray(image)
        mask = Image.fromarray(mask["image"])
        # The inpainting pipeline expects 512x512 inputs.
        image = image.resize((512, 512))
        mask = mask.resize((512, 512))
        output = pipe(
            prompt=prompt,
            image=image,
            mask_image=mask,
        ).images[0]
        return output
def saveFile(user_name, image, mask, output, prompt):
if user_name == "":
print('Please enter a user name to proceed.')
now = datetime.now()
timestamp = now.strftime("%Y%m%d_%H%M%S")
# create a subdirectory for each user
subdir = f'../../output/{user_name}_{timestamp}'
if not os.path.exists(subdir):
os.makedirs(subdir)
img_filename = f'{subdir}/{user_name}_{timestamp}_input.png'
mask_filename = f'{subdir}/{user_name}_{timestamp}_mask.png'
output_filename = f'{subdir}/{user_name}_{timestamp}_output.png'
prompt_filename = f'{subdir}/{user_name}_{timestamp}_prompt.txt'
Image.fromarray(image).save(img_filename)
Image.fromarray(mask["image"]).save(mask_filename)
Image.fromarray(output).save(output_filename)
with open(prompt_filename, 'w') as f:
f.write(prompt)
# Save user data in JSON file
user_data = {
'user_name': user_name,
'timestamp': timestamp,
'prompt': prompt
}
json_filename = f'../../output/user_data.json'
data = []
# Load the existing data if file exists
if os.path.exists(json_filename):
with open(json_filename, 'r') as f:
data = json.load(f)
# Append new data
data.append(user_data)
# Write data back to file
with open(json_filename, 'w') as f:
json.dump(data, f, indent=4)
upload_button.click(on_upload_button_clicked, [input_img_o], [input_img])
add_button.click(on_add_button_clicked, [], [])
remove_button.click(on_remove_button_clicked, [], [])
reset_button.click(reset_mask, [mask_img, output_img], [mask_img, output_img])
input_img.select(generate_mask,inputs=[input_img],outputs=[mask_img])
input_img.select(show_points, inputs=[input_img],outputs=[mask_img_with_label])
generateBtn.click(inpaint,
inputs=[input_img, mask_img, prompt_text],
outputs=[output_img],
)
sumbit.click(saveFile,
inputs=[user_name, input_img, mask_img, output_img, prompt_text],outputs=[]
)
| Myangsun/Streetview-app | backend/gradioapp.py | gradioapp.py | py | 7,554 | python | en | code | 0 | github-code | 13 |
73292674896 | import yaml
from heat2arm.parser.common.exceptions import TemplateDataException
from heat2arm.parser.cfn import FUNCTIONS as cfn_functions
from heat2arm.parser.cfn import RESOURCE_CLASS as cfn_resource_class
from heat2arm.parser.cfn import CFN_TEMPLATE_FIELDS as cfn_template_fields
from heat2arm.parser.hot import FUNCTIONS as heat_functions
from heat2arm.parser.hot import RESOURCE_CLASS as heat_resource_class
from heat2arm.parser.hot import HEAT_TEMPLATE_FIELDS as heat_template_fields
class Template(object):
    """ Template represents a parsed template in all its entirety.

    It allows the resource and function parsers to access the parameters,
    variables and resource definitions defined within the template.

    When defined, it simply takes the string representing the template,
    loads all of its contents and auto-detects whether the template is in
    CFN or Heat (HOT) format.
    """

    def __init__(self, template):
        """ A template object is created by passing in the string
        representing the template.

        It parses the YAML/JSON contents, detects the template flavor,
        loads the matching fields, functions and resource class, and
        validates/extracts the standard sections.
        """
        # the classic fields of any template:
        self.parameters = None
        self.resources = None
        self.variables = {}
        self._template_fields = {}

        # the list of templating language functions which can be applied:
        self._functions = []

        # NOTE: considering JSON is a subset of YAML since the 1.2
        # version of YAML's specification; we directly use the yaml
        # module for parsing the input template.
        # safe_load avoids arbitrary Python object construction from
        # untrusted template input (plain yaml.load without a Loader is
        # unsafe and deprecated).
        self._template_data = yaml.safe_load(template)

        # check whether we're dealing with a CFN or a Heat template and define
        # the appropriate fields:
        if self._test_template_data(cfn_template_fields.values()):
            # then, load the CFN-style fields, functions and Resource class:
            self._template_fields = cfn_template_fields
            self._init_functions(cfn_functions)
            self._resource_class = cfn_resource_class
        elif self._test_template_data(heat_template_fields.values()):
            # then, load the Heat-style fields, functions and Resource class:
            self._template_fields = heat_template_fields
            self._init_functions(heat_functions)
            self._resource_class = heat_resource_class
        else:
            raise TemplateDataException(
                "Template has none of the expected fields: '%s'",
                self._template_data
            )

        self._validate_template_data()

        # extract our required fields:
        self.parameters = self._template_data[self._template_fields[
            "parameters"
        ]]
        self.resources = self._template_data[self._template_fields[
            "resources"
        ]]
        self.variables = self._template_data.get(
            self._template_fields["variables"], {}
        )

        # NOTE: we pop out the outputs section of the template to ease parsing,
        # as it's useless to the translation process anyhow:
        if self._template_fields["outputs"] in self._template_data:
            self._template_data.pop(self._template_fields["outputs"])

    def reduce_functions(self):
        """ reduce_functions reduces (applies) all the functions from within
        the template's data.
        """
        self._template_data = self._reduce_functions(self._template_data)

    def parse_resources(self):
        """ parse_resources instantiates all the resource classes from the
        resource data from within the template and returns their dict,
        keyed by resource name.
        """
        return {
            name: self._resource_class(name, data) for name, data in
            self.resources.items()
        }

    def _validate_template_data(self):
        """ _validate_template_data is a helper method which checks for the
        bare minimal set of fields for the data to be considered a template.
        """
        mandatories = {
            self._template_fields["parameters"],
            self._template_fields["resources"]
        }

        for field in mandatories:
            if field not in self._template_data:
                raise TemplateDataException(
                    "Missing template field '%s'." % field
                )
            if not isinstance(self._template_data[field], dict):
                raise TemplateDataException(
                    "Template field '%s' must be a dict, got: '%s'" % (
                        field,
                        self._template_data[field]
                    )
                )

    def _test_template_data(self, expected_fields):
        """ _test_template_data is a helper method which, provided a list of
        expected fields, returns True when the loaded data's top-level keys
        are a subset of them (i.e. it looks like a template of that flavor).
        """
        if not isinstance(self._template_data, dict):
            raise TemplateDataException(
                "Top level of template is '%s', not 'dict': '%s'.",
                type(self._template_data),
                self._template_data
            )
        template_keys = self._template_data.keys()
        return set(template_keys).issubset(expected_fields)

    def _init_functions(self, functions):
        """ _init_functions takes a list of classes which represent templating
        language functions and instantiates them, indexed by function name.
        """
        self._functions = {func.name: func(self) for func in functions}

    def _apply_function(self, data):
        """ _apply_function is a helper method which, given a dict, determines
        whether it is a function application and returns the result of that
        application (or the dict unchanged).
        """
        if len(data) != 1:
            # it means it's not a function and we return as-is:
            return data

        for key, val in data.items():
            # check if it is a function:
            if key in self._functions:
                # if so, return the result of applying the function:
                return self._functions[key].apply(val)

        # else, it means it's not a function and we return as-is:
        return data

    def _reduce_functions(self, obj):
        """ _reduce_functions takes the whole template and recursively
        traverses it, applying all the templating functions depth-first.

        Depth-first order is necessary to ensure consistency for
        outer-functions such as join, which must not see nested,
        not-yet-applied functions.
        """
        is_list = isinstance(obj, list)
        is_dict = isinstance(obj, dict)

        if not (is_list or is_dict):
            # it means it's an uninteresting object and we can just return it:
            return obj

        if is_list:
            # apply to each element of the list:
            return [self._reduce_functions(item) for item in obj]

        if is_dict:
            # reduce each element of the dict before reducing the whole:
            for key, val in obj.items():
                obj[key] = self._reduce_functions(val)
            return self._apply_function(obj)
| cloudbase/heat2arm | heat2arm/parser/template.py | template.py | py | 7,112 | python | en | code | 7 | github-code | 13 |
24490023845 | """Write a Python program for department library which has N books, write functions for
following:
a) Delete the duplicate entries
b) Display books in ascending order based on cost of books
c) Count number of books with cost more than 500.
d) Copy books in a new list which has cost less than 500."""
# Catalogue state shared by the report functions below. Storing books in a
# dict keyed by title means duplicate entries are collapsed automatically.
library = {}
books = []
costs = []

n = int(input("Enter the number of books in the Library"))
for _ in range(n):
    book = input("Enter the name of the book")
    cost = int(input("Enter the cost of the book"))
    library[book] = cost
    books.append(book)
    costs.append(cost)

for title, price in zip(books, costs):
    # Bug fix: the original formatted the whole `books`/`costs` lists on
    # every iteration instead of the current (title, price) pair.
    print("{} is available for {}".format(title, price))
print("***********************************")
def deleteDuplicates():
    # NOTE(review): despite the name, this only *displays* the catalogue.
    # Duplicate titles are already collapsed because `library` is a dict
    # keyed by book name.
    for i in library.items():
        print("This library has :", i)
    print("***********************************")
def ascOrder():
    """Print the library entries sorted by cost, cheapest first."""
    cheapest_first = sorted(library.items(), key=lambda entry: entry[1])
    print("Sorted library :")
    for entry in cheapest_first:
        print(entry)
    print("***********************************")
def moreThan500():
    """Print the books priced above 500 together with their count."""
    highcost500 = [(title, price)
                   for title, price in zip(books, costs) if price > 500]
    print("The following books cost more than 500: {}".format(highcost500))
    print("There are {} books in this list".format(len(highcost500)))
    print("***********************************")
def lessThan500():
    """Print the books priced below 500 together with their count."""
    lowcost500 = [(title, price)
                  for title, price in zip(books, costs) if price < 500]
    print("The following books cost less than 500", lowcost500)
    print("There are {} books in this list".format(len(lowcost500)))
    print("***********************************")
# Report pipeline: full (de-duplicated) catalogue, the cost-sorted view,
# then the >500 and <500 breakdowns.
deleteDuplicates()
ascOrder()
moreThan500()
lessThan500()
| AditiMooley/BasicLearning | library_books.py | library_books.py | py | 2,149 | python | en | code | 0 | github-code | 13 |
44813293762 | from selenium import webdriver
from lib import random_num, random_date, save_image, scroll_down, croll_data_to_csv
from PIL import Image
import csv
import time
import re
category_ids = ["100571", "100610", "100615"]
def app():
    """Crawl product listings per category from tohome.thehyundai.com and
    save one CSV row plus a detail-section screenshot per product."""
    input_prod_id = 1
    error_count = 0
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--no-sandbox")
    chrome_options.add_argument("--disable-dev-shm-usage")
    driver = webdriver.Chrome(
        "C:\Dev\chromedriver_win32\chromedriver.exe", chrome_options=chrome_options)
    # Visit each category listing page.
    for id in category_ids:
        driver.get(
            f"https://tohome.thehyundai.com/front/dp/dpc/ctgrList.do?highCtgrId={id}")
        driver.implicitly_wait(1)
        head_name = driver.find_element_by_css_selector(
            'section.categorylist > div.depth > h2').text
        # Scroll to the bottom so all items get lazily loaded.
        scroll_down.infinite_loop(driver)
        prod_lis = [link.find_element_by_css_selector(
            "li > a").get_attribute('href') for link in driver.find_elements_by_css_selector("#ulItemList > li")]
        # Open each product page and scrape its details.
        for prod_li in prod_lis:
            try:
                # The anchors carry a JS handler instead of a plain URL, so
                # pull the fnProductDetailMove(...) parameters out with a
                # regex and rebuild the detail-page link.
                t = re.findall(
                    r"'.*'", prod_li[prod_li.find("fnProductDetailMove"):])
                params = [string.strip("'") for string in t[0].split(',')]
                # NOTE(review): the "§Id=" fragment below looks like a
                # mangled "&sectId=" (HTML entity corruption) -- verify
                # against the site's actual query-string parameters.
                next_href = f"https://tohome.thehyundai.com/front/pd/pdd/productDetail.do?slitmCd={params[0]}§Id={params[1]}&ctgrId={params[2]}"
                driver.get(next_href)
                # Brief pause so the page settles before scraping.
                time.sleep(0.5)
                # ---- scrape the data ----
                prod_id = input_prod_id
                shop_name = driver.find_element_by_css_selector(
                    "#brand_section > a").text
                prod_name = driver.find_element_by_css_selector(
                    ".proinfo > h2 > strong").text
                prod_price = ''
                try:
                    # Discounted items show the original price struck out.
                    prod_price = driver.find_element_by_css_selector(
                        "#price_section > .txt-price > del").text
                except:
                    prod_price = driver.find_element_by_css_selector(
                        "#price_section > .txt-price > strong > em").text
                url = driver.find_element_by_css_selector(
                    "div.propicbig > div > img").get_attribute('src')
                category_id = id
                subscribe_yn = random_num.make_random_num(1)
                # First <dd> is the origin when the first <dt> reads
                # '원산지' ("country of origin"); pack type follows it.
                pack_type = driver.find_elements_by_css_selector(
                    ".detailinfo > dl > dd")
                if driver.find_element_by_css_selector(
                        ".detailinfo > dl > dt").text == '원산지':
                    pack_type = pack_type[1].text
                else:
                    pack_type = pack_type[0].text
                prod_stock = random_num.make_random_num2(3, 11)
                prod_info = prod_id
                admin_no = "0707"
                create_date = random_date.make_random_date(150)
                # ---- end of scraping ----
                # Persist one row to CSV.
                croll_data_to_csv.save_csv(
                    prod_id,
                    shop_name,
                    prod_name,
                    prod_price,
                    url,
                    category_id,
                    subscribe_yn,
                    pack_type,
                    prod_stock,
                    prod_info,
                    admin_no,
                    create_date,
                    head_name
                )
                img_out_path = f"out/{category_id}/{prod_id}.png"
                # Screenshot of the full product-description page.
                width = driver.execute_script(
                    "return document.body.scrollWidth")  # max scrollable width
                height = driver.execute_script(
                    "return document.body.scrollHeight")  # max scrollable height
                driver.set_window_size(width, height)  # cover the whole scrollable area
                driver.save_screenshot(img_out_path)
                element = driver.find_element_by_css_selector(
                    "#p_proDetail > .detailcont > .speedycat_container_root_class")
                save_image.save_prod_detail_image(element, img_out_path)
                input_prod_id += 1
                print("success! category : {} prod_id {}".format(
                    head_name, prod_id))
            except Exception as e:
                # Count and report failures but keep crawling.
                error_count += 1
                print("error count is " + str(error_count))
                print(e)
    driver.close()
# Run the crawler when executed as a script.
if __name__ == "__main__":
    app()
| parkyeomyeong/HyeoDai_tohome_crolling | main_app.py | main_app.py | py | 5,191 | python | en | code | 0 | github-code | 13 |
19093595824 | #!/usr/local/bin/python3
from weakref import WeakKeyDictionary
class Grade(object):
    """Descriptor storing a 0-100 grade per owning instance.

    Values live in a WeakKeyDictionary keyed by the owning instance, so
    instances don't share state through the class attribute and grades are
    released once an instance is garbage collected.
    """

    def __init__(self):
        self._values = WeakKeyDictionary()

    def __get__(self, instance, instance_type):
        print("__get__: %r, %r" % (instance, instance_type))
        if instance is None:
            # Accessed on the class itself -> return the descriptor.
            return self
        return self._values.get(instance, 0)

    def __set__(self, instance, value):
        print("__set__: %r, %r" % (instance, value))
        if not (0 <= value <= 100):
            # Bug fix: the original message had a stray bare '%' instead of
            # a real placeholder ("Grade % must be between 0 and 100").
            raise ValueError("Grade %r must be between 0 and 100" % value)
        print("Before: ", self._values)
        self._values[instance] = value
        print("After: ", self._values)
class Exam(object):
    # NOTE: these are class attributes! Each is one shared Grade descriptor;
    # per-instance values are stored inside the descriptor itself (in its
    # WeakKeyDictionary, keyed by the Exam instance), not on the instance.
    math = Grade()
    science = Grade()
    history = Grade()
if __name__ == "__main__":
    print("Testing 'Descriptors' concept for Grade class")
    exam1 = Exam()
    exam1.math = 99
    # NOTE: THIS IS ACCESSED AS
    # Exam.__dict__['math'].__set__(exam1, 99)
    # and when reading the attribute
    # Exam.__dict__['math'].__get__(exam1, Exam)
    exam1.science = 92
    exam1.history = 88
    print("Exam1 grades -> math: %d, science: %d, history: %d" % (exam1.math,
                                                                  exam1.science,
                                                                  exam1.history))
    # Instance __dict__ stays empty -- values live in the descriptors.
    print(exam1.__dict__)
    exam2 = Exam()
    exam2.math = 91
    exam2.science = 93
    exam2.history = 95
    print("Exam2 grades -> math: %d, science: %d, history: %d" % (exam2.math,
                                                                  exam2.science,
                                                                  exam2.history))
    print(exam2.__dict__)
    print(Exam.__dict__)
    print(Grade.__dict__)
    # assert exam1.math is not exam2.math
    # NOTE(review): 500 is outside [0, 100], so this final assignment raises
    # ValueError and ends the demo -- presumably intentional, to show the
    # descriptor's validation. Confirm.
    exam1.history = 500
| jkaria/coding-practice | python3/grade_descriptor.py | grade_descriptor.py | py | 1,909 | python | en | code | 0 | github-code | 13 |
28596131220 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 3 16:01:17 2020
@author: Xuheng Ding
You can skip this step if the QSO stamp, noise level and the PSF is ready.
"""
#photutils in version 0.7.2
#astropy in version astropy-4.0.1
import numpy as np
import matplotlib.pyplot as plt
import astropy.io.fits as pyfits
from matplotlib.colors import LogNorm
from decomprofile.tools_data.astro_tools import plt_fits
# Load the drizzled HST frame and display it.
fitsFile = pyfits.open('../example_data/HST/QSO/1147_final_drz.fits')
img = fitsFile[1].data # science image; check the background
plt_fits(img, figsize=(15,15))

#%%
import decomprofile.tools_data.astro_tools as astro_tools
print(astro_tools.read_fits_exp(fitsFile), astro_tools.read_pixel_scale(fitsFile,frame=0)) #Read the exposure time and pixel scale.

#%% Estimate and subtract the background light.
from decomprofile.tools_data.measure_tools import measure_bkg
bkglight = measure_bkg(img, if_plot=True)
img = img - bkglight #remove the bkglight

#%% Cut out the QSO stamp, recentering on a Gaussian-smoothed peak.
from decomprofile.tools_data.cutout_tools import cut_center_auto
QSO_loc = [1135, 648]  # The position of the QSO in the frame
QSO_img, QSO_center_pos = cut_center_auto(image=img, center= QSO_loc,
                                          kernel = 'center_gaussian', radius=60,
                                          return_center=True, if_plot=True)
plt_fits(QSO_img, colorbar = True)

#%%Create the surface-brightness (SB) profile of the QSO:
from decomprofile.tools_data.measure_tools import SB_profile, esti_bgkstd
r_SB, r_grids = SB_profile(QSO_img, center = [(len(QSO_img)-1)/2]*2 , radius=20,
                           grids=50, x_gridspace='log',if_annuli=True, if_plot=True, fits_plot = True)
std = esti_bgkstd(QSO_img, if_plot=True)

#%%Test the way to create the mask for the QSO:
from decomprofile.tools_data.measure_tools import detect_obj, mask_obj
apertures = detect_obj(QSO_img, if_plot=True)
select_mask_idx = [0,1,3]  # indices of detected objects to mask
apertures = [apertures[i] for i in select_mask_idx]
mask_list = mask_obj(QSO_img, apertures, if_plot=False)
# Combine the per-object masks into one multiplicative mask.
mask = np.ones_like(QSO_img)
for i in range(len(mask_list)):
    mask *= mask_list[i]
plt_fits(mask, colorbar = False)

#%%Auto find the PSF stars in the frame.
from decomprofile.tools_data.measure_tools import search_local_max, measure_FWHM
from decomprofile.tools_data.cutout_tools import cutout
init_PSF_locs = search_local_max(img)
init_PSF_locs = np.array(init_PSF_locs)
FWHMs, fluxs = [], []
for i in range(len(init_PSF_locs)):
    cut_image = cut_center_auto(img, center = init_PSF_locs[i], radius=60)
    FWHMs.append(np.mean(measure_FWHM(cut_image , radius = 10)))
    fluxs.append(np.sum(cut_image))
FWHMs = np.array(FWHMs)
fluxs = np.array(fluxs)
select_bool = (FWHMs<4.2)*(fluxs<5000)*(fluxs>200)  # A threshold to rule out the PSFs that are too broad/bright/faint.
PSF_locs = init_PSF_locs[select_bool]

#%% Inspect the candidates, then hand-pick the final PSFs.
for i in range(len(PSF_locs)):
    cut_image = cut_center_auto(img, center = PSF_locs[i], kernel = 'center_gaussian', radius=60)
    print('PSF location:', PSF_locs[i])
    print('id:', i, 'FWHMs:', np.round(measure_FWHM(cut_image , radius = 10),3), 'flux:', round(np.sum(cut_image),) )
    plt_fits(cut_image)
select = [3, 4, 5, 10]  # hand-picked candidate ids from the inspection above
PSF_locs_final = [PSF_locs[i] for i in select]
PSF_lists = [cut_center_auto(img, center = PSF_locs[i], kernel = 'center_gaussian', radius=50) for i in select]
# Compare the QSO and PSF radial profiles (normalised at pixel 5).
from decomprofile.tools_data.measure_tools import profiles_compare
profiles_compare([QSO_img] + PSF_lists, x_gridspace= 'log', norm_pix = 5, if_annuli=True, y_log = False,
                 prf_name_list = (['QSO'] + ['PSF{0}'.format(i) for i in range(len(PSF_lists))]))
from decomprofile.tools_data.cutout_tools import plot_overview
plot_overview(img, center_QSO= QSO_center_pos, c_psf_list=PSF_locs_final)
# Write the QSO stamp and the selected PSF cutouts to FITS files.
pyfits.PrimaryHDU(QSO_img).writeto('QSO_image.fits', overwrite=True)
for i in range(len(PSF_locs_final)):
    PSF_cut = cutout(img, center = PSF_locs_final[i], radius=50)
    pyfits.PrimaryHDU(PSF_cut).writeto('PSF{0}.fits'.format(i),overwrite=True)
| dartoon/my_code | package_code/galight_example/data_prep_example.py | data_prep_example.py | py | 3,956 | python | en | code | 0 | github-code | 13 |
# Advent of Code 2018, day 2, part 1.
# checksum = (# of IDs with some letter appearing exactly twice)
#          * (# of IDs with some letter appearing exactly three times)
from collections import Counter

two_char = 0  # number of ids that contain exactly 2 of some letter
three_char = 0  # number of ids that contain exactly 3 of some letter

# The with-block guarantees the file handle is closed (the original used a
# bare open() that was never closed); Counter replaces the hand-rolled
# seen-characters histogram.
with open("box_list.txt") as box_ids:
    for line in box_ids:
        letter_counts = Counter(line.strip()).values()
        if 2 in letter_counts:
            two_char += 1
        if 3 in letter_counts:
            three_char += 1

checksum = two_char * three_char
print("Two chars: {}\nThree chars: {}\nChecksum: {}".format(two_char, three_char, checksum))
10902470603 | from __future__ import annotations
import copy
import typing
from enum import Enum
from typing import Iterator, List, Optional
Position3D = typing.Tuple[float, float, float]
Velocity3D = typing.Tuple[float, float, float]
TimeStamps = typing.List[float]
LaunchParameter = typing.Tuple[float, ...]
PositionTrajectory = typing.List[Position3D]
VelocityTrajectory = typing.List[Velocity3D]
class TrajectoryData:
    """Data structure for storing and processing of trajectory data.

    TrajectoryData stores by default the launch parameters, time stamps,
    positions and velocities.

    Stored variables can be fetched similar to dicts via their keys or
    directly as attributes of the object.
    """

    def __init__(self, trajectory_data: TrajectoryData = None) -> None:
        """Initiates trajectory data, optionally copying an existing object.

        Args:
            trajectory_data (TrajectoryData, optional): Optionally the new
                TrajectoryData object copies time stamps, positions,
                velocities and launch parameters from an existing
                TrajectoryData object. Defaults to None.
        """
        # Time of the first accepted sample; all stored stamps are relative
        # to it. None until the first sample arrives.
        self.start_time: Optional[float] = None
        # Bug fix: the original used `if trajectory_data:`, which goes
        # through __len__ -- an *empty* source trajectory (possibly with
        # launch parameters set) was silently not copied.
        if trajectory_data is not None:
            self.time_stamps: TimeStamps = trajectory_data.time_stamps.copy()
            self.positions: PositionTrajectory = trajectory_data.positions.copy()
            self.velocities: VelocityTrajectory = trajectory_data.velocities.copy()
            self.launch_param: LaunchParameter = trajectory_data.launch_param
        else:
            self.time_stamps = []
            self.positions = []
            self.velocities = []
            self.launch_param = ()

    def __getitem__(self, key: typing.Any) -> typing.Any:
        """Dict-style read access to a stored attribute.

        Args:
            key (typing.Any): Attribute name.

        Returns:
            The stored attribute.
        """
        return getattr(self, key)

    def __len__(self) -> int:
        """Number of stored position samples.

        Returns:
            int: Number of stored samples.
        """
        return len(self.positions)

    def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
        """Dict-style write access to a stored attribute.

        Args:
            key (typing.Any): Attribute name.
            value (typing.Any): Attribute value.
        """
        setattr(self, key, value)

    def __contains__(self, key: typing.Any) -> bool:
        """True if the attribute *key* exists on this object.

        Args:
            key (typing.Any): Attribute name.

        Returns:
            bool: True, if the attribute is available.
        """
        return hasattr(self, key)

    def set_launch_param(self, value: LaunchParameter) -> None:
        """Sets launch parameters of the trajectory data.

        Args:
            value (LaunchParameter): Launch parameters of the trajectory.
        """
        self.launch_param = value

    def reset(self) -> None:
        """Clears all samples stored in the trajectory data."""
        self.start_time = None
        self.time_stamps = []
        self.positions = []
        self.velocities = []

    def append_sample(
        self,
        ball_id: int,
        time_stamp: float,
        position: Position3D,
        velocity: Velocity3D = None,
    ) -> None:
        """Append a single measurement sample of the trajectory.

        Samples with a negative ``ball_id`` are ignored. The first accepted
        sample defines ``start_time``; stored stamps are relative to it.

        Args:
            ball_id (int): ID of the measurement system; negative IDs are
                dropped.
            time_stamp (float): Absolute time stamp of the measurement.
            position (Position3D): Position tuple.
            velocity (Velocity3D, optional): Velocity tuple.
                Defaults to None.
        """
        if ball_id < 0:
            return
        if self.start_time is None:
            self.start_time = time_stamp
        self.time_stamps.append(time_stamp - self.start_time)
        self.positions.append(position)
        # Compare against None (not truthiness) so that a valid-but-falsy
        # velocity value is still stored.
        if velocity is not None:
            self.velocities.append(velocity)

    def set_full_trajectory(
        self,
        time_stamps: TimeStamps,
        positions: PositionTrajectory,
        velocities: VelocityTrajectory = None,
        launch_parameters: LaunchParameter = None,
    ) -> None:
        """Set a complete trajectory with a single call.

        Args:
            time_stamps (TimeStamps): Time stamps of the trajectory.
            positions (PositionTrajectory): Positions as tuple trajectory.
            velocities (VelocityTrajectory, optional): Velocities.
                Defaults to None.
            launch_parameters (LaunchParameter, optional): Launch parameters
                of the ball trajectory. Defaults to None.
        """
        self.time_stamps = time_stamps
        self.positions = positions
        if velocities is not None:
            self.velocities = velocities
        if launch_parameters is not None:
            self.set_launch_param(launch_parameters)
class TrajectoryCollection:
    """Collection of trajectory data objects which acts like a list."""

    # Quoted forward references: the unquoted annotations were evaluated at
    # class-body execution time, before the class name existed (NameError).
    def __init__(
        self, trajectory_collection: Optional["TrajectoryCollection"] = None
    ) -> None:
        """Generates trajectory collection.

        Can be initialised with another trajectory collection (or a plain
        list), copying the items to the new collection without dependency.

        Args:
            trajectory_collection (Optional[TrajectoryCollection], optional):
                Template collection. Defaults to None.

        Raises:
            TypeError: Thrown if the given value is neither a
                TrajectoryCollection nor a list.
        """
        if trajectory_collection is None:
            self._collection: List["TrajectoryData"] = []
        elif isinstance(trajectory_collection, TrajectoryCollection):
            self._collection = copy.deepcopy(trajectory_collection.get_collection())
        elif isinstance(trajectory_collection, list):
            self._collection = copy.deepcopy(trajectory_collection)
        else:
            raise TypeError(
                "List or trajectory_collection expected,"
                f"{type(trajectory_collection)} received."
            )

    def __len__(self) -> int:
        """Magic function enabling len-functionality.

        Returns:
            int: Number of collection items stored.
        """
        return len(self._collection)

    def __getitem__(self, index: int) -> "TrajectoryCollection":
        """Magic function enabling squared brackets functionality.

        Note that both slice and int access return a *copy* of the items,
        wrapped in a new TrajectoryCollection (the constructor deepcopies).

        Args:
            index (int): Index or slice.

        Raises:
            TypeError: Raised if input is neither slice nor int.

        Returns:
            TrajectoryCollection: Slice or item of collection.
        """
        if isinstance(index, slice):
            # Pass the slice object through so a step value is honoured;
            # the previous [start:stop] form silently dropped it.
            return TrajectoryCollection(self._collection[index])
        elif isinstance(index, int):
            return TrajectoryCollection([self._collection[index]])
        else:
            raise TypeError("Invalid argument type.")

    def __iter__(self) -> Iterator["TrajectoryData"]:
        """Magic function for for-loop functionality.

        Yields:
            Iterator[TrajectoryData]: Iterates over the collection list.
        """
        return iter(self._collection)

    def __contains__(self, value: "TrajectoryData") -> bool:
        """Magic function used for `in` checks.

        Args:
            value (TrajectoryData): Candidate item.

        Returns:
            bool: True if the item is in the collection.
        """
        return value in self._collection

    def __setitem__(self, index: int, value: "TrajectoryData") -> None:
        # Replaces the element at the given index in place.
        self._collection[index] = value

    def append(self, value: "TrajectoryData") -> None:
        """Append trajectory data object to trajectory collection.

        Args:
            value (TrajectoryData): trajectory data object.
        """
        assert isinstance(value, TrajectoryData)
        self._collection.append(value)

    def clear_collection(self) -> None:
        """Clears trajectory collection."""
        self._collection = []

    def get_collection(self) -> List["TrajectoryData"]:
        """Returns collection items as list.

        Returns:
            List[TrajectoryData]: Trajectory data objects.
        """
        return self._collection

    def get_item(self, index: int) -> "TrajectoryData":
        """Gets trajectory data object from specified index.

        Args:
            index (int): List index of collection element.

        Returns:
            TrajectoryData: Collection element.
        """
        return self._collection[index]

    def deepcopy(self) -> "TrajectoryCollection":
        """Returns copy of trajectory collection without dependencies.

        Returns:
            TrajectoryCollection: Copied trajectory collection.
        """
        # The constructor already deepcopies a list argument; the previous
        # extra copy.deepcopy around it duplicated the work.
        return TrajectoryCollection(self._collection)

    def delete_item(self, index: int) -> None:
        """Deletes collection element from specified index.

        Args:
            index (int): Deletion index.
        """
        del self._collection[index]
class TransformTimeUnits(Enum):
    # Time-unit scale factors.
    # NOTE(review): the values look like multipliers relative to a nanosecond
    # base (ns * 1e-9 -> s, ns * 1e-6 -> ms, ...) — confirm against call sites.
    Seconds = 1e-9
    Milliseconds = 1e-6
    Microseconds = 1e-3
    Nanoseconds = 1e-0
| intelligent-soft-robots/aimy_target_shooting | aimy_target_shooting/custom_types.py | custom_types.py | py | 9,510 | python | en | code | 0 | github-code | 13 |
class LoggerNames:
    """An "enum"-style class for the available logger names."""

    CONTROLLER_C = "Controller_Component"
    EXPERIMENT_C = "Experiment_Component"
    INPUT_C = "Input_Component"
    CONFIGINPUT_C = "ConfigInput_Component"
    LOGGER_C = "Logger_Component"
    # NOTE(review): duplicates LOGGER_C's value and breaks the UPPER_SNAKE_CASE
    # convention — likely intended as OUTPUT_C = "Output_Component".  Value is
    # left unchanged because callers may rely on it; confirm before renaming.
    Output_C = "Logger_Component"
42016605861 | from typing import Dict, Tuple, Sequence, Any, Union
import numpy as np
import torch
from torch.utils.data import Dataset, TensorDataset, DataLoader
from ._force2d import force2d
DataTuple = None
def split_x_or_u(
    x_or_u: torch.Tensor,
    dim_x: int
) -> Tuple[torch.Tensor, torch.Tensor]:
    """Split a stacked x-or-u batch back into separate x and u tensors.

    Assumes each row is laid out as [x-part | u-part | flag], with the last
    column equal to 0 for x rows and 1 for u rows (the layout produced by
    make_x_or_u).
    """
    select = {
        'x': x_or_u[:, -1] == 0,
        'u': x_or_u[:, -1] == 1,
    }
    # x rows keep the first dim_x columns; u rows keep the columns between
    # the x block and the trailing flag column.
    x = x_or_u[select['x'], :dim_x]
    u = x_or_u[select['u'], dim_x:-1]
    return x, u
def split_u_or_y(
    u_or_y: torch.Tensor,
    dim_u: int
) -> Tuple[torch.Tensor, torch.Tensor]:
    """Split a stacked u-or-y batch back into separate u and y tensors.

    Assumes each row is laid out as [u-part | y-part | flag], with the last
    column equal to 0 for u rows and 1 for y rows (the layout produced by
    make_u_or_y).  Note that y is read from the single column at index
    dim_u, so the returned y is 1-D (one scalar target per y row).
    """
    select = {
        'u': u_or_y[:, -1] == 0,
        'y': u_or_y[:, -1] == 1
    }
    u = u_or_y[select['u'], :dim_u]
    y = u_or_y[select['y'], dim_u]
    return u, y
def make_x_or_u(x: torch.Tensor, u: torch.Tensor) -> torch.Tensor:
    """Stack x and u into one tensor with a trailing indicator column.

    x rows are zero-padded in the u slots and flagged 0; u rows are
    zero-padded in the x slots and flagged 1.  Inverse of split_x_or_u.
    """
    return torch.cat([
        torch.cat([x, torch.zeros([x.shape[0], u.shape[1]]), torch.zeros([x.shape[0], 1])], dim=1),
        torch.cat([torch.zeros([u.shape[0], x.shape[1]]), u, torch.ones([u.shape[0], 1])], dim=1),
    ], dim=0)
def make_u_or_y(u: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    """Stack u and y into one tensor with a trailing indicator column.

    A 1-D y is first promoted to a column vector.  u rows are zero-padded
    in the y slots and flagged 0; y rows are zero-padded in the u slots and
    flagged 1.  Inverse of split_u_or_y.
    """
    if len(y.shape) == 1:
        y = y[:, None]
    return torch.cat([
        torch.cat([u, torch.zeros([u.shape[0], y.shape[1]]), torch.zeros([u.shape[0], 1])], dim=1),
        torch.cat([torch.zeros([y.shape[0], u.shape[1]]), y, torch.ones([y.shape[0], 1])], dim=1),
    ], dim=0)
def make_dataloader(data, batch_size, shuffle, pin_memory):
    # type: (DataTuple, int, bool, bool) -> DataLoader[Tuple[torch.Tensor, ...]]
    """Normalize `data` into a DataLoader.

    Accepts an existing DataLoader (returned unchanged — the remaining
    arguments are ignored in that case), a Dataset, or an (inputs, targets)
    pair of tensors which is wrapped in a TensorDataset.

    Raises:
        ValueError: for any other input type.
    """
    if isinstance(data, DataLoader):
        return data
    elif isinstance(data, Dataset):
        dataset = data
    elif isinstance(data, tuple) and len(data) == 2 \
            and isinstance(data[0], torch.Tensor) \
            and isinstance(data[1], torch.Tensor):
        dataset = TensorDataset(data[0], data[1])
    else:
        raise ValueError("make_dataloader only accepts torch.utils.data.DataLoader, torch.utils.data.Dataset, or a pair of torch.Tensor.")
    return DataLoader(dataset,
                      batch_size=batch_size,
                      shuffle=shuffle,
                      pin_memory=pin_memory)
| i-yamane/mediated_uncoupled_learning | mu_learning/utils/_make_and_split.py | _make_and_split.py | py | 2,601 | python | en | code | 2 | github-code | 13 |
31237000954 | from discord.ext import commands, tasks
import discord
import json
from loguru import logger
import asyncio
from bs4 import BeautifulSoup
import aiohttp
import sys
from utils.utils import *
from utils.data import *
import math
import datetime
# Server-wide configuration (footer text, banner/chest image URLs, scrape
# links), loaded once at import time and read throughout this cog.
with open('data/server.json') as d:
    server = json.load(d)
class Game(commands.Cog):
    """Cog with game-related commands: guild-war rankings, hero encyclopedia
    look-ups, hero comparison and chest/medal calculators."""

    def __init__(self, client):
        self.client = client
        # Data() wraps the bot's cached/stored data (guild-war cache, emojis,
        # images, hero types, fortune chest state).
        self.data = Data()

    @commands.Cog.listener()
    async def on_ready(self):
        sys.setrecursionlimit(10000)
        # Delay the first scrape, then start the periodic guild-war cache loop.
        await asyncio.sleep(1800)
        await self.cacheGuildWar.start()

    @commands.command(aliases=["gw"])
    async def guildWar(self, ctx, guild):
        # gets guild war stats for guild
        try:
            logger.info(f"Getting guild war stats for {guild}.")
            rankings = self.data.getGuildWar()
            isGuild = False
            # Cached names look like "[TAG] Name"; characters 1-3 are the tag.
            for rank in range(1, len(rankings)):
                if rankings[str(rank)][0][1:4] == guild:
                    guild = rankings[str(rank)][0]
                    score = rankings[str(rank)][1]
                    isGuild = True
                    break
            if not isGuild:
                return await ctx.send("```Guild not found! Make sure you have the right guild tag and the capitalization is correct.```")
            embed = discord.Embed(title=f"⚔️ Guild War Ranking")
            embed.add_field(name=guild, value=f"Ranking: #{rank}\nScore: {score}")
            embed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            embed.set_image(url=server["general"]["guildWarBannerUrl"])
            await ctx.send(embed=embed)
            await logUsage(f"Guild war stats for {guild} requested by @{ctx.author.name}.", self.client)
        except Exception as e:
            await handleException(e, self.client)

    @commands.command(aliases=["gwr"])
    async def guildWarRaiders(self, ctx):
        # gets guild war stats for raider guilds
        try:
            logger.info("Getting guild war stats for Raider guilds.")
            rankings = self.data.getGuildWar()
            text = "```"
            for rank in range(1, len(rankings)):
                # Raider guilds all carry the "rdr" tag.
                if rankings[str(rank)][0][1:4].lower() == "rdr":
                    spaces1 = " " * (5 - len(str(rank)))
                    text += f"\n# {rank}{spaces1}{rankings[str(rank)][0]}\n Score: {rankings[str(rank)][1]}"
            text += "```"
            embed = discord.Embed(title="⚔️ Raiders Guild War Rankings")
            embed.add_field(name="** **", value=text)
            embed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            embed.set_image(url=server["general"]["bannerUrl"])
            await ctx.send(embed=embed)
            await logUsage(f"Guild war stats for Raiders requested by @{ctx.author.name}.", self.client)
        except Exception as e:
            await handleException(e, self.client)

    @tasks.loop(seconds=1800)
    async def cacheGuildWar(self):
        # caches all guild war stats (scrapes 5 leaderboard pages)
        leaderboard = {}
        data = []
        for offset in range(0, 5):
            URL = server["general"]["links"]["guildWar"] + str(offset)
            logger.info(f"Getting guild war stats: {URL}")
            async with aiohttp.ClientSession() as session:
                async with session.get(URL) as page:
                    html = await page.text()
                    soup = BeautifulSoup(html, 'html.parser')
                    soup = soup.find("tbody")
                    guilds = soup.findAll("td")
                    # Flatten every table cell into plain, whitespace-free text.
                    for g in range(len(guilds)):
                        content = guilds[g].contents
                        for c in range(len(content)):
                            content[c] = str(content[c])
                        content = "".join(content).strip()
                        content = "".join(content.split("\n"))
                        content = "".join(content.split("\t"))
                        soup = BeautifulSoup(content, "html.parser")
                        guilds[g] = str(soup.get_text())
                    data = data + guilds
        # The top-3 rows use a different cell layout than the rest, hence the
        # hard-coded indices before the regular 3-cells-per-row walk below.
        leaderboard[1] = [data[1], data[2]]
        leaderboard[2] = [data[4], data[5]]
        leaderboard[3] = [data[7], data[8]]
        i = 9
        while (i < len(data)):
            leaderboard[data[i]] = [data[i + 1], data[i + 2]]
            i += 3
        self.data.updateGuildWar(leaderboard)

    @commands.command(aliases=["h"])
    async def hero(self, ctx, *params):
        try:
            if len(params) == 0:
                return await ctx.send("```You are missing required arguments! Check r.help for usage instructions.```")
            # Numeric args are star counts (first = total stars, second =
            # awakened stars); everything else is part of the hero name.
            starCount = [-1, -1]
            hero = []
            for p in params:
                try:
                    num = int(p)
                    if num < 1 and starCount[0] == -1:
                        return await ctx.send("```Star counts must be greater than 0.```")
                    if num > 7:
                        return await ctx.send("```Star counts must be less than or equal to 7.```")
                    if starCount[0] == -1:
                        starCount[0] = num
                    else:
                        starCount[1] = num
                except ValueError:
                    hero.append(p)
            # Defaults: 1 star, 0 awakened.
            if starCount[0] == -1:
                starCount[0] = 1
            if starCount[1] == -1:
                starCount[1] = 0
            if starCount[0] < starCount[1]:
                return await ctx.send("```Awakened stars cannot be greater than regular stars.```")
            hero = self.getHero(" ".join(hero))
            if hero == -1:
                return await ctx.send("```Invalid hero!```")
            await logUsage(f"Hero data for {hero} requested by @{ctx.author.name}.", self.client)
            with open("data/encyclopedia.json") as f:
                heroes = json.load(f)
            heroData = heroes[hero]
            emojis = self.data.getEmojis()
            # Base stats double per star and gain +50% per awakened star.
            attack = float(heroData['Base Stats']['ATK'])
            health = float(heroData['Base Stats']['HP'])
            defense = float(heroData['Base Stats']['DEF'])
            for i in range(starCount[0] - 1):
                attack *= 2
                health *= 2
                defense *= 2
            for i in range(starCount[1]):
                attack *= 1.5
                health *= 1.5
                defense *= 1.5
            attack = formatNum(attack)
            health = formatNum(health)
            defense = formatNum(defense)
            aps = heroData["Base Stats"]["APS"]
            speed = heroData["Base Stats"]["SPEED"]
            arange = heroData["Base Stats"]["RANGE"]
            ctkrate = heroData["Base Stats"]["CTK RATE"]
            ctkdmg = heroData["Base Stats"]["CTK DMG"]
            # Embed accent colour per element.
            colors = {
                "earth": "0x64B361",
                "water": "0x3E90BF",
                "fire": "0xE0291D",
                "light": "0xF5DB43",
                "dark": "0xAB57C1"
            }
            color = discord.Color(int(colors[heroData["Type"]], 16))
            desc = f"**Rarity:** {heroData['Rarity']}\n**Attributes: {emojis[heroData['Type']]} {emojis[heroData['Job']]} {emojis[heroData['Gender']]}**"
            if heroData["Special Ability"] != "":
                desc += f"\n**Special:** {heroData['Special Ability']}"
            embed = discord.Embed(
                title=f"**{hero}**\n{emojis['awaken'] * starCount[1]}{emojis['star'] * (starCount[0] - starCount[1])}{emojis['blackStar'] * (7 - starCount[0])}",
                color=color,
                description=desc
            )
            embed.set_thumbnail(url=heroData['Image'])
            embed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            embed.add_field(name=f"**{emojis['attack']} Attack**", value=f"{attack}")
            embed.add_field(name=f"**{emojis['health']} Health**", value=f"{health}")
            embed.add_field(name=f"**{emojis['defense']} Defense**", value=f"{defense}")
            embed.add_field(name=f"**{emojis['aps']} APS**", value=f"{aps}")
            embed.add_field(name=f"**{emojis['speed']} Speed**", value=f"{speed}")
            embed.add_field(name=f"**{emojis['range']} Range**", value=f"{arange}")
            embed.add_field(name=f"**{emojis['ctk rate']} Ctk Rate**", value=f"{ctkrate}")
            embed.add_field(name=f"**{emojis['ctk dmg']} Ctk Dmg**", value=f"{ctkdmg}")
            embed.add_field(name=f"** **", value=f"** **")
            if heroData["SP2"] != "":
                embed.add_field(name=f"**Ability**", value=f"```{heroData['SP2']}```", inline=False)
            if heroData["SP3"] != "":
                embed.add_field(name=f"**Ultimate**", value=f"```{heroData['SP3']}```", inline=False)
            if heroData["SP4"] != "":
                embed.add_field(name=f"**Passive**", value=f"```{heroData['SP4']}```", inline=False)
            embed.add_field(name=f"** **", value=f"**Runes** ⏩", inline=False)
            # Second page: recommended rune sets/stats.
            runeEmbed = discord.Embed(
                title=f"{hero}\n{emojis['awaken'] * starCount[1]}{emojis['star'] * (starCount[0] - starCount[1])}{emojis['blackStar'] * (7 - starCount[0])}",
                color=color
            )
            runeEmbed.set_thumbnail(url=heroData['Image'])
            runeEmbed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            for runeSet in heroData["Target Rune Set"]:
                runeEmbed.add_field(name=f"**{runeSet}**", value=f"```{heroData['Target Rune Set'][runeSet]}```", inline=False)
            for runeSet in heroData["Target Rune Stats"]:
                desc = '\n'.join(heroData['Target Rune Stats'][runeSet])
                runeEmbed.add_field(name=f"**{runeSet}**", value=f"```{desc}```", inline=False)
            runeEmbed.add_field(name=f"** **", value=f"⏪ **General**", inline=False)
            # Reaction-driven pagination between the two embeds.
            left = '⏪'
            right = '⏩'
            pages = [embed, runeEmbed]
            message = await ctx.send(embed=pages[0])
            await message.add_reaction(left)
            await message.add_reaction(right)

            def check(reaction, user):
                return user == ctx.message.author and reaction.message.id == message.id

            i = 0
            reaction = None
            while True:
                if str(reaction) == left:
                    i -= 1
                    if i < 0:
                        i = len(pages) - 1
                    await message.edit(embed=pages[i])
                elif str(reaction) == right:
                    i += 1
                    if i > len(pages) - 1:
                        i = 0
                    await message.edit(embed=pages[i])
                try:
                    reaction, user = await self.client.wait_for('reaction_add', timeout=300, check=check)
                    await message.remove_reaction(reaction, user)
                except asyncio.TimeoutError:
                    break
            await message.clear_reactions()
        except Exception as e:
            await handleException(e, self.client)

    def getHero(self, h):
        """Resolves a user-typed name (full name, known abbreviation or
        initials) to a canonical encyclopedia key; returns -1 if unknown."""
        with open("data/encyclopedia.json") as f:
            heroes = json.load(f)
        h = h.lower()
        shortVersions = {
            "mm": "Monki Mortar",
            "glad": "Gladiator",
            "furi": "Furiosa"
        }
        for hero in heroes:
            if h in shortVersions:
                return shortVersions[h]
            if h == hero.lower():
                return hero
            # Multi-word names also match by their initials (e.g. "mm").
            hero = hero.split(" ")
            initials = ""
            if len(hero) > 1:
                for s in hero:
                    initials += s[0]
                if h == initials.lower():
                    return " ".join(hero)
        return -1

    @commands.command(aliases=["cpr"])
    async def compare(self, ctx, *params):
        try:
            if len(params) == 0:
                return await ctx.send("```You are missing required arguments! Check r.help for usage instructions.```")
            # Comma-separated entries; each entry is a hero (or attribute
            # type) optionally followed by star / awakened-star counts.
            starCount = [-1, -1]
            heroes = []
            hero = []
            params = " ".join(params).split(",")
            for p in params:
                p = p.strip().split(" ")
                for part in p:
                    try:
                        num = int(part)
                        if num < 1 and starCount[0] == -1:
                            return await ctx.send("```Star counts must be greater than 0.```")
                        if num > 7:
                            return await ctx.send("```Star counts must be less than or equal to 7.```")
                        if starCount[0] == -1:
                            starCount[0] = num
                        else:
                            starCount[1] = num
                    except ValueError:
                        hero.append(part)
                if starCount[0] == -1:
                    starCount[0] = 1
                if starCount[1] == -1:
                    starCount[1] = 0
                if starCount[0] < starCount[1]:
                    return await ctx.send("```Awakened stars cannot be greater than regular stars.```")
                hero = " ".join(hero)
                if self.getHero(hero) == -1:
                    # Not a hero name: try matching it as an attribute filter.
                    typeHeroes = self.getType(hero, starCount)
                    if typeHeroes == []:
                        return await ctx.send("```Invalid hero or type!```")
                    heroes += typeHeroes
                else:
                    heroes.append([self.getHero(hero), starCount])
                hero = []
                starCount = [-1, -1]
            if len(heroes) < 1:
                return await ctx.send("```Please include one or more heroes separated by commas (,) or a valid type.```")
            heroList = []
            for l in heroes:
                heroList.append(l[0])
            await logUsage(f"Comparing {', '.join(list(heroList))} requested by @{ctx.author.name}.", self.client)
            with open("data/encyclopedia.json") as f:
                encyclopedia = json.load(f)
            emojis = self.data.getEmojis()
            text = f""
            for l in heroes:
                hero = l[0]
                starCount = l[1]
                heroData = encyclopedia[hero]
                # Same scaling as the hero command: x2 per star, x1.5 per awaken.
                attack = float(heroData['Base Stats']['ATK'])
                health = float(heroData['Base Stats']['HP'])
                defense = float(heroData['Base Stats']['DEF'])
                for i in range(starCount[0] - 1):
                    attack *= 2
                    health *= 2
                    defense *= 2
                for i in range(starCount[1]):
                    attack *= 1.5
                    health *= 1.5
                    defense *= 1.5
                attack = formatNum(attack)
                health = formatNum(health)
                defense = formatNum(defense)
                aps = heroData["Base Stats"]["APS"]
                speed = heroData["Base Stats"]["SPEED"]
                arange = heroData["Base Stats"]["RANGE"]
                ctkrate = heroData["Base Stats"]["CTK RATE"]
                ctkdmg = heroData["Base Stats"]["CTK DMG"]
                text += \
                    f"{heroData['Emoji']} **{hero}** \n {starCount[0]}{emojis['star']}{starCount[1]}{emojis['awaken']} {emojis[heroData['Type']]}{emojis[heroData['Job']]}{emojis[heroData['Gender']]}" \
                    f"\n" \
                    f"{emojis['attack']} `{attack}` " \
                    f"{emojis['health']} `{health}` " \
                    f"{emojis['defense']} `{defense}` " \
                    f"{emojis['aps']} `{aps}` " \
                    f"{emojis['speed']} `{speed}` " \
                    f"{emojis['range']} `{arange}` " \
                    f"{emojis['ctk rate']} `{ctkrate}` " \
                    f"{emojis['ctk dmg']} `{ctkdmg}`\n"
            # Three text lines per hero; paginate every 8 heroes (24 lines).
            text = text.split("\n")
            t = 0
            while t < len(text) - 1:
                await ctx.send(text[t] + "\n" + text[t + 1] + "\n" + text[t + 2])
                t += 3
                if t != 0 and t % 24 == 0:
                    message = await ctx.send("React with ⏩ for more heroes.")
                    await message.add_reaction("⏩")

                    # BUGFIX: the original check had no `return`, so it always
                    # evaluated to None (falsy) and wait_for always timed out.
                    def check(reaction, user):
                        return user == ctx.message.author and reaction.message.id == message.id

                    try:
                        reaction, user = await self.client.wait_for('reaction_add', timeout=300, check=check)
                        if reaction == "⏩":
                            await message.delete()
                    except asyncio.TimeoutError:
                        await message.clear_reactions()
                        break
        except Exception as e:
            await handleException(e, self.client)

    def getType(self, type, starCount):
        """Returns [hero, starCount] pairs for every hero whose attributes
        (job, element, gender, rarity) match all tokens in `type`; returns []
        if any token is not a known attribute keyword."""
        tokens = type.split(" ")
        heroes = []
        heroTypes = self.data.getHeroTypes()
        with open("data/encyclopedia.json") as f:
            encyclopedia = json.load(f)
        # BUGFIX: the original wrote `type[t] == True` (comparison, not
        # assignment) and then iterated an empty list, so unknown tokens were
        # never rejected here.  Validate against the known keywords instead.
        known = set()
        for category in heroTypes:
            known.update(heroTypes[category])
        for token in tokens:
            if token not in known:
                return heroes
        for hero in encyclopedia:
            heroData = encyclopedia[hero]
            attributes = [heroData["Job"], heroData["Type"], heroData["Gender"], heroData["Rarity"].lower()]
            match = True
            for token in tokens:
                if not token in attributes:
                    match = False
            if match == True:
                inHeroes = False
                for h in heroes:
                    if h[0] == hero:
                        inHeroes = True
                        break
                if not inHeroes:
                    heroes.append([hero, starCount])
        return heroes

    @commands.command(aliases=["bc"])
    async def blitzCalc(self, ctx, star1: int, medals: int, star2: int):
        # calculates blitz resources needed to go from star1 (+medals) to star2
        try:
            # BUGFIX: the original guard `not -1 < star1 and star1 < 8` parsed
            # as `(not (-1 < star1)) and ...` and never rejected values > 7.
            if not (0 <= star1 <= 7) or not (0 <= star2 <= 7) or star2 < star1:
                return await ctx.send("```Please make sure your star counts are within 0 and 7 and the second star count is greater than the first. Ex: r.bc 6 234 7```")
            # Cumulative medals required to reach each star level.
            medalData = [0, 10, 20, 50, 200, 600, 1500, 2500]
            medalCount = medals
            for i in range(0, star1 + 1):
                medalCount += medalData[i]
            medalGoal = 0
            for i in range(0, star2 + 1):
                medalGoal += medalData[i]
            if medalGoal < medalCount:
                return await ctx.send(f"```Your medal count surpasses that needed for {star2} stars. Please check and try again. Example usage: r.bc 6 234 7```")
            floozPerChest = 120
            medalsNeeded = medalGoal - medalCount
            # Worst case: 10 medals/chest; estimate: 12 medals/chest.
            maxChests = math.ceil(medalsNeeded / 10.0)
            maxFlooz = maxChests * floozPerChest
            estimatedChests = math.ceil(medalsNeeded / 12.0)
            estimatedFlooz = estimatedChests * floozPerChest
            emojis = self.data.getEmojis()
            images = self.data.getImages()
            embed = discord.Embed(
                title="Blitz Chest Calculator",
                color=0xff009d,
                description=f"From {star1}{emojis['star']} + {formatNum(medals)}{emojis['medal']} to {star2}{emojis['star']}, you need:"
            )
            embed.add_field(
                name=f"**Estimated: **",
                value=f"{formatNum(estimatedFlooz)} {emojis['flooz']} for {formatNum(estimatedChests)} {emojis['blitzchest']} (12{emojis['medal']} / {emojis['blitzchest']})",
                inline=False
            )
            embed.add_field(
                name=f"**At most: **",
                value=f"{formatNum(maxFlooz)} {emojis['flooz']} for {formatNum(maxChests)} {emojis['blitzchest']} (10{emojis['medal']} / {emojis['blitzchest']})",
                inline=False
            )
            embed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            embed.set_thumbnail(url=images["blitzchest"])
            await ctx.send(embed=embed)
            await logUsage(f"Blitz calc requested by @{ctx.author.name}.", self.client)
        except Exception as e:
            await handleException(e, self.client)

    @commands.command(aliases=["cc"])
    async def crusherCalc(self, ctx, star1: int, medals: int, star2: int):
        # calculates crusher resources needed to go from star1 (+medals) to star2
        try:
            # BUGFIX: same precedence bug as blitzCalc — values > 7 were accepted.
            if not (0 <= star1 <= 7) or not (0 <= star2 <= 7) or star2 < star1:
                return await ctx.send("```Please make sure your star counts are within 0 and 7 and the second star count is greater than the first. Ex: r.bc 6 234 7```")
            medalData = [0, 10, 20, 50, 200, 600, 1500, 2500]
            medalCount = medals
            for i in range(0, star1 + 1):
                medalCount += medalData[i]
            medalGoal = 0
            for i in range(0, star2 + 1):
                medalGoal += medalData[i]
            if medalGoal < medalCount:
                return await ctx.send(f"```Your medal count surpasses that needed for {star2} stars. Please check and try again. Example usage: r.bc 6 234 7```")
            # Per chest tier: [flooz cost, min medals, estimated medals,
            # estimated medals for additional (non-crusher) heroes].
            chests = [
                [120, 10, 23, 13],
                [240, 30, 50, 20],
                [450, 50, 80, 30],
                [900, 80, 127, 47],
                [1800, 120, 188, 68]
            ]
            medalsNeeded = medalGoal - medalCount
            # Ask how many chests per rotation via numbered reactions.
            emojis = ["1️⃣", "2️⃣", "3️⃣", "4️⃣", "5️⃣"]
            message = await ctx.send("```How many crusher chests do you want to buy per rotation?```")
            for e in emojis:
                await message.add_reaction(e)

            def check(reaction, user):
                return user == ctx.message.author and str(reaction.emoji) in emojis and reaction.message.id == message.id

            try:
                reaction, user = await self.client.wait_for('reaction_add', check=check, timeout=60)
            except asyncio.TimeoutError:
                await ctx.send("```You took too long \:( Please try the command again.```")
                return await message.clear_reactions()
            for e in range(len(emojis)):
                if str(reaction.emoji) == emojis[e]:
                    chestsPerRotation = e + 1
                    break
            emojis = self.data.getEmojis()
            images = self.data.getImages()
            embed = discord.Embed(
                title="Crusher Chest Calculator",
                color=0xffd738,
                description=f"Chests per rotation: {chestsPerRotation}\nFrom {star1}{emojis['star']} + {formatNum(medals)}{emojis['medal']} to {star2}{emojis['star']}, you need:"
            )
            # primary hero, estimated medal yield per chest
            j = 0
            estimatedMedals = medalsNeeded
            estimatedChests, estimatedFlooz = 0, 0
            while estimatedMedals > 0:
                i = j
                floozPerChest, medalsPerChest = chests[i][0], chests[i][2]
                estimatedMedals -= medalsPerChest
                estimatedFlooz += floozPerChest
                estimatedChests += 1
                j += 1
                if j == chestsPerRotation:
                    j = 0
            # primary hero, guaranteed (minimum) medal yield per chest
            j = 0
            maxMedals = medalsNeeded
            maxFlooz, maxChests = 0, 0
            while maxMedals > 0:
                i = j
                floozPerChest, medalsPerChest = chests[i][0], chests[i][1]
                maxMedals -= medalsPerChest
                maxFlooz += floozPerChest
                maxChests += 1
                j += 1
                if j == chestsPerRotation:
                    j = 0
            # additional (non-crusher) heroes, estimated yield
            j = 0
            secondaryMedals = medalsNeeded
            secondaryChests, secondaryFlooz = 0, 0
            while secondaryMedals > 0:
                i = j
                floozPerChest, medalsPerChest = chests[i][0], chests[i][3]
                secondaryMedals -= medalsPerChest
                secondaryFlooz += floozPerChest
                secondaryChests += 1
                j += 1
                if j == chestsPerRotation:
                    j = 0
            # medal totals per full rotation, for display
            j = chestsPerRotation
            minMedalsPerRotation, estimatedMedalsPerRotation, secondaryMedalsPerRotation = 0, 0, 0
            while j != 0:
                i = j - 1
                minMedalsPerRotation += chests[i][1]
                estimatedMedalsPerRotation += chests[i][2]
                secondaryMedalsPerRotation += chests[i][3]
                j -= 1
            embed.add_field(
                name=f"__**Crusher Hero**__",
                value=f"\n\n**Estimated: **{formatNum(estimatedFlooz)}{emojis['flooz']} for {formatNum(estimatedChests)}{emojis['crusherchest']} ({formatNum(estimatedMedalsPerRotation)}{emojis['medal']}/ rotation)" \
                      f"\n**At most: **{formatNum(maxFlooz)}{emojis['flooz']} for {formatNum(maxChests)}{emojis['crusherchest']} ({formatNum(minMedalsPerRotation)}{emojis['medal']}/ rotation)",
                inline=False
            )
            embed.add_field(
                name=f"__**Additional Heroes**__",
                value=f"\n\n**Estimated: **{formatNum(secondaryFlooz)}{emojis['flooz']} for {formatNum(secondaryChests)}{emojis['crusherchest']} ({formatNum(secondaryMedalsPerRotation)}{emojis['medal']}/ rotation)",
                inline=False
            )
            embed.set_footer(text=server["general"]["footer"], icon_url=self.client.user.avatar_url)
            embed.set_thumbnail(url=images["crusherchest"])
            await message.clear_reactions()
            await message.edit(content="", embed=embed)
            await logUsage(f"Crusher calc requested by @{ctx.author.name}.", self.client)
        except Exception as e:
            await handleException(e, self.client)

    # TODO: @commands.command(aliases=["fc"])
    async def fortuneChest(self, ctx, params):
        # finds a hero in a fortune chest (not yet registered as a command)
        try:
            if len(params) == 0:
                return await ctx.send("```You are missing required arguments! Check r.help for usage instructions.```")
            hero = self.getHero(" ".join(params))
            if hero == -1:
                return await ctx.send("```Invalid hero!```")
            await logUsage(f"Fortune chest for {hero} requested by @{ctx.author.name}.", self.client)
        except Exception as e:
            await handleException(e, self.client)

    @tasks.loop(minutes=1)
    async def updateFortuneChest(self):
        # rotates the fortune chest heroes once per day at 00:00 UTC
        try:
            # BUGFIX: the file does `import datetime`, so the original
            # `datetime.utcnow()` raised AttributeError on every tick.
            time = str(datetime.datetime.utcnow())
            if time[11:16] == "00:00" or time[11:16] == "00:01":
                chest = self.data.getFortuneChest()
                # Advance each rarity index, wrapping at its list length.
                for i in range(len(chest["current"])):
                    chest["current"][i] = chest["current"][i] + 1
                if chest["current"][0] >= len(chest["common"]):
                    chest["current"][0] = 0
                if chest["current"][1] >= len(chest["rare"]):
                    chest["current"][1] = 0
                if chest["current"][2] >= len(chest["epic"]):
                    chest["current"][2] = 0
                self.data.updateFortuneChest(chest)
                await logEvent("Fortune chest updated.", self.client)
                # Sleep past the 00:00/00:01 window so we don't rotate twice.
                await asyncio.sleep(1200)
        except Exception as e:
            await handleException(e, self.client)
def setup(client):
    # discord.py extension entry point: registers the Game cog on the bot.
    client.add_cog(Game(client))
2887443531 | import os
import time
import shutil
import time
import json
import random
import time
import argparse
import numpy as np
## torch packages
import torch
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.tensorboard import SummaryWriter
import torch.nn as nn
from transformers import get_linear_schedule_with_warmup
from easydict import EasyDict as edict
## for visualisation
import matplotlib.pyplot as plt
## custom
from eval_multilabel import eval_model
from select_model_input import select_model,select_input
import dataset
import config_multilabel as train_config
from label_dict import ed_emo_dict
from utils import save_checkpoint,clip_gradient
def train_epoch(model, train_iter, epoch, loss_fn, optimizer, log_dict):
    """Runs one training epoch and returns the mean per-batch loss.

    Args:
        model: network to train; moved to CUDA unconditionally.
        train_iter: iterable of batches, decoded via select_input.
        epoch: epoch index (used for logging only).
        loss_fn: criterion applied to (prediction, target).
        optimizer: optimizer stepped once per batch.
        log_dict: config object; reads log_dict.param (batch_size etc.).

    Returns:
        float: accumulated loss divided by the number of batches.
    """
    total_epoch_loss = 0
    model.cuda()
    steps = 0
    model.train()
    start_train_time = time.time()
    for idx, batch in enumerate(train_iter):
        text, attn, target = select_input(batch, log_dict.param)
        # Skip a trailing batch smaller than batch_size.
        # BUGFIX: was `is not`, which tests identity — not equality — on ints
        # and is only coincidentally correct for small interned integers.
        if len(target) != log_dict.param.batch_size:
            continue
        if torch.cuda.is_available():
            text = [text[0].cuda(), text[1].cuda(), text[2].cuda(), text[3].cuda()]
            attn = attn.cuda()
            target = target.cuda()
        model.zero_grad()
        optimizer.zero_grad()
        prediction = model(text, attn)
        loss = loss_fn(prediction, target)
        loss.backward()
        clip_gradient(model, 1e-1)
        optimizer.step()
        steps += 1
        if steps % 100 == 0:
            print(f'Epoch: {epoch+1:02}, Idx: {idx+1}, Training Loss: {loss.item():.4f}, Time taken: {((time.time()-start_train_time)/60): .2f} min')
            start_train_time = time.time()
        total_epoch_loss += loss.item()
    return total_epoch_loss / len(train_iter)
def train_model(log_dict, data, model, loss_fn, optimizer, lr_scheduler, writer, save_home):
    """Full training loop.

    Each epoch: train, validate, test; checkpoint whenever validation F1
    improves; early-stop after `patience` epochs without improvement.
    `log_dict` doubles as the persisted JSON log of the best epoch.
    """
    best_f1_score = 0
    patience_flag = 0
    train_iter, valid_iter, test_iter = data[0], data[1], data[2]  # data is a tuple of three iterators
    for epoch in range(0, log_dict.param.nepoch):
        ## train and validation
        train_loss = train_epoch(model, train_iter, epoch, loss_fn, optimizer, log_dict)
        val_loss, val_result = eval_model(model, valid_iter, loss_fn, log_dict, save_home)
        print(f'Epoch: {epoch+1:02}, Train Loss: {train_loss:.3f}, Val. Loss: {val_loss:3f}, Val. F1: {val_result["f1"]:.2f}')
        ## testing — evaluated every epoch, but only logged when validation improves
        test_loss, test_result = eval_model(model, test_iter, loss_fn, log_dict, save_home)
        print(f'Test Loss: {test_loss:.3f}, Test F1 score: {test_result["f1"]:.4f}')
        ## save best model (is_best must be computed BEFORE best_f1_score is updated)
        is_best = val_result["f1"] > best_f1_score
        save_checkpoint({'epoch': epoch + 1, 'arch': log_dict.param.arch_name, 'state_dict': model.state_dict(), 'train_loss': train_loss, "val_result": val_result, 'param': dict(log_dict.param), 'optimizer': optimizer.state_dict()}, is_best, save_home + "/model_best.pth.tar")
        best_f1_score = max(val_result["f1"], best_f1_score)
        if log_dict.param.step_size != None:
            lr_scheduler.step()
        ## save logs for the best epoch so far
        if is_best:
            patience_flag = 0
            log_dict["test_result"] = test_result
            log_dict["valid_result"] = val_result
            log_dict["train_loss"] = train_loss
            log_dict["test_loss"] = test_loss
            log_dict["valid_loss"] = val_loss
            log_dict["epoch"] = epoch + 1
            with open(save_home + "/log.json", 'w') as fp:
                json.dump(dict(log_dict), fp, indent=4)
                fp.close()  # redundant: the `with` block already closes fp
        else:
            patience_flag += 1
        ## early stopping
        if patience_flag == log_dict.param.patience or epoch == log_dict.param.nepoch - 1:
            print(log_dict)
            break
if __name__ == '__main__':
    log_dict = edict({})
    log_dict.param = train_config.param
    if train_config.tuning:
        ## Tuning mode: sweep architectures / learning rates.
        for arch_name in ["kea_electra"]:
            for learning_rate in [3e-05]:  ## for tuning
                ## replace parameters based on tuning
                log_dict.param.learning_rate = learning_rate
                log_dict.param.arch_name = arch_name
                ## Seed everything for reproducibility.
                np.random.seed(0)
                random.seed(0)
                torch.manual_seed(0)
                torch.cuda.manual_seed(0)
                torch.cuda.manual_seed_all(0)
                ## Loading data
                print('Loading dataset')
                start_time = time.time()
                train_iter, valid_iter, test_iter = dataset.get_dataloader(log_dict.param.batch_size, log_dict.param.dataset, log_dict.param.arch_name)
                data = (train_iter, valid_iter, test_iter)
                finish_time = time.time()
                print('Finished loading. Time taken:{:06.3f} sec'.format(finish_time - start_time))
                ## Initialising model, loss, optimizer, lr_scheduler
                model = select_model(log_dict.param)
                loss_fn = nn.BCEWithLogitsLoss()
                optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=log_dict.param.learning_rate)
                # Fix: always bind lr_scheduler; previously it was left
                # undefined when step_size was None, so the train_model
                # call below raised NameError.
                lr_scheduler = None
                if log_dict.param.step_size != None:
                    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, log_dict.param.step_size, gamma=0.5)
                ## Filepaths for saving the model and the tensorboard runs
                model_run_time = time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
                writer = SummaryWriter("./runs/" + arch_name + "/")
                save_home = "./save/" + log_dict.param.dataset + "/" + log_dict.param.arch_name + "/" + model_run_time
                train_model(log_dict, data, model, loss_fn, optimizer, lr_scheduler, writer, save_home)
    else:
        ## Single run with all parameters as set in config_multilabel.py
        np.random.seed(0)
        random.seed(0)
        torch.manual_seed(0)
        torch.cuda.manual_seed(0)
        torch.cuda.manual_seed_all(0)
        ## Loading data
        print('Loading dataset')
        start_time = time.time()
        train_iter, valid_iter, test_iter = dataset.get_dataloader(log_dict.param.batch_size, log_dict.param.dataset, log_dict.param.arch_name)
        data = (train_iter, valid_iter, test_iter)
        finish_time = time.time()
        print('Finished loading. Time taken:{:06.3f} sec'.format(finish_time - start_time))
        ## Initialising model, loss, optimizer, lr_scheduler
        model = select_model(log_dict.param)
        loss_fn = nn.BCEWithLogitsLoss()
        optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=log_dict.param.learning_rate)
        lr_scheduler = None  # fix: avoid NameError below when step_size is None
        if log_dict.param.step_size != None:
            lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, log_dict.param.step_size, gamma=0.5)
        ## Filepaths for saving the model and the tensorboard runs
        model_run_time = time.strftime("%Y_%m_%d_%H_%M_%S", time.localtime())
        writer = SummaryWriter("./runs/" + log_dict.param.arch_name + "/")
        save_home = "./save/" + log_dict.param.dataset + "/" + log_dict.param.arch_name + "/" + model_run_time
        train_model(log_dict, data, model, loss_fn, optimizer, lr_scheduler, writer, save_home)
| varsha33/Fine-Grained-Emotion-Recognition | train_mutlilabel.py | train_mutlilabel.py | py | 6,794 | python | en | code | 6 | github-code | 13 |
71814578898 | from sqlalchemy import create_engine
from sqlalchemy import and_
from sqlalchemy.orm import sessionmaker
from models import Color, State, Team, Player
# Module-level engine/session shared by every query function below.
# NOTE(review): credentials are hardcoded in the URL — move them to
# environment variables / config before deploying anywhere real.
engine = create_engine(
    'postgresql://postgres:passw0rd@localhost:5432/ACC_BBALL')
session = sessionmaker(bind=engine)()
def query1(
    use_mpg, min_mpg, max_mpg,
    use_ppg, min_ppg, max_ppg,
    use_rpg, min_rpg, max_rpg,
    use_apg, min_apg, max_apg,
    use_spg, min_spg, max_spg,
    use_bpg, min_bpg, max_bpg
):
    """Print players whose enabled per-game stats fall in [min, max].

    Each use_* flag turns its (min_*, max_*) range filter on or off;
    active filters are combined conjunctively.
    """
    # (flag, column, low, high) — collapses six copy-pasted filter blocks
    # from the original into one data-driven loop.
    criteria = [
        (use_mpg, Player.mpg, min_mpg, max_mpg),
        (use_ppg, Player.ppg, min_ppg, max_ppg),
        (use_rpg, Player.rpg, min_rpg, max_rpg),
        (use_apg, Player.apg, min_apg, max_apg),
        (use_spg, Player.spg, min_spg, max_spg),
        (use_bpg, Player.bpg, min_bpg, max_bpg),
    ]
    players = session.query(Player)
    for enabled, column, low, high in criteria:
        if enabled:
            players = players.filter(and_(column >= low, column <= high))
    print('PLAYER_ID TEAM_ID UNIFORM_NUM FIRST_NAME LAST_NAME MPG PPG RPG APG SPG BPG')
    for player in players:
        print(player.player_id, player.team_id, player.uniform_num,
              player.first_name, player.last_name,
              player.mpg, player.ppg, player.rpg, player.apg,
              "{:.1f}".format(player.spg), "{:.1f}".format(player.bpg)
              )
    return
def query2(team_color):
    """Print the name of every team whose color is `team_color`."""
    matching_color = session.query(Color).filter(Color.name == team_color)[0]
    matching_teams = session.query(Team).filter(Team.color_id == matching_color.color_id)
    print('NAME')
    for current_team in matching_teams:
        print(current_team.name)
    return
def query3(team_name):
    """Print the players of `team_name`, highest scorers first."""
    the_team = session.query(Team).filter(Team.name == team_name)[0]
    roster = (session.query(Player)
              .filter(Player.team_id == the_team.team_id)
              .order_by(Player.ppg.desc()))
    print('FIRST_NAME LAST_NAME')
    for member in roster:
        print(member.first_name, member.last_name)
    return
def query4(team_state, team_color):
    """Print uniform number and name of every player on a team that is
    both in `team_state` and wears `team_color`.
    """
    # NOTE(review): [0] raises IndexError if the state/color name is
    # unknown — confirm callers always pass valid names.
    state = session.query(State).filter(State.name == team_state)[0]
    color = session.query(Color).filter(Color.name == team_color)[0]
    teams = session.query(Team).filter(
        and_(Team.state_id == state.state_id, Team.color_id == color.color_id)
    )
    teamIdsofState = [team.team_id for team in teams]
    players = session.query(Player).filter(Player.team_id.in_(teamIdsofState))
    print('UNIFORM_NUM FIRST_NAME LAST_NAME')
    for player in players:
        print(player.uniform_num, player.first_name, player.last_name)
    return
def query5(num_wins):
    """Print every player on a team with strictly more than `num_wins` wins,
    together with the team name and win count.
    """
    teams = session.query(Team).filter(Team.wins > num_wins)
    teamIdsofWins = [team.team_id for team in teams]
    players = session.query(Player).filter(Player.team_id.in_(teamIdsofWins))
    print('FIRST_NAME LAST_NAME NAME WINS')
    for player in players:
        # NOTE(review): this re-queries the team for every player (N+1
        # pattern); a join would do it in one round trip.
        playerTeam = session.query(Team).filter(
            Team.team_id == player.team_id)[0]
        print(player.first_name, player.last_name,
              playerTeam.name, playerTeam.wins)
    return
| YUME-FF/Database_Programming | extraCredit/query_funcs.py | query_funcs.py | py | 3,265 | python | en | code | 0 | github-code | 13 |
17112410926 | import discord
from discord import app_commands
from discord.ext import commands
import random
# import our global settings file
import settings
class Chat(commands.Cog):
    """Light-hearted slash commands: coin flip, dice rolls and a meme GIF.

    Limits for the dice command come from the global `settings` module.
    """

    def __init__(self, bot):
        self.bot = bot

    @app_commands.command(name="flip")
    async def flip(self, interaction: discord.Interaction):
        """Flip a coin!"""
        # The docstring above doubles as the slash-command description.
        if random.choice(["Heads", "Tails"]) == "Heads":
            with open('./img/coin-heads.png', 'rb') as f:
                picture = discord.File(f)
                await interaction.response.send_message(file=picture)
        else:
            with open('./img/coin-tails.png', 'rb') as f:
                picture = discord.File(f)
                await interaction.response.send_message(file=picture)

    @app_commands.command(name="roll")
    async def roll(self, interaction: discord.Interaction, amount: int = 3, sides: int = 6):
        """Roll some dice!"""
        # Validate both inputs against configured limits; replies are
        # ephemeral so only the invoking user sees the error.
        if sides < 1 or sides > settings.settings["maxDieSize"]:
            await interaction.response.send_message(f"Sides must be > 0 and <= {settings.settings['maxDieSize']}",
                                                    ephemeral=True)
            return
        if amount < 1 or amount > settings.settings["maxDice"]:
            await interaction.response.send_message(f"Amount of dice must be >0 and <= {settings.settings['maxDice']}",
                                                    ephemeral=True)
            return
        message = "Your rolls:"
        for die in range(amount):
            result = random.randint(1, sides)
            # Start a fresh line every rollsPerLine results.
            if die % settings.settings["rollsPerLine"] == 0:
                message += '\n'
            message += f"{result}\t"
        await interaction.response.send_message(message)

    @app_commands.command(name="rofl")
    async def rofl(self, interaction: discord.Interaction):
        """ROFLCOPTER!"""
        with open('./img/roflcopter.gif', 'rb') as f:
            picture = discord.File(f)
            await interaction.response.send_message(file=picture)
# add our cog to the bot, so it's all run on startup.
async def setup(bot):
    # discord.py extension entry point: register the cog at load time.
    await bot.add_cog(Chat(bot))
| stapler8/Cadence | src/chat.py | chat.py | py | 2,169 | python | en | code | 0 | github-code | 13 |
37562915348 | import pymongo
import tkinter as tk
from tkinter import messagebox
# Module-level MongoDB connection and the shared grid buffer.
# NOTE(review): connection parameters are hardcoded to localhost.
myclient = pymongo.MongoClient("mongodb://localhost:27017/")
mydb = myclient["enrollmentsystem"]
mycol = mydb["teachers"]
# `lst` holds the rows currently shown in the grid; row 0 is the header.
lst = [['ID' , 'Name', 'Dept' , 'Contact']]
#assign to table
def callback(event):
    # Click handler for grid cells: copy the clicked row's values into
    # the entry fields. `_values` was stashed on each Entry by creategrid
    # as (cell_text, row_index).
    li = []
    li = event.widget._values
    tid.set(lst[li[1]][0])
    tname.set(lst[li[1]][1])
    tdept.set(lst[li[1]][2])
    tnum.set(lst[li[1]][3])
#create table
def creategrid(n):
    # Rebuild `lst` from MongoDB and draw it as a grid of Entry widgets.
    # With n == 1 the previously drawn rows (grid rows > 6) are removed
    # instead — callers use creategrid(1); creategrid(0) to refresh.
    lst.clear()
    lst.append(['ID' , 'Name', 'Dept' , 'Number'])
    cursor = mycol.find({})
    for text_fromDB in cursor:
        studid = str(text_fromDB['tid'])
        studname = str(text_fromDB['tname'].encode('utf-8').decode('utf-8'))
        studemail = str(text_fromDB['tdept'].encode('utf-8').decode('utf-8'))
        studcourse = str(text_fromDB['tnum'])
        lst.append([studid, studname, studemail, studcourse])
    for i in range(len(lst)):
        for j in range(len(lst[0])):
            mgrid = tk.Entry(window, width=10)
            mgrid.insert(tk.END, lst[i][j])
            # Stash (text, row index) for the click callback.
            mgrid._values = mgrid.get(), i
            mgrid.grid(row=i+7, column=j+6)
            mgrid.bind("<Button-1>", callback)
    if n == 1:
        for label in window.grid_slaves():
            if int(label.grid_info()["row"]) > 6:
                label.grid_forget()
#messagebox
def msgbox(msg, titlebar):
    # Ask-ok/cancel dialog; returns True when the user confirms.
    result = messagebox.askokcancel(title=titlebar, message=msg)
    return result
def tsave():
    # Insert a new teacher record; the new tid is max(existing tid) + 1.
    r = msgbox("Save record?", "Record")
    if r == True:
        newid = mycol.count_documents({})
        if newid != 0:
            # Non-empty collection: take the current maximum tid.
            newid = mycol.find_one(sort=[("tid", -1)])["tid"]
        id = newid + 1  # NOTE(review): shadows the builtin `id`
        tid.set(int(id))
        mydict = {"tid": int(teachid.get()), "tname": teachname.get(), "tdept": teachdept.get(), "tnum": teachnum.get()}
        x = mycol.insert_one(mydict)
        # Clear old grid rows, then redraw from the database.
        creategrid(1)
        creategrid(0)
def tdelete():
    # Delete the record whose tid is currently shown in the ID field.
    r = msgbox("Delete?", "record")
    if r == True:
        myquery = {"tid": int(teachid.get())}
        mycol.delete_one(myquery)
        creategrid(1)
        creategrid(0)
def tupdate():
    # Update name/dept/contact of the record identified by the ID field.
    r = msgbox("Update?", "record")
    if r == True:
        myquery = {"tid": int(teachid.get())}
        newvalues = {"$set": {"tname": teachname.get()}}
        mycol.update_one(myquery, newvalues)
        newvalues = {"$set": {"tdept": teachdept.get()}}
        mycol.update_one(myquery, newvalues)
        newvalues = {"$set": {"tnum": teachnum.get()}}
        mycol.update_one(myquery, newvalues)
        creategrid(1)
        creategrid(0)
def tfilters():
    """Filter teacher records by id (chosen comparison operator), name
    prefix/suffix, department and contact substrings, then redraw the grid
    with the matches.
    """
    creategrid(1)  # clear previously drawn rows
    mongoquery = mydb.get_collection("teachers")
    startname = '^' + ftsname.get()
    endname = ftname.get() + '$'
    selected = options.get()  # one of $gt/$lt/$gte/$lte/$eq/$ne
    result = mongoquery.find({"$and": [{"tid": {selected: fid.get()}}, {"tname": {'$regex': endname}}, {"tname": {'$regex': startname}}, {"tdept": {'$regex': fdept.get()}}, {"tnum": {'$regex': fnum.get()}}]})
    print(ftname.get())
    print(selected)
    lst.clear()
    lst.append(['ID' , 'Name', 'Dept' , 'Number'])
    for text_fromDB in result:
        studid = str(text_fromDB['tid'])
        studname = str(text_fromDB['tname'].encode('utf-8').decode('utf-8'))
        studemail = str(text_fromDB['tdept'].encode('utf-8').decode('utf-8'))
        studcourse = str(text_fromDB['tnum'])
        lst.append([studid, studname, studemail, studcourse])
    for i in range(len(lst)):
        for j in range(len(lst[0])):
            mgrid = tk.Entry(window, width=10)
            mgrid.insert(tk.END, lst[i][j])
            mgrid._values = mgrid.get(), i
            mgrid.grid(row=i+7, column=j+6)
            mgrid.bind("<Button-1>", callback)
    # Fixed: removed the dead trailing `if n == 1:` cleanup block — the
    # module-level `n` is a tk.StringVar and can never equal 1, so that
    # branch could never execute (the cleanup is done by creategrid(1)
    # at the top of this function instead).
# --- Top-level UI construction -------------------------------------------
window = tk.Tk()
window.title("Teachers Form")
window.geometry("1050x400")
window.configure(bg="light blue")
# Form title
label = tk.Label(window, text = "Teachers Form", width = 30 , height = 1, bg = "pink" , anchor="center")
label.config(font=("Courier",10))
label.grid(column=2,row=1)
# Teacher id (read-only: assigned automatically on save)
label = tk.Label(window, text = "Teachers ID", width = 15 , height = 1, bg = "pink")
label.grid(column=1,row=2)
tid = tk.StringVar(window)
teachid = tk.Entry(window , textvariable=tid)
teachid.grid(column=2,row=2)
teachid.configure(state=tk.DISABLED)
# Teacher name
label2 = tk.Label(window, text = "Teacher Name", width = 15 , height = 1, bg = "pink")
label2.grid(column=1,row=3)
tname = tk.StringVar(window)
teachname = tk.Entry(window , textvariable=tname)
teachname.grid(column=2,row=3)
# Department
label3 = tk.Label(window, text = "Department", width = 15 , height = 1, bg = "pink")
label3.grid(column=1,row=4)
tdept = tk.StringVar(window)
teachdept = tk.Entry(window , textvariable=tdept)
teachdept.grid(column=2,row=4)
# Contact
label4 = tk.Label(window, text = "Contact Num", width = 15 , height = 1, bg = "pink")
label4.grid(column=1,row=5)
tnum = tk.StringVar(window)
teachnum = tk.Entry(window , textvariable=tnum)
teachnum.grid(column=2,row=5)
# Initial grid draw
creategrid(0)
# CRUD buttons
savebtn = tk.Button(window,text = "Save", command=tsave)
savebtn.grid(column=1, row=6)
delbtn = tk.Button(window,text = "Delete", command=tdelete)
delbtn.grid(column=2, row=6)
upbtn = tk.Button(window,text = "Update", command=tupdate)
upbtn.grid(column=3, row=6)
fltrbtn = tk.Button(window,text = "Filter", command=tfilters)
fltrbtn.grid(column=10, row=6)
# Filter controls: comparison operator for the id filter
n = tk.StringVar(window)  # NOTE(review): unused StringVar, kept for compatibility
options = tk.StringVar(window)
options.trace_add('write', lambda *args: print(options.get()))
options.set('$gt')
drop = tk.OptionMenu( window , options ,'$gt', '$lt', '$gte' ,'$lte','$eq','$ne')
drop.grid(column=6, row=6)
# Filter: id
label8 = tk.Label(window, text = "Filter ID", width = 10 , height = 1, bg = "pink")
label8.grid(column=6,row=4)
fid = tk.IntVar(window)
filterid = tk.Entry(window , textvariable=fid, width= 10)
filterid.grid(column=6,row=5)
# Filter: name prefix
label7 = tk.Label(window, text = "StartName", width = 10 , height = 1, bg = "pink")
label7.grid(column=7,row=3)
ftsname = tk.StringVar(window)
filtersname = tk.Entry(window , textvariable=ftsname, width= 10)
filtersname.grid(column=7,row=4)
# Filter: name suffix
label5 = tk.Label(window, text = "Endname", width = 10 , height = 1, bg = "pink")
label5.grid(column=7,row=5)
ftname = tk.StringVar(window)
filtername = tk.Entry(window ,width= 10 , textvariable=ftname)
filtername.grid(column=7,row=6)
# Filter: department (label text says "Start Email" — misleading but kept)
label6 = tk.Label(window, text = "Start Email", width = 10 , height = 1, bg = "pink")
label6.grid(column=8,row=5)
fdept = tk.StringVar(window)
filteremail = tk.Entry(window , textvariable=fdept, width= 10)
filteremail.grid(column=8,row=6)
# Filter: contact number (NOTE(review): rebinds label6 above)
label6 = tk.Label(window, text = "Number", width = 10 , height = 1, bg = "pink")
label6.grid(column=9,row=5)
fnum = tk.StringVar(window)
filtercourse = tk.Entry(window , textvariable=fnum, width= 10)
filtercourse.grid(column=9,row=6)
window.mainloop()
480506770 | import ipaddress
async def handle_subnet(ip: str, mask: str):
    """Return a Markdown summary of the subnet containing `ip`/`mask`.

    `strict=False` lets callers pass any host address inside the subnet.
    Handles /31 and /32 networks, which have no conventional usable host
    range — the original indexed network[-2] and crashed on a /32.
    """
    network = ipaddress.ip_network(f"{ip}/{mask}", strict=False)
    net_addr = str(network.network_address)
    broadcast_addr = str(network.broadcast_address)
    host_count = network.num_addresses
    if host_count > 2:
        # Conventional subnets: hosts live between network and broadcast.
        usable_range = f"{str(network[1])} - {str(network[-2])}"
    else:
        # /31 (point-to-point, RFC 3021) and /32 (single host): every
        # address is usable.
        usable_range = f"{str(network[0])} - {str(network[-1])}"
    response = f"**Here are the details for subnet {network}**: \n\n**Network address**: {net_addr}\n**Broadcast address**: {broadcast_addr}\n**Usable IP range**: {usable_range}\n**Number of hosts**: {host_count}"
    return response
1690769944 | from flask_app.config.mysqlconnection import MySQLConnection, connectToMySQL
class Dojo:
    """Model for a row of the `dojos` table; persistence goes through
    connectToMySQL('dojos_and_ninjas')."""

    def __init__(self, data):
        # `data` is a dict-like row returned by the MySQL connector.
        self.id = data['id']
        self.name = data['name']
        self.created_at = data['created_at']
        self.updated_at = data['updated_at']

    # define a class method which queries and returns all dojos in the database
    @classmethod
    def get_all(cls):
        # store the query string in a variable
        query = "SELECT * FROM dojos"
        # run the query and collect the raw rows
        results = connectToMySQL('dojos_and_ninjas').query_db(query)
        # wrap each row in a Dojo instance
        dojos = []
        for dojo in results:
            dojos.append(cls(dojo))
        return dojos

    @classmethod
    def get_one(cls, data):
        """Return the dojo whose id is data['id'].

        NOTE(review): result[0] raises IndexError for an unknown id.
        """
        print("getting one dojo...")
        query = '''
        SELECT * FROM dojos
        WHERE id = %(id)s
        '''
        result = connectToMySQL('dojos_and_ninjas').query_db(query, data)
        return cls(result[0])

    @classmethod
    def save(cls, data):
        # Insert a new dojo named data['dojo_name']; returns the new row id.
        query = """
        INSERT INTO dojos (name, created_at, updated_at)
        VALUES (%(dojo_name)s, NOW(), NOW() );
        """
        return connectToMySQL('dojos_and_ninjas').query_db(query, data)

    @classmethod
    def ninja_in_dojo(cls, data):
        # Return the dojo that the ninja with id data['id'] belongs to.
        query = '''
        SELECT * FROM dojos
        JOIN ninjas ON dojos.id=ninjas.dojo_id
        WHERE ninjas.id = %(id)s;
        '''
        result = connectToMySQL('dojos_and_ninjas').query_db(query, data)
        return cls(result[0])

    @classmethod
    def delete_ninjas_in_dojo(cls, data):
        # Remove every ninja belonging to dojo data['id'] (manual cascade).
        query = '''
        DELETE FROM ninjas
        WHERE dojo_id = %(id)s
        '''
        return connectToMySQL('dojos_and_ninjas').query_db(query, data)

    @classmethod
    def delete_dojo(cls, data):
        # Delete the dojo and, first, all of its ninjas (FK constraint).
        cls.delete_ninjas_in_dojo(data)
        query = '''
        DELETE FROM dojos
        WHERE id = %(id)s;
        '''
        return connectToMySQL('dojos_and_ninjas').query_db(query, data)
17052905524 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class InputInvoiceOrderRequest(object):
    """Value object for an input-invoice order request.

    Presumably generated from the Alipay OpenAPI schema — TODO confirm.
    Behavior is unchanged from the generated original, but the 20
    identical property triplets and per-field serialization blocks are
    collapsed into one field table:

    - every field starts as None and is exposed as a plain property;
    - `to_alipay_dict` serializes only truthy fields, delegating to a
      nested object's own `to_alipay_dict` when present;
    - `from_alipay_dict` copies any known keys from a dict, returning
      None for a falsy input.
    """

    # Wire names double as attribute names (backing slot is '_' + name).
    _FIELDS = (
        'buyer_inst_id', 'currency_code', 'exclude_tax_invoice_amt',
        'invoice_amt', 'invoice_code', 'invoice_date', 'invoice_material',
        'invoice_no', 'invoice_receive_date', 'invoice_source',
        'invoice_type', 'operator', 'out_bill_no', 'out_bill_type',
        'relate_amount', 'relate_tax_amt', 'seller_ip_role_id',
        'source', 'tax_amt', 'tax_rate',
    )

    def __init__(self):
        for name in self._FIELDS:
            setattr(self, '_' + name, None)

    # Class-creation-time helper (deleted below); builds the same
    # trivial get/set property pair the generated code spelled out.
    def _field_property(name):
        priv = '_' + name
        return property(lambda self: getattr(self, priv),
                        lambda self, value: setattr(self, priv, value))

    buyer_inst_id = _field_property('buyer_inst_id')
    currency_code = _field_property('currency_code')
    exclude_tax_invoice_amt = _field_property('exclude_tax_invoice_amt')
    invoice_amt = _field_property('invoice_amt')
    invoice_code = _field_property('invoice_code')
    invoice_date = _field_property('invoice_date')
    invoice_material = _field_property('invoice_material')
    invoice_no = _field_property('invoice_no')
    invoice_receive_date = _field_property('invoice_receive_date')
    invoice_source = _field_property('invoice_source')
    invoice_type = _field_property('invoice_type')
    operator = _field_property('operator')
    out_bill_no = _field_property('out_bill_no')
    out_bill_type = _field_property('out_bill_type')
    relate_amount = _field_property('relate_amount')
    relate_tax_amt = _field_property('relate_tax_amt')
    seller_ip_role_id = _field_property('seller_ip_role_id')
    source = _field_property('source')
    tax_amt = _field_property('tax_amt')
    tax_rate = _field_property('tax_rate')

    del _field_property

    def to_alipay_dict(self):
        """Serialize truthy fields to a plain dict (Alipay wire format)."""
        params = dict()
        for name in self._FIELDS:
            value = getattr(self, '_' + name)
            if not value:
                # Matches the generated code: falsy values (None, '', 0)
                # are omitted from the payload.
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a response dict; None for falsy input."""
        if not d:
            return None
        o = InputInvoiceOrderRequest()
        for name in InputInvoiceOrderRequest._FIELDS:
            if name in d:
                setattr(o, name, d[name])
        return o
| alipay/alipay-sdk-python-all | alipay/aop/api/domain/InputInvoiceOrderRequest.py | InputInvoiceOrderRequest.py | py | 11,181 | python | en | code | 241 | github-code | 13 |
"""
Project Euler Problem 24
========================

A permutation is an ordered arrangement of objects. For example, 3124 is
one possible permutation of the digits 1, 2, 3 and 4. If all of the
permutations are listed numerically or alphabetically, we call it
lexicographic order. The lexicographic permutations of 0, 1 and 2 are:

                       012   021   102   120   201   210

What is the millionth lexicographic permutation of the digits 0, 1, 2, 3,
4, 5, 6, 7, 8 and 9?
"""
import math


def nth_lexicographic_permutation(digits, index):
    """Return the `index`-th (0-based) lexicographic permutation of
    `digits` as a single integer.

    Factorial number system: with k digits remaining, permutations come
    in blocks of (k-1)! that share the same leading digit, so
    index // (k-1)! picks the next digit and the remainder recurses.
    (Replaces the project-local Euler.faculty_list helper and the manual
    in-place digit-shift loop with math.factorial and list.pop.)
    """
    remaining = sorted(digits)
    result = 0
    for position in range(len(remaining) - 1, -1, -1):
        block_size = math.factorial(position)
        pick, index = divmod(index, block_size)
        result = result * 10 + remaining.pop(pick)
    return result


# The millionth permutation is at 0-based index 999999.
print(nth_lexicographic_permutation([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], 999999))
12125888071 | #arr is array of (val, key) pairs
import math
import time
import random
def merge(arr1, arr2):
    """Stably merge two (val, key) lists already sorted by val."""
    merged = []
    i = j = 0
    while i < len(arr1) and j < len(arr2):
        # <= keeps the merge stable: ties are taken from arr1 first.
        if arr1[i][0] <= arr2[j][0]:
            merged.append(arr1[i])
            i += 1
        else:
            merged.append(arr2[j])
            j += 1
    # At most one of these tails is non-empty.
    merged.extend(arr1[i:])
    merged.extend(arr2[j:])
    return merged


def mergeSort(arr):
    """Return the (val, key) pairs of arr sorted by val (merge sort)."""
    if len(arr) <= 1:
        return arr
    # Split at ceil(len/2), matching the original math.ceil division.
    mid = (len(arr) + 1) // 2
    return merge(mergeSort(arr[:mid]), mergeSort(arr[mid:]))
def countSort(arr, univsize):
    """Stable counting sort of (key, item) pairs, keys in [0, univsize)."""
    buckets = [[] for _ in range(univsize)]
    for pair in arr:
        buckets[pair[0]].append(pair)
    flattened = []
    for bucket in buckets:
        flattened.extend(bucket)
    return flattened
def convertBase(num, b, U):
    """Digits of `num` in base `b`, least-significant first, zero-padded
    to the fixed width int(log_b(U)) + 1 used for every key in [0, U)."""
    digits = []
    while num > 0:
        num, remainder = divmod(num, b)
        digits.append(remainder)
    width = int(math.log(U, b)) + 1
    # Negative pad counts produce an empty extension, same as the
    # original's empty range().
    digits.extend([0] * (width - len(digits)))
    return digits
def radixSort(arr, length, univsize, base):
    # LSD radix sort of (key, item) pairs with keys in [0, univsize),
    # using countSort on one base-`base` digit per pass. Stable overall
    # because countSort is stable.
    if length == 0 or length == 1:
        return arr
    for i in range(0, int(math.ceil(math.log(univsize, base)))):
        # NOTE(review): keys are re-converted to digit lists on every
        # pass; converting once up front would avoid redundant work.
        baseConvertedArr = []
        for (key, item) in arr:
            baseConvertedArr.append((convertBase(key, base, univsize), [key, item]))
        arr = baseConvertedArr
        radixArr = []
        others = []  # NOTE(review): unused
        # Sort by the i-th (least-significant-first) digit.
        for (number, pair) in arr:
            radixArr.append((number[i], pair))
        radixSortedArr = countSort(radixArr, base)
        # Unwrap back to the original (key, item) tuples.
        arr = []
        for (number, pair) in radixSortedArr:
            key, item = pair
            arr.append((key, item))
    return arr
# Demo: radix-sort 8 (key, label) pairs with keys in [0, 1000), base 10.
arr = [(165, 'a'), (47, 'b'), (75, 'c'), (89, 'd'), (932, 'e'), (28, 'f'), (3, 'g'), (66, 'h')]
new = radixSort(arr, 8, 1000, 10)
for i in new:
    print(i)
| Zaverot/CS120-2021-Fall | ps1/ps1.py | ps1.py | py | 2,252 | python | en | code | null | github-code | 13 |
41458320379 | from photoprism.Session import Session
from photoprism.Photo import Photo
# Demo script against the public PhotoPrism demo server.
# NOTE(review): these are the demo server's default credentials; never
# hardcode real credentials like this.
pp_session = Session("admin", "changethis", "demo.photoprism.app")
pp_session.create()

p = Photo(pp_session)
# Download the first photo matching the query, addressed by content hash.
data = p.search(query="original:*", count=1)
p.download_file(data[0]["Hash"])
# Download the first album.
data = p.list_albums(count=1)
p.download_album(uid=data[0]["UID"])
# Convenience wrapper: search + download in one call.
p.download_files_from_query(query="original:*", count=1)
71350165459 | from torchvision.models.resnet import ResNet as _ResNet
from .extractor import Extractor
class ResNetExtractor(Extractor, _ResNet):
    """Feature extractor built on torchvision's ResNet: returns the four
    residual-stage feature maps, deepest first."""

    # Channels of the returned maps, deepest first. Matches BasicBlock
    # ResNets (resnet18/34) — NOTE(review): a Bottleneck variant would
    # have 4x these channels; confirm only 18/34 are used here.
    out_channels = [512, 256, 128, 64]

    def _forward_impl(self, x):
        """Run stem + residual stages; return [layer4, layer3, layer2, layer1]."""
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        outs = []
        x = self.layer1(x)
        outs.append(x)
        x = self.layer2(x)
        outs.append(x)
        x = self.layer3(x)
        outs.append(x)
        x = self.layer4(x)
        outs.append(x)
        return outs[::-1]  # deepest stage first, matching out_channels

    def remove_unused(self):
        """Drop the classification head (avgpool + fc); only features are needed."""
        del self.avgpool
        del self.fc
| cafeal/SIGNATE_AIEdge2 | src/CenterNet/models/extractors/resnet.py | resnet.py | py | 633 | python | en | code | 3 | github-code | 13 |
7372857135 | import logging, os
from email.utils import formatdate
from operator import itemgetter
from time import time
from filerockclient.databases.sqlite import SQLiteDB
fst = itemgetter(0)  # first element of a pair
snd = itemgetter(1)  # second element of a pair

def compose(g, f):
    # Right-to-left composition: compose(g, f)(x) == g(f(x)).
    return lambda x: g(f(x))

# Reserved store key — not referenced in this chunk; presumably used by
# callers elsewhere in the module.
LASTACCEPTEDSTATEKEY = 'LastAcceptedState'
class MetadataDB(object):
    """A small persistent key-value store backed by a SQLite database.

    Keeps client state that must survive across runs (e.g. the timestamp
    of the last successful connection to the server).
    """

    def __init__(self, database_file):
        """
        @param database_file: filesystem path of the backing SQLite file
        """
        self.logger = logging.getLogger("FR." + self.__class__.__name__)
        self.db = SQLiteDB(database_file)
        self.filename = database_file

    def _file_exists(self):
        """Return True if the backing database file exists on disk."""
        return os.path.exists(self.filename)

    def delete(self):
        """ Deletes the db file from disk """
        if self._file_exists():
            os.unlink(self.filename)

    def _recreate_db_if_not_exists(self):
        """ Recreate the db file if it is missing or not a valid database """
        if not os.path.exists(self.filename) or not self._check_database_file():
            self.logger.debug(
                u'Local metadata database not found. Initializing '
                'new local metadata DB...')
            self.initialize_new()

    def _query(self, statement, qargs):
        """ Executes a query on database and return its result """
        self._recreate_db_if_not_exists()
        return self.db.query(statement, qargs)

    def _check_database_file(self):
        """
        Check if database file is present and is a regular
        sqlite3 database file.

        @return: the underlying check result, or False if the check failed
        """
        query = "select value from metadata where key='last_connected_datetime' ;"
        # Initialize before the try: the previous code referenced 'result'
        # inside the except handler, raising NameError on any query failure.
        result = False
        try:
            result = self.db.check_database_file(query)
        except Exception:
            self.logger.warning(u'Something went wrong attempting default query, result is: %s' % (result,))
        return result

    def initialize_new(self):
        """ Initialize a new local dataset database.

        @return: True on success, False otherwise
        """
        self.logger.debug(u'Creating local_dataset table... ')
        result = self.db.execute('create table metadata (key text, value text) ;')
        if result:
            self.logger.debug(u'Metadata database table successfully created.')
        else:
            self.logger.warning(u'Something went wrong creating metadata database table... ')
            return False
        result = self.db.execute("insert into metadata values ('last_connected_datetime','Thu, 01 Jan 1970 00:00:00 GMT') ;")
        if result:
            self.logger.debug(u'Default entry successfully inserted in metadata database table.')
        else:
            self.logger.warning(u'Something went wrong inserting default entry into metadata database table... ')
            return False
        return True

    def exists_record(self, key):
        """
        Returns True if the given key is present
        @param key: the key to look up
        @return: boolean
        """
        res = self._query('SELECT COUNT(*) FROM metadata WHERE key = ?', [key])
        count = compose(fst, fst)(res)  # res is [(count,)]: unwrap both layers
        return count > 0

    def get(self, key):
        """
        Looks for the record with the given key
        @param key: the key value
        @return: the value associated with the given key
        @raise Exception: if the key is not present
        """
        statement = "SELECT value FROM metadata WHERE key=?"
        result = self._query(statement, [key])
        if isinstance(result, list) and len(result) == 1:
            return result[0][0]
        elif isinstance(result, list) and len(result) > 1:
            self.logger.warning(u'More than one entry in metadata DB for %s!' % key)
            return result[0][0]
        else:
            self.logger.warning(u'Something went wrong getting %s.' % key)
            raise Exception('No key "%s" in metadataDB' % key)

    def _get_value_or_None(self, key):
        """
        Looks for the record with the given key
        @param key: the key value
        @return: the value associated with the given key, or None if absent
        """
        if self.exists_record(key):
            value = self.get(key)
        else:
            value = None
        return value

    def try_get(self, key):
        """
        Tries to get the value associated with the given key
        @return: the value or None
        """
        return self._get_value_or_None(key)

    def set(self, key, value):
        """
        Adds a key or updates the value of an existing key
        @param key: the key name
        @param value: the key value
        """
        if self.exists_record(key):
            self._update_record(key, value)
        else:
            self._insert_record(key, value)

    def _update_record(self, key, value):
        """
        Updates the value of an existing key
        @param key: key value
        @param value: the value
        """
        values = [value, key]
        self.db.execute('UPDATE metadata SET value = ? WHERE key = ?', values)

    def delete_key(self, keyvalue):
        """
        Deletes a key
        @param keyvalue: the key to delete
        """
        self.logger.debug('Deleting key %s from metadatadb' % keyvalue)
        self.db.execute("DELETE FROM metadata WHERE key = ?", [keyvalue])

    def _insert_record(self, key, value):
        """
        Adds a new key
        @param key: the key
        @param value: the value
        """
        values = [key, value]
        self.db.execute('INSERT INTO metadata VALUES (?, ?)', values)

    def update_last_connected_datetime(self, value=''):
        """
        Update metadata record for last_connected_timestamp with given value.
        @param value: RFC 2822 date string; defaults to the current GMT time
        @return: result of the update
        """
        if value == '':
            value = formatdate(int(time()), localtime=False, usegmt=True)
        statement = "update metadata set value=? where key=? ;"
        eargs = [value, 'last_connected_datetime']
        result = self.db.execute(statement, eargs)
        # Not used in the one-way default version, so only log errors if any.
        if not result:
            self.logger.warning(u'Something went wrong updating last_connected_datetime! ')
        return result

    def get_last_connected_datetime(self):
        """
        Get last connected timestamp set.
        @return: last_connected_date as string or None
        """
        statement = "select value from metadata where key=? ;"
        result = self._query(statement, ['last_connected_datetime'])
        if isinstance(result, list) and len(result) == 1:
            return result[0][0]
        elif isinstance(result, list) and len(result) > 1:
            self.logger.warning(u'More than one entry in metadata DB for last_connected_datetime!')
            return result[0][0]
        else:
            self.logger.warning(u'Something went wrong getting last_connected_datetime.')
            return None

    def get_all(self):
        """
        Returns a list of tuples
        Any tuple will have the format (key, value)
        @return: a list of tuples
        """
        records = self._query('SELECT * FROM metadata', [])
        result = []
        for record in records:
            key, value = record
            result.append((key, value))
        return result
if __name__ == '__main__':
    # Ad-hoc smoke test for MetadataDB (should become a proper unit test).
    logging.basicConfig()
    logger = logging.getLogger('FR')
    logger.setLevel(logging.DEBUG)
    metadataDB = MetadataDB("./metadatadb")
    if not metadataDB._check_database_file():
        metadataDB.initialize_new()
    metadataDB.set('key1', 'value1')
    metadataDB.set('key2', 'value2')
    logger.info(metadataDB.get_all())
    logger.info(metadataDB.get('key1'))
    # 'key4' was never stored: use try_get (returns None) instead of get,
    # which raises and previously aborted the script before the checks below.
    logger.debug(metadataDB.try_get('key4'))
    metadataDB.set('key3', 'value3')
    metadataDB.set('key1', '50004334')
    logger.info(metadataDB.get_all())
os.unlink("./metadatadb") | alvinlai/FileRock-Client | filerockclient/databases/metadata.py | metadata.py | py | 7,951 | python | en | code | null | github-code | 13 |
42442707245 | # Создайте программу для игры с конфетами человек против человека.
# Условие задачи: На столе лежит 2021 конфета. Играют два игрока делая ход друг после друга.
# Первый ход определяется жеребьёвкой.
# За один ход можно забрать не более чем 28 конфет.
# Все конфеты оппонента достаются сделавшему последний ход.
# Сколько конфет нужно взять первому игроку, чтобы забрать все конфеты у своего конкурента?
# Candy game, human vs. human (see the problem statement above).
import random

name1 = input('Введите имя первого игрока: ')
name2 = input('Введите имя второго игрока: ')
sweets = int(input('Введите количество конфет: '))
max_sweets = int(input('Введите максимальное количество конфет за ход: '))

# Draw lots: the winner of the draw moves first.
first_turn = random.choice([name1, name2])
flag = first_turn

# The game runs until a move drives the candy count below zero; the player
# who made the last move takes everything.
while sweets >= 0:
    print(f'Ваш ход {flag}:')
    turn = int(input('Введите желаемое количеств конфет для взятия:'))
    # Re-read the input on every failed check: the previous version looped
    # forever on out-of-range input because it never asked again.
    while not 0 <= turn <= max_sweets:
        print('Введите конфеты в диапазоне от 0 до ', max_sweets)
        turn = int(input('Введите желаемое количеств конфет для взятия:'))
    sweets -= turn
    if sweets >= 0:
        print(f'Конфет осталось: {sweets}')
    else:
        print(f'Конфет осталось 0. ')
    flag = name2 if flag == name1 else name1

# 'flag' now names the player due to move next, so the other player moved last.
winner = name2 if flag == name1 else name1
print(f'Поздравляем, победил игрок {winner}!')
| OlgaSuslova/starting_python | 12.01/2.py | 2.py | py | 1,784 | python | ru | code | 0 | github-code | 13 |
19670509364 | """ Timeseries driver for Landsat timeseries with meteorological data
"""
import datetime as dt
import logging
import os
from .timeseries_yatsm import YATSMTimeSeries
from ..ts_utils import ConfigItem, find_files
from ..series import Series
from ...logger import qgis_log
logger = logging.getLogger('tstools')
class YATSMMetTimeSeries(YATSMTimeSeries):
    """YATSM CCDCesque timeseries driver augmented with meteorological data.

    Extends :class:`YATSMTimeSeries` by discovering gridded met rasters on
    disk (PRISM-style file naming by default) and appending one extra
    :class:`Series` per met variable (ppt / tmin / tmax / tmean).
    """

    description = 'YATSM CCDCesque Timeseries + Met'
    location = None

    # Extend the parent driver's configuration with met-data discovery options.
    config = YATSMTimeSeries.config.copy()
    config['met_location'] = ConfigItem('Met root location', 'PRISM')
    config['met_types'] = ConfigItem('Met types',
                                     ['ppt', 'tmin', 'tmax', 'tmean'])
    config['met_pattern'] = ConfigItem('Met data pattern', 'PRISM*.bil')
    config['met_date_sep'] = ConfigItem('Met filename separator', '_')
    # Index of the '_'-separated filename field that holds the date string.
    config['met_date_sepno'] = ConfigItem('Met date index', 4)
    config['met_date_format'] = ConfigItem('Met date format', '%Y%m')

    def __init__(self, location, config=None):
        """Initialize the parent Landsat driver, then add one met Series per
        configured met type found under ``met_location/<met_type>``.
        """
        super(YATSMMetTimeSeries, self).__init__(location, config=config)
        # Display min/max hints per met variable (units presumably mm for
        # ppt and degrees C for temperatures -- TODO confirm).
        min_max_symbology = {
            'ppt': [0, 300],
            'tmin': [-30, 35],
            'tmean': [-30, 35],
            'tmax': [-30, 35]
        }
        for met_type in self.config['met_types'].value:
            logger.debug('Finding met data: %s' % met_type)
            images = find_files(
                os.path.join(self.config['met_location'].value, met_type),
                self.config['met_pattern'].value)
            # Get date index from file
            # Parse the date field out of the first file's basename and fail
            # fast (with a user-visible message) if it does not match the
            # configured date format.
            img = os.path.splitext(os.path.basename(images[0]))[0]
            d = img.split(self.config['met_date_sep'].value)[
                self.config['met_date_sepno'].value]
            try:
                dt.datetime.strptime(d, self.config['met_date_format'].value)
            except Exception as e:
                # NOTE(review): `e.message` is Python 2 only -- verify the
                # plugin still targets Python 2 before reuse.
                qgis_log('Could not parse date from %ith "%s"-separated field '
                         'of filename %s using date format %s: %s' %
                         (self.config['met_date_sepno'].value,
                          self.config['met_date_sep'].value,
                          img,
                          self.config['met_date_format'].value,
                          e.message))
                raise
            # Character span of the date within the basename.
            # NOTE(review): img.find(d) returns the FIRST occurrence of the
            # date digits; assumes they do not also appear earlier in the
            # name, and that all files share the same layout -- TODO confirm.
            idx_start = img.find(d)
            self._met_date_index = idx_start, idx_start + len(d)
            series = Series(
                images,
                self._met_date_index,
                self.config['met_date_format'].value,
                {
                    'description': met_type,
                    'symbology_hint_indices': [0],
                    'cache_prefix': 'met_%s_' % met_type,
                    'cache_suffix': '.npy'
                }
            )
            if met_type in min_max_symbology:
                series.symbology_hint_minmax = min_max_symbology[met_type]
            self.series.append(series)
| ceholden/TSTools | tstools/src/ts_driver/drivers/timeseries_yatsm_met.py | timeseries_yatsm_met.py | py | 2,936 | python | en | code | 52 | github-code | 13 |
184496373 | from django.shortcuts import render
from django.contrib.auth.models import User
from UserAuthsAPP.models import UserProfile
from ShopAPP.models import Product, WhishList
# Create your views here.
def Index(request):
    """Render the product index page.

    For authenticated users each product is wrapped in a dict carrying a
    ``wish`` flag (whether it is already on that user's wish list) and the
    product itself under ``details``; anonymous users get the bare queryset.
    """
    products = Product.objects.all()
    if request.user.is_authenticated:
        logged_user = User.objects.get(username=request.user)
        # One query for the user's whole wish list instead of one WhishList
        # lookup per product (avoids the N+1 query pattern of the original).
        wished_ids = set(
            WhishList.objects.filter(user=logged_user).values_list('product_id', flat=True)
        )
        products_with_wish = [
            {"wish": product.pk in wished_ids, "details": product}
            for product in products
        ]
        context = {"products": products_with_wish}
    else:
        context = {"products": products}
    return render(request, "CoreAPP/Index.html", context)
def Search(request):
    """Show products whose name contains the POSTed search term.

    On GET (or any non-POST request) the term is empty, so every product
    matches the ``icontains`` filter.
    """
    searchword = request.POST["search"] if request.method == "POST" else ""
    matches = Product.objects.filter(name__icontains=searchword)
    return render(
        request,
        "CoreAPP/Search.html",
        {'product_list': matches, "searchword": searchword},
    )
def Contact(request):
    """Render the static Contact page."""
    return render(request, "CoreAPP/Contact.html")
def About(request):
    """Render the static About page."""
    return render(request, "CoreAPP/About.html") | donregulus/Teferet | TeferetPROJECT/CoreAPP/views.py | views.py | py | 1,920 | python | en | code | 0 | github-code | 13 |
25105261723 | # -*- coding: utf-8 -*-
import cv2
import numpy as np
import sys
# ============================================================================
# ============================================================================
class PolygonDrawer(object):
    """Interactive freehand area drawer on top of an image.

    Press-and-drag the left mouse button to trace an area; releasing the
    button closes the stroke (if it has more than two points) and stores it.
    Press ESC to finish; the collected areas are filled on the image and the
    result is returned by :meth:`run`.
    """

    def __init__(self, file_name):
        self.window_name = file_name    # window title; doubles as the image path
        self.done = False               # True once the user pressed ESC
        self.start = False              # True while the left button is held down
        self.current = (0, 0)           # last known cursor position
        self.points = []                # points of the stroke being drawn
        self.area = []                  # finished strokes (lists of points)
        self.CANVAS_SIZE = (600, 800)   # fallback; replaced by the image shape
        self.FINAL_LINE_COLOR = (255, 255, 255)
        self.WORKING_LINE_COLOR = (127, 127, 127)
        self.LINE_SIZE = 3

    def on_mouse(self, event, x, y, buttons, user_param):
        """Mouse callback: record freehand points while the button is held."""
        if self.done:  # drawing finished; ignore further events
            return
        if event == cv2.EVENT_MOUSEMOVE:
            if self.start:
                self.current = (x, y)
                print("Adding point #%d with position(%d,%d)" % (len(self.points), x, y))
                self.points.append((x, y))
        elif event == cv2.EVENT_LBUTTONDOWN:
            self.start = True
        elif event == cv2.EVENT_LBUTTONUP:
            self.start = False
            # A stroke needs at least three points to enclose an area.
            if len(self.points) > 2:
                self.area.append(self.points)
                self.points = []

    def run(self):
        """Run the interactive loop; return the image with all areas filled."""
        cv2.namedWindow(self.window_name, flags=cv2.WINDOW_AUTOSIZE)

        img = cv2.imread(self.window_name)
        if img is None:
            # imread returns None (no exception) for missing/unreadable files.
            raise IOError("Could not read image: %s" % self.window_name)
        height, width, channels = img.shape
        self.CANVAS_SIZE = (height, width, channels)
        cv2.imshow(self.window_name, img)
        cv2.waitKey(1)
        # cv2.cv.SetMouseCallback was removed in OpenCV 3; the cv2-level
        # equivalent exists in both OpenCV 2.4 and 3+.
        cv2.setMouseCallback(self.window_name, self.on_mouse)

        while not self.done:
            # Redraw the base image plus in-progress and finished strokes.
            canvas = img.copy()
            if len(self.points) > 0:
                # Draw all the current polygon segments
                cv2.polylines(canvas, np.array([self.points]), False, self.FINAL_LINE_COLOR, self.LINE_SIZE)
                # And also show what the current segment would look like
                cv2.line(canvas, self.points[-1], self.current, self.WORKING_LINE_COLOR)
            for area in self.area:
                cv2.polylines(canvas, np.array([area]), False, self.FINAL_LINE_COLOR, self.LINE_SIZE)
                # Closing edge back to the stroke's first point.
                cv2.line(canvas, area[-1], area[0], self.WORKING_LINE_COLOR)
            # Update the window
            cv2.imshow(self.window_name, canvas)
            # And wait 50ms before next iteration (this will pump window messages meanwhile)
            if cv2.waitKey(50) & 0xFF == 27:  # ESC hit
                self.done = True

        # User finished entering areas: make the final drawing with each
        # collected area filled in.
        canvas = img.copy()
        for area in self.area:
            cv2.fillPoly(canvas, np.array([area]), self.FINAL_LINE_COLOR)
        cv2.imshow(self.window_name, canvas)
        # Waiting for the user to press any key
        cv2.waitKey()
        cv2.destroyWindow(self.window_name)
        return canvas
# ============================================================================
if __name__ == "__main__":
    # Default image, overridden by the first command-line argument if given.
    filename = '/home/yzbx/Pictures/demo.jpg'
    if len(sys.argv) < 2:
        print('default input file is ', filename)
    else:
        filename = sys.argv[1]
        print('input file is ', filename)

    drawer = PolygonDrawer(filename)
    result = drawer.run()
    cv2.imwrite("polygon.png", result)
    print("Polygon = %s" % drawer.points)
| ISCAS007/demo | areadetection/image_draw.py | image_draw.py | py | 4,205 | python | en | code | 0 | github-code | 13 |
36635427448 | import datetime
from app.shared import schema
from app.shared.data import delete, load, store
from app.shared.handler import lambda_handler
from app.shared.utils import convert_timestamp, is_expired
# Request schema: a single required, hash-typed field identifying the user.
SCHEMA = schema.Schema(
    prompt_user_hash=schema.HASH | schema.REQUIRED,
)
@lambda_handler(SCHEMA)
async def handler(values=None, **kwargs):
    """Poll for the user's pending prompt and report/advance its state.

    Returns ``404`` when no prompt is available, otherwise ``200`` with the
    prompt payload. The first successful poll flips the stored state from
    'pending' to 'received'; expired/aborted prompts are delivered one last
    time and then cleaned up.
    """
    stored_data = await load('user', values.prompt_user_hash)
    if not stored_data:
        return 404, {'error': 'No available prompt'}
    prompt_identifier = stored_data.get('promptIdentifier')
    # Client-facing state: collapse 'received' back to 'pending'.
    state = 'pending' if stored_data.get('state') in ('pending', 'received') else stored_data.get('state')
    expire_at = convert_timestamp(stored_data.get('expireAt'))
    # Any other terminal state: drop the user record and report nothing.
    if state not in ('pending', 'expired', 'aborted'):
        await delete('user', values.prompt_user_hash)
        return 404, {'error': 'No available prompt'}
    if stored_data.get('state') in ('pending', 'received') and is_expired(expire_at):
        # Deadline passed while waiting: mark the prompt itself expired.
        state = 'expired'
        store_data = dict(stored_data)
        store_data['state'] = state
        await store('prompt', prompt_identifier, store_data)
    elif stored_data.get('state') == 'pending':
        # First delivery: record that the client has seen the prompt.
        store_data = dict(stored_data)
        store_data['state'] = 'received'
        await store('user', values.prompt_user_hash, store_data)
        await store('prompt', prompt_identifier, store_data)
    # Expired/aborted prompts: keep them pollable for a 600-second grace
    # window, then stop reporting them; within the window the user record is
    # deleted but the state is still returned once below.
    if state in ('expired', 'aborted') and is_expired(expire_at + datetime.timedelta(seconds=600)):
        await delete('user', values.prompt_user_hash)
        return 404, {'error': 'No available prompt'}
    elif state in ('expired', 'aborted'):
        await delete('user', values.prompt_user_hash)
    return 200, {
        'state': state,
        'encryptedData': stored_data.get('encryptedData'),
        'uniqueIdentifier': stored_data.get('uniqueIdentifier'),
        'timestamp': stored_data.get('timestamp'),
        'expireAt': stored_data.get('expireAt')
    }
| require-id/core | app/src/app/endpoints/user/poll.py | poll.py | py | 1,962 | python | en | code | 2 | github-code | 13 |
31971395163 | #!/usr/bin/env python
# coding: utf-8
import argparse
import sys
from collections import defaultdict
from Bio import SeqIO, AlignIO
from Bio.SeqRecord import SeqRecord
from Bio.Seq import Seq
from collections import Counter
from contextlib import redirect_stdout
def _get_args():
    """Parse command-line arguments for the MSA-to-text converter.

    Prints the usage message and exits with status 1 when invoked with no
    arguments at all.
    """
    parser = argparse.ArgumentParser(
        description='Convert FASTA-format multiple sequence alignment into a txt file. Assumes Courier New')
    parser.add_argument(
        "--input",
        "-i",
        "--in",
        metavar="FILE",
        help="Input FASTA file",
        required=True)
    parser.add_argument(
        "--output",
        "-o",
        "--out",
        "--output",
        metavar="FILE",
        help="output txt file")
    parser.add_argument(
        "-r",
        "--ref",
        type=str,
        help="reference entry name")
    # start/end are 1-based residue coordinates on the reference sequence.
    parser.add_argument(
        "-s",
        "--start",
        type=int,
        help="start position")
    parser.add_argument(
        "-e",
        "--end",
        type=int,
        help="end position")
    parser.add_argument(
        "-g",
        "--gap",
        type=str,
        help='gap character (default: "-")',
        default="-")
    parser.add_argument(
        "-w",
        "--wrap",
        type=int,
        help='line width (default: 100)',
        default=100)
    parser.add_argument(
        '--gap_inclusive',
        help='Gap inclusive (default: False). ',
        action='store_true')
    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)
    args = parser.parse_args()
    return args
def get_ref_record(records, ref_name):
    """Return the record whose ``id`` equals *ref_name* (KeyError if absent)."""
    by_id = {record.id: record for record in records}
    return by_id[ref_name]
def check_start_end_coords(
        ref_record,
        start,
        end,
        gap_character,
        gap_inclusive):
    """Clamp 1-based start/end to the reference's ungapped length.

    ``gap_inclusive`` is accepted for signature compatibility but unused here.
    """
    ungapped_length = len(ref_record.seq.ungap(gap_character))
    start = max(start, 1)
    end = min(end, ungapped_length)
    return start, end
def residue_count(ref_record, gap_character, gap_inclusive):
    """Map 1-based residue numbers of the reference to 0-based alignment columns.

    When ``gap_inclusive`` is True every column counts as a residue; otherwise
    gap columns are skipped and receive no residue number.
    (Removed a dead ``gap_character = gap_character`` self-assignment and a
    local variable that shadowed the function name.)
    """
    mapping = {}
    count = 1
    seq = ref_record.seq
    for column in range(len(seq)):
        if not gap_inclusive and seq[column] == gap_character:
            continue  # gap columns carry no residue number
        mapping[count] = column
        count += 1
    return mapping
def get_residues(records):
    """Build a clustal-style conservation row for the alignment.

    Per column: '*' when one residue covers 100% of the rows, '.' when the
    majority residue covers at least 80% (but under 100%), and ' ' otherwise
    or when the majority character is a gap.
    """
    num_records = len(records)
    conservation = []
    for column in range(records.get_alignment_length()):
        column_chars = [str(rec.seq) for rec in records[:, column:column + 1]]
        top_char, top_count = Counter(column_chars).most_common(1)[0]
        top_pct = top_count / num_records * 100.0
        if top_char == "-":
            symbol = " "
        elif top_pct == 100:
            symbol = "*"
        elif 80 <= top_pct < 100:
            symbol = "."
        else:
            symbol = " "
        conservation.append(symbol)
    return conservation
def add_conservation(residues, start, end, wrap):
    """Format the conservation symbols as an alignment row with a blank name.

    ``start``, ``end`` and ``wrap`` are unused; they are kept so the signature
    parallels :func:`make_text`.
    """
    name_column = "".ljust(15, " ")
    coord_column = " ".rjust(4)
    return "{} {} {} {}".format(name_column, coord_column, "".join(residues), coord_column)
def make_text(record, start, end, wrap):
    """Format one alignment row: 15-char name, start coord, sequence, end coord.

    ``wrap`` is unused; kept for signature parity with the caller.
    """
    name = str(record.id)[:15].ljust(15)
    return "{} {} {} {}".format(name, str(start).rjust(4), record.seq, str(end).rjust(4))
def make_start_end_dict(records, residue_dict, start, end):
    """For each record, compute name padding and ungapped start/end positions.

    Returns {record.id: (name_space, start_loc, end_loc)} where ``name_space``
    pads the name up to the longest id, and the locations are 1-based counts
    of non-gap residues up to the alignment columns mapped by *residue_dict*.
    """
    max_name_length = max(len(record.id) for record in records)
    start_end_dict = {}
    for record in records:
        name_space = max_name_length - len(record.id)
        start_loc = len(record.seq[:residue_dict[start]].ungap()) + 1
        end_loc = len(record.seq[:residue_dict[end] + 1].ungap())
        start_end_dict[record.id] = (name_space, start_loc, end_loc)
    return start_end_dict
def define_preceding_residues(
        records,
        ref_name,
        ref_record,
        start,
        end,
        gap_character,
        gap_inclusive):
    """Slice the alignment to the requested reference window.

    Returns ``(aln_out, preceding_residues)`` where ``aln_out`` is the column
    slice of *records* covering reference residues start..end, and
    ``preceding_residues`` maps each record id to the 1-based residue number
    at which its printed coordinates should begin.
    """
    # NOTE(review): declared as defaultdict(dict) but only ever assigned
    # ints below -- the dict default factory looks unintended.
    preceding_residues = defaultdict(dict)
    if ref_name is not None:
        start, end = check_start_end_coords(
            ref_record, start, end, gap_character, gap_inclusive)
        residue_dict = residue_count(ref_record, gap_character, gap_inclusive)
        aln_out = records[:, residue_dict[start]:residue_dict[end] + 1]
        # NOTE(review): when residue_dict[start] == 0 this slice becomes
        # [:, :-1] (everything but the last column) rather than an empty
        # slice -- verify the intended off-by-one behaviour.
        aln_preceding = records[:, :residue_dict[start] - 1]
    else:
        # No reference: use the whole alignment and no preceding columns.
        start, end = 1, len(ref_record.seq)
        aln_preceding = records[:, 0:0]
        aln_out = records
    for record in aln_preceding:
        # Count non-gap residues before the window; the +2/+1 offsets set the
        # 1-based starting coordinate used by print_records.
        if ref_name is not None:
            preceding_residues[record.id] = (len(record.seq.ungap()) + 2)
        else:
            preceding_residues[record.id] = (len(record.seq.ungap()) + 1)
    return aln_out, preceding_residues
def print_records(records, start, end, wrap, preceding_residues):
    """Print the alignment in wrapped chunks with per-row coordinates.

    Each chunk of width ``wrap`` is preceded by a conservation row; every
    record row shows its running ungapped start/end residue numbers.
    Output goes to stdout (the caller may redirect it to a file).
    """
    count = 0
    residues = get_residues(records)
    # Split both the conservation row and the alignment into wrap-wide chunks.
    residues_chunks = [residues[i:i + wrap]
                       for i in range(0, len(residues), wrap)]
    record_chunks = [records[:, i:i + wrap]
                     for i in range(0, len(residues), wrap)]
    num_chunks = len(range(0, len(residues), wrap))
    # Running count of residues printed so far per record, seeded with each
    # record's offset from preceding_residues.
    chunk_seq_len = defaultdict(int)
    for key in preceding_residues.keys():
        chunk_seq_len[key] = preceding_residues[key]
    for chunk in range(0, num_chunks):
        # NOTE(review): 'residues', 'start' and 'end' below deliberately
        # shadow/overwrite the outer names -- the parameters start/end are
        # only used until the first record row is printed.
        residues = residues_chunks[chunk]
        residue_group = add_conservation(residues, start, end, wrap)
        print(residue_group)
        record_chunk = record_chunks[chunk]
        for record in record_chunk:
            count += 1
            # Advance the per-record residue counter by this chunk's
            # ungapped length, then derive the printed start/end coords.
            chunk_seq_len[record.id] += len(record.seq.ungap())
            start = (chunk_seq_len[record.id] - len(record.seq.ungap()))
            if len(record.seq.ungap()) == 0:
                end = start
            else:
                end = (chunk_seq_len[record.id] - 1)
            record_group = make_text(record, start, end, wrap)
            print(record_group)
        print()
def main():
    """CLI entry point: read a FASTA alignment and print/write the text view."""
    args = _get_args()
    in_fa = args.input
    out_file = args.output
    ref_name = args.ref
    start = args.start
    end = args.end
    wrap = args.wrap
    gap_character = args.gap
    gap_inclusive = args.gap_inclusive
    records = AlignIO.read(in_fa, "fasta")
    # NOTE(review): num_of_entries is computed but never used.
    num_of_entries = len(records)
    # Reference defaults to the first alignment record when -r is not given.
    if ref_name is not None:
        ref_record = get_ref_record(records, ref_name)
    else:
        ref_record = records[0]
    # Default window: the whole (gapped) reference length.
    if start is None:
        start = 1
    if end is None or end > len(ref_record.seq):
        end = len(ref_record.seq)
    aln_out, preceding_residues = define_preceding_residues(
        records, ref_name, ref_record, start, end, gap_character, gap_inclusive)
    # Write to the output file when given, otherwise print to stdout.
    if out_file is not None:
        with open(out_file, 'w') as f:
            with redirect_stdout(f):
                print_records(aln_out, start, end, wrap, preceding_residues)
    else:
        print_records(aln_out, start, end, wrap, preceding_residues)
# Run the CLI when executed as a script.
if __name__ == "__main__":
    main()
| satoshikawato/bio_small_scripts | msa_to_txt.py | msa_to_txt.py | py | 7,873 | python | en | code | 2 | github-code | 13 |
13014037112 | import os
import collections
import yaml
import numpy as np
import torch
import gtn
from mathtools import utils, metrics, torchutils
from seqtools import fstutils_gtn as libfst
def sampleGT(transition_probs, initial_probs):
cur_state = np.random.choice(initial_probs.shape[0], p=initial_probs)
gt_seq = [cur_state]
while True:
transitions = transition_probs[cur_state, :]
cur_state = np.random.choice(transitions.shape[0], p=transitions)
if cur_state == transitions.shape[0] - 1:
return np.array(gt_seq)
gt_seq.append(cur_state)
def sampleScores(gt_seq, num_states):
    """ score[i, j, k] := weight(sample i | state j -> state k) """
    num_transitions = len(gt_seq) - 1
    return np.random.random_sample(size=(num_transitions, num_states, num_states))
def samplePair(transition_probs, initial_probs):
    """Sample one (label sequence, score array) pair from the chain."""
    labels = sampleGT(transition_probs, initial_probs)
    scores = sampleScores(labels, initial_probs.shape[0])
    return labels, scores
def simulate(num_samples, transition, initial, final):
    """Draw ``num_samples`` (labels, scores) pairs from the chain defined by
    (transition, initial, final); ``final`` becomes an absorbing stop column.
    """
    # Append the stop column and row-normalize into proper probabilities.
    transition_probs = np.hstack((transition, final[:, None]))
    transition_probs /= transition_probs.sum(axis=1)[:, None]
    initial_probs = initial / initial.sum()
    return tuple(
        samplePair(transition_probs, initial_probs)
        for _ in range(num_samples)
    )
def main(
        out_dir=None, gpu_dev_id=None,
        num_samples=10, random_seed=None,
        learning_rate=1e-3, num_epochs=500,
        dataset_kwargs=None, dataloader_kwargs=None, model_kwargs=None):
    """Train a GTN lattice CRF on a simulated label/score dataset.

    @param out_dir: output root (defaults to ~/data/output/seqtools/test_gtn)
    @param gpu_dev_id: unused (kept for CLI/config compatibility)
    @param num_samples: number of simulated sequences
    @param random_seed: unused (kept for CLI/config compatibility)
    @param learning_rate: SGD learning rate
    @param num_epochs: training epochs
    @param dataset_kwargs / dataloader_kwargs / model_kwargs: passed through
        to SequenceDataset / DataLoader / LatticeCrf respectively
    """
    # Avoid mutable default arguments (the previous signature shared dict
    # literals across calls).
    if dataset_kwargs is None:
        dataset_kwargs = {}
    if dataloader_kwargs is None:
        dataloader_kwargs = {}
    if model_kwargs is None:
        model_kwargs = {}

    if out_dir is None:
        out_dir = os.path.join('~', 'data', 'output', 'seqtools', 'test_gtn')
    out_dir = os.path.expanduser(out_dir)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    fig_dir = os.path.join(out_dir, 'figures')
    if not os.path.exists(fig_dir):
        os.makedirs(fig_dir)

    # Ground-truth chain over a 5-symbol vocabulary; 'final' is scaled down
    # so sequences tend to run for several steps before stopping.
    vocabulary = ['a', 'b', 'c', 'd', 'e']
    transition = np.array(
        [[0, 1, 0, 0, 0],
         [0, 0, 1, 1, 0],
         [0, 0, 0, 0, 1],
         [0, 1, 0, 0, 1],
         [0, 0, 0, 0, 0]], dtype=float
    )
    initial = np.array([1, 0, 1, 0, 0], dtype=float)
    final = np.array([0, 1, 0, 0, 1], dtype=float) / 10
    seq_params = (transition, initial, final)

    simulated_dataset = simulate(num_samples, *seq_params)
    label_seqs, obsv_seqs = tuple(zip(*simulated_dataset))
    # Convert probabilities to negative-log-space weights.
    seq_params = tuple(map(lambda x: -np.log(x), seq_params))

    dataset = torchutils.SequenceDataset(obsv_seqs, label_seqs, **dataset_kwargs)
    data_loader = torch.utils.data.DataLoader(dataset, **dataloader_kwargs)
    # NOTE(review): train and validation loaders share the same data.
    train_loader = data_loader
    val_loader = data_loader

    transition_weights = torch.tensor(transition, dtype=torch.float).log()
    initial_weights = torch.tensor(initial, dtype=torch.float).log()
    final_weights = torch.tensor(final, dtype=torch.float).log()

    model = libfst.LatticeCrf(
        vocabulary,
        transition_weights=transition_weights,
        initial_weights=initial_weights, final_weights=final_weights,
        debug_output_dir=fig_dir,
        **model_kwargs
    )

    gtn.draw(
        model._transition_fst, os.path.join(fig_dir, 'transitions-init.png'),
        isymbols=model._arc_symbols, osymbols=model._arc_symbols
    )
    gtn.draw(
        model._duration_fst, os.path.join(fig_dir, 'durations-init.png'),
        isymbols=model._arc_symbols, osymbols=model._arc_symbols
    )

    # Diagnostic pass over the data before training: build the numerator /
    # denominator FSTs by hand, and on an infinite loss dump every
    # intermediate FST and drop into pdb.
    if True:
        for i, (inputs, targets, seq_id) in enumerate(train_loader):
            arc_scores = model.scores_to_arc(inputs)
            arc_labels = model.labels_to_arc(targets)

            # NOTE: num_samples here shadows the function parameter.
            batch_size, num_samples, num_classes = arc_scores.shape

            obs_fst = libfst.linearFstFromArray(arc_scores[0].reshape(num_samples, -1))
            gt_fst = libfst.fromSequence(arc_labels[0])
            d1_fst = gtn.compose(obs_fst, model._duration_fst)
            d1_fst = gtn.project_output(d1_fst)
            denom_fst = gtn.compose(d1_fst, model._transition_fst)
            # denom_fst = gtn.project_output(denom_fst)
            num_fst = gtn.compose(denom_fst, gt_fst)
            viterbi_fst = gtn.viterbi_path(denom_fst)
            pred_fst = gtn.remove(gtn.project_output(viterbi_fst))

            # Negative log-likelihood: log Z(numerator) - log Z(denominator).
            loss = gtn.subtract(gtn.forward_score(num_fst), gtn.forward_score(denom_fst))
            loss = torch.tensor(loss.item())

            if torch.isinf(loss).any():
                denom_alt = gtn.compose(obs_fst, model._transition_fst)
                d1_min = gtn.remove(gtn.project_output(d1_fst))
                denom_alt = gtn.compose(d1_min, model._transition_fst)
                num_alt = gtn.compose(denom_alt, gt_fst)
                gtn.draw(
                    obs_fst, os.path.join(fig_dir, 'observations-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    gt_fst, os.path.join(fig_dir, 'labels-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    d1_fst, os.path.join(fig_dir, 'd1-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    d1_min, os.path.join(fig_dir, 'd1-min-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    denom_fst, os.path.join(fig_dir, 'denominator-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    denom_alt, os.path.join(fig_dir, 'denominator-alt-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    num_fst, os.path.join(fig_dir, 'numerator-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    num_alt, os.path.join(fig_dir, 'numerator-alt-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    viterbi_fst, os.path.join(fig_dir, 'viterbi-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                gtn.draw(
                    pred_fst, os.path.join(fig_dir, 'pred-init.png'),
                    isymbols=model._arc_symbols, osymbols=model._arc_symbols
                )
                # Intentional debugging breakpoint on degenerate loss.
                import pdb; pdb.set_trace()

    # Train the model
    train_epoch_log = collections.defaultdict(list)
    val_epoch_log = collections.defaultdict(list)
    metric_dict = {
        'Avg Loss': metrics.AverageLoss(),
        'Accuracy': metrics.Accuracy()
    }

    criterion = model.nllLoss
    optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
    # gamma=1.0 keeps the learning rate constant.
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=1.00)

    model, last_model_wts = torchutils.trainModel(
        model, criterion, optimizer, scheduler, train_loader,
        val_loader,
        metrics=metric_dict,
        test_metric='Avg Loss',
        train_epoch_log=train_epoch_log,
        val_epoch_log=val_epoch_log,
        num_epochs=num_epochs
    )

    gtn.draw(
        model._transition_fst, os.path.join(fig_dir, 'transitions-trained.png'),
        isymbols=model._arc_symbols, osymbols=model._arc_symbols
    )
    gtn.draw(
        model._duration_fst, os.path.join(fig_dir, 'durations-trained.png'),
        isymbols=model._arc_symbols, osymbols=model._arc_symbols
    )

    torchutils.plotEpochLog(
        train_epoch_log, title="Train Epoch Log",
        fn=os.path.join(fig_dir, "train-log.png")
    )
if __name__ == "__main__":
    # Parse command-line args and config file
    cl_args = utils.parse_args(main)
    config, config_fn = utils.parse_config(cl_args, script_name=__file__)
    # Create output directory, instantiate log file and write config options
    out_dir = os.path.expanduser(config['out_dir'])
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # Snapshot the effective config and this script next to the outputs for
    # reproducibility, then run.
    with open(os.path.join(out_dir, config_fn), 'w') as outfile:
        yaml.dump(config, outfile)
    utils.copyFile(__file__, out_dir)
    main(**config)
| jd-jones/seqtools | tests/test_gtn.py | test_gtn.py | py | 8,491 | python | en | code | 1 | github-code | 13 |
74377288337 | import subprocess
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import os
import stat
import shutil
import mimetypes
import sys
if __name__ == "__main__":
patterns = ["*"]
ignore_patterns = None
ignore_directories = False
case_sensitive = True
my_event_handler = PatternMatchingEventHandler(
patterns, ignore_patterns, ignore_directories, case_sensitive)
# Normally mounted as a docker volume
# User can drop more themes into the shared themes mount as needed
themes_path = "/themes/"
# Embedded in hugo watcher to have at least one default theme
# Currently based on the hugo-clarity theme
themes_template_path = "/themes_template/"
# Normally mounted as a docker volume - user can replace this content as needed
# all content is removed we will replace it with the default site example
site_path = "/src"
scan_lock = False
# There are three levels of overrides for the template path
# The template path provides all the initial contents for the starter site
# When the site_path is empty the site_template_path contents will
# be copied in to it to create the default starting site.
# The override priority is:
#
# 1. If the SITE_TEMPLATE_PATH env var is set, that will be used. Typically
# you might mount this template as a docker volume so that you can
# provide your own template.
# 2. If the SITE_TEMPLATE_PATH is NOT set and the THEME env var is set then
# we look in the theme dir for a folder called exampleSite and use the
# content we find there as the basis for the site template. The naming
# convention of exampleSite is from https://themes.gohugo.io/ which
# provides many nice themes.
# 3. If neither of the above are specified, we will use the exampleSite
# folder provided in the clarity theme which is shipped with this project
# by default insied the themes_template_path.
# Embedded in hugo watcher to have at least one default site
# Currently based on the hugo-clarity theme
site_template_path = os.path.join(
themes_template_path,
'hugo-clarity',
'exampleSite')
# If the user has set a theme we override the default example site
# with the one provided in the theme
if os.environ.get('SITE_TEMPLATE_PATH'):
site_template_path = os.environ.get('SITE_TEMPLATE_PATH')
elif os.environ.get('THEME'):
site_template_path = os.path.join(
themes_template_path,
os.environ.get('THEME'),
'exampleSite')
def on_created(event):
    """Watchdog hook: rebuild the site when a new file/dir appears."""
    if scan_lock:
        print("Scan lock enabled")
        return
    print(f"{event.src_path} has been created!")
    run_hugo()
def on_deleted(event):
    """Watchdog hook: rebuild the site when a file/dir is removed."""
    if scan_lock:
        print("Scan lock enabled")
        return
    print(f"File / path deleted: {event.src_path}!")
    run_hugo()
def on_modified(event):
    """Watchdog hook: rebuild the site when a file/dir changes."""
    if scan_lock:
        print("Scan lock enabled")
        return
    print(f"{event.src_path} has been modified")
    run_hugo()
def on_moved(event):
    """Watchdog hook: rebuild the site when a file/dir is renamed/moved."""
    if scan_lock:
        print("Scan lock enabled")
        return
    print(f"File moved {event.src_path} to {event.dest_path}")
    run_hugo()
def check_site_exists():
    """Seed an empty site directory from the site template.

    If site_path has no entries, copy every file/dir from
    site_template_path into it, open files other-writable, and rewrite
    any example.com / example.org URLs to the DOMAIN env var.
    """
    # We take the site default content from the exampleSite directory
    # of the active theme.
    if not os.listdir(site_path):
        print("Site directory is empty - copying in site_template structure")
        files = os.listdir( site_template_path )
        for file in files:
            if os.path.isdir(os.path.join(site_template_path, file)):
                print("Copying dir:", file)
                destination = shutil.copytree(
                    os.path.join(site_template_path, file),
                    os.path.join(site_path, file))
            else:
                print("Copying file:", file)
                destination = shutil.copyfile(
                    os.path.join(site_template_path, file),
                    os.path.join(site_path, file))
        # NOTE(review): chmod with a bare stat.S_IWOTH *replaces* the mode
        # (0o002), it does not add a bit — confirm this is intended.
        for root, dirs, files in os.walk(site_path):
            for directory in dirs:
                os.chmod(os.path.join(root, directory), stat.S_IWOTH)
            for file in files:
                # This breaks nginx giving a 403 on some image files
                # disabling for now
                #os.chmod(os.path.join(root, file), stat.S_IWOTH)
                # If the Env var DOMAIN is set (which should typically be the
                # case), replace any instance of example.com in the template
                # file contents
                filename, file_extension = os.path.splitext(file)  # currently unused
                print("Replacing example.com, example.org in :", file)
                # NOTE(review): if DOMAIN is unset, os.environ.get returns
                # None and str.replace raises TypeError, which the broad
                # except below silently prints — the file is left unchanged.
                try:
                    file_in = open(os.path.join(root, file), "rt")
                    contents = file_in.read()
                    contents = contents.replace('https://example.com', os.environ.get('DOMAIN'))
                    contents = contents.replace('https://example.org', os.environ.get('DOMAIN'))
                    contents = contents.replace('http://example.com', os.environ.get('DOMAIN'))
                    contents = contents.replace('http://example.org', os.environ.get('DOMAIN'))
                    file_in.close()
                    file_out = open(os.path.join(root, file), "wt")
                    file_out.write(contents)
                    file_out.close()
                except Exception as ex:
                    print (ex)
def check_themes_exists():
    """Seed an empty themes directory from the bundled themes template.

    If themes_path has no entries, copy every file/dir from
    themes_template_path into it, then make the copied tree writable
    by others (mirroring check_site_exists).
    """
    # Copy over the theme files from the theme dir if not present
    if not os.listdir(themes_path):
        print("Themes directory is empty - copying in themes_template structure")
        for entry in os.listdir(themes_template_path):
            src = os.path.join(themes_template_path, entry)
            dst = os.path.join(themes_path, entry)
            if os.path.isdir(src):
                print("Copying dir:", entry)
                shutil.copytree(src, dst)
            else:
                print("Copying file:", entry)
                shutil.copyfile(src, dst)
        # BUG FIX: this previously walked the literal string "path", which
        # does not exist, so the chmod pass silently never ran. Walk the
        # freshly seeded themes tree instead.
        for root, dirs, walked in os.walk(themes_path):
            for d in dirs:
                os.chmod(os.path.join(root, d), stat.S_IWOTH)
            for f in walked:
                os.chmod(os.path.join(root, f), stat.S_IWOTH)
def run_hugo():
    """Regenerate the public site with hugo.

    Holds the module-level scan_lock while hugo runs so the watchdog
    handlers do not trigger recursive rebuilds from hugo's own writes.
    """
    # BUG FIX: without this declaration the assignments below bound a
    # *local* scan_lock, so the module-level flag read by the event
    # handlers never changed and the lock had no effect at all.
    global scan_lock
    scan_lock = True
    try:
        check_site_exists()
        check_themes_exists()
        if os.environ.get('THEME'):
            print("Running hugo with theme ", os.environ.get('THEME'))
            subprocess.run([
                "/bin/hugo", "--destination", "/public",
                "--themesDir", "/themes", "--theme", os.environ.get('THEME')])
        else:
            print("Running hugo with theme hugo-clarity")
            subprocess.run([
                "/bin/hugo", "--destination", "/public",
                "--themesDir", "/themes", "--theme", "hugo-clarity"])
    finally:
        # Always release the lock, even if seeding or hugo itself fails.
        scan_lock = False
# Initial seeding and first build before watching for changes.
check_site_exists()
check_themes_exists()
run_hugo()
# Wire the handlers onto the (previously constructed) event handler.
my_event_handler.on_created = on_created
my_event_handler.on_deleted = on_deleted
my_event_handler.on_modified = on_modified
my_event_handler.on_moved = on_moved
go_recursively = True
my_observer = Observer()
# Watch both the site content and the themes tree.
my_observer.schedule(my_event_handler, site_path, recursive=go_recursively)
my_observer.schedule(my_event_handler, themes_path, recursive=go_recursively)
my_observer.start()
try:
    # Keep the main thread alive; the observer works on its own thread.
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    my_observer.stop()
my_observer.join()
| kartoza/hugo-watcher | hugo_watcher.py | hugo_watcher.py | py | 7,648 | python | en | code | 0 | github-code | 13 |
import pandas as pd

# Sample records to serialize.
records = {'Nombre': ['Juan', 'María', 'Pedro'],
           'Edad': [25, 30, 35],
           'Ciudad': ['Madrid', 'Barcelona', 'Sevilla']}
frame = pd.DataFrame(records)

# Persist the DataFrame in Parquet format.
frame.to_parquet(r'C:\Users\migumart\OneDrive - Nokia\Archivos personales\Automatizacion Python\Templates_Python\Formato_Parquet.parquet')
| Nezzu14/Automatizaciones | Timo Project/Formato a Parquet.py | Formato a Parquet.py | py | 388 | python | es | code | 0 | github-code | 13 |
39859715441 | from unittest.mock import Mock
from brownie import network,accounts,config,FundMe,MockV3Aggregator
from scripts.helper_scripts import getAccount,deploy_mocks,LOCAL_DEVELOPMENT_ENVIRONMENTS
def fund_me_deploy():
    """Deploy the FundMe contract.

    On a local development chain a MockV3Aggregator is deployed and used
    as the price feed; otherwise the feed address comes from the network
    config. Returns the deployed contract.
    """
    deployer = getAccount()
    active = network.show_active()
    if active in LOCAL_DEVELOPMENT_ENVIRONMENTS:
        deploy_mocks()
        price_feed = MockV3Aggregator[-1].address
    else:
        price_feed = config["networks"][active]["eth_usd_price_feed"]
    contract = FundMe.deploy(
        price_feed,
        {"from": deployer},
        publish_source=config["networks"][active].get("verify"),
    )
    print(f"Contract deployed to {contract.address}")
    return contract
def main():
    """Brownie script entry point."""
    fund_me_deploy()
def compute_weekly_salary(daily_hours):
    """Return the weekly pay for seven daily hour counts (Sunday..Saturday).

    Pay rules (identical to the original script, whose ``salary`` list
    actually held *hours*):
    - base rate of 100 per hour worked,
    - +15 per hour beyond 8 in any single day,
    - +25 per hour beyond 40 in the week,
    - Sunday (index 0) adds half of that day's base pay,
    - Saturday (index 6) adds a quarter of that day's base pay.
    """
    total_hours = sum(daily_hours)
    daily_overtime = sum(h - 8 for h in daily_hours if h > 8)
    weekly_overtime = max(total_hours - 40, 0)
    sunday_bonus = (daily_hours[0] * 100) // 2 if daily_hours[0] > 0 else 0
    saturday_bonus = (daily_hours[6] * 100) // 4 if daily_hours[6] > 0 else 0
    return (total_hours * 100 + daily_overtime * 15 + weekly_overtime * 25
            + sunday_bonus + saturday_bonus)


if __name__ == "__main__":
    # Read seven daily hour counts from stdin, one per line.
    hours = [int(input()) for _ in range(7)]
    print(compute_weekly_salary(hours))
| Shreesaraan/salary_calculator | salary_calculator.py | salary_calculator.py | py | 501 | python | en | code | 0 | github-code | 13 |
733259250 | #!/usr/bin/python3
""" Rectangle Class """
from models.base import Base
class Rectangle(Base):
    """ Rectangle shape that inherits id management from the Base class.

    Carries validated width/height dimensions and an (x, y) offset used
    when the rectangle is rendered by display().
    """
    def __init__(self, width, height, x=0, y=0, id=None):
        """ initialize instance of rectangle

        Args:
            width: horizontal size (int > 0)
            height: vertical size (int > 0)
            x: horizontal offset used by display() (int >= 0)
            y: vertical offset used by display() (int >= 0)
            id: identity handled by Base (auto-assigned when None)
        """
        # Assign through the property setters so validation runs first.
        self.width = width
        self.height = height
        self.x = x
        self.y = y
        super().__init__(id)
    # Width getter and setter
    @property
    def width(self):
        """ Get the width of a rectangle """
        return (self.__width)
    @width.setter
    def width(self, value):
        """ Set the width of a rectangle
        Args:
            value: Value to assign
        Raises:
            TypeError: if value is not exactly an int (bool is rejected
                too, since type() is compared, not isinstance())
            ValueError: if value is not strictly positive
        """
        if type(value) != int:
            raise TypeError("width must be an integer")
        if value <= 0:
            raise ValueError("width must be > 0")
        self.__width = value
    # Height getter and setter
    @property
    def height(self):
        """ Get the height of a rectangle """
        return (self.__height)
    @height.setter
    def height(self, value):
        """ Set the height of a rectangle
        Args:
            value: Value to assign
        Raises:
            TypeError: if value is not exactly an int
            ValueError: if value is not strictly positive
        """
        if type(value) != int:
            raise TypeError("height must be an integer")
        if value <= 0:
            raise ValueError("height must be > 0")
        self.__height = value
    # x getter and setter
    @property
    def x(self):
        """ Get the x of a rectangle """
        return (self.__x)
    @x.setter
    def x(self, value):
        """ Set the x of a rectangle
        Args:
            value: Value to assign
        Raises:
            TypeError: if value is not exactly an int
            ValueError: if value is negative (zero is allowed)
        """
        if type(value) != int:
            raise TypeError("x must be an integer")
        if value < 0:
            raise ValueError("x must be >= 0")
        self.__x = value
    # y getter and setter
    @property
    def y(self):
        """ Get the y of a rectangle """
        return (self.__y)
    @y.setter
    def y(self, value):
        """ Set the y of a rectangle
        Args:
            value: Value to assign
        Raises:
            TypeError: if value is not exactly an int
            ValueError: if value is negative (zero is allowed)
        """
        if type(value) != int:
            raise TypeError("y must be an integer")
        if value < 0:
            raise ValueError("y must be >= 0")
        self.__y = value
    # Public method area()
    def area(self):
        """ Return The area of the rectangle """
        return (self.width * self.height)
    # Public method display()
    def display(self):
        """ Print the rectangle to the stdout with '#' characters,
        shifted down by y blank lines and right by x spaces. """
        char = '#'
        print("\n" * self.y, end="")
        print('\n'.join([" " * self.x + char * self.width] * self.height))
    # __str__ method
    def __str__(self):
        """ Return the string representation of the Rectangle:
        [Rectangle] (<id>) <x>/<y> - <width>/<height> """
        return ("[Rectangle] ({}) {}/{} - {}/{}".format(self.id,
                                                        self.x, self.y,
                                                        self.width,
                                                        self.height))
    # Public methos update
    def update(self, *args, **kwargs):
        """ Update the rectangle.

        Positional args take priority; kwargs are ignored when any
        positional args are given.
        *args:
            1st argument should be the id attribute
            2nd argument should be the width attribute
            3rd argument should be the height attribute
            4th argument should be the x attribute
            5th argument should be the y attribute
        **kwargs: key=value for each attribute
        """
        if args and len(args) > 0:
            if len(args) >= 1 and args[0] is not None:
                self.id = args[0]
            if len(args) >= 2:
                self.width = args[1]
            if len(args) >= 3:
                self.height = args[2]
            if len(args) >= 4:
                self.x = args[3]
            if len(args) >= 5:
                self.y = args[4]
        elif kwargs and len(kwargs) > 0:
            for k, v in kwargs.items():
                if k == "id" and v is not None:
                    self.id = v
                elif k == "width":
                    self.width = v
                elif k == "height":
                    self.height = v
                elif k == "x":
                    self.x = v
                elif k == "y":
                    self.y = v
    # Public method to_dictionary
    def to_dictionary(self):
        """ Return the dictionary representation of a Rectangle """
        return ({
            "id": self.id,
            "width": self.width,
            "height": self.height,
            "x": self.x,
            "y": self.y
        })
| Aksaim-mohamed-amin/alx-higher_level_programming | 0x0C-python-almost_a_circle/models/rectangle.py | rectangle.py | py | 4,559 | python | en | code | 1 | github-code | 13 |
74267599059 | import math
from common import problem_data
def crabs(input_data):
    """Parse the first line of *input_data* as comma-separated crab positions."""
    line = next(input_data)
    return [int(token, 10) for token in line.split(",")]
def fuel_cost(crabs, position):
    """Total fuel for all crabs to reach *position* at one unit per step."""
    return sum(abs(crab - position) for crab in crabs)
def find_lowest_cost(crabs):
    """Return the minimum total fuel with a constant per-step cost.

    The sum of absolute distances is minimized at a median of the
    positions, so this runs in O(n log n) instead of the previous
    O(n * max(position)) scan over every candidate position.
    """
    ordered = sorted(crabs)
    # For an even count, any point between the two middle elements gives
    # the same (minimal) total, so either middle works.
    median = ordered[len(ordered) // 2]
    return sum(abs(crab - median) for crab in crabs)
# Part 1: constant per-step fuel cost (sample input, then real input).
print(find_lowest_cost(crabs(problem_data("day7_sample.txt"))))
print(find_lowest_cost(crabs(problem_data("day7_problem.txt"))))
def step_costs(max_count):
    """Return triangular fuel costs: costs[d] is the price of moving d steps."""
    costs = [0] * (max_count + 1)
    for step in range(1, max_count + 1):
        costs[step] = costs[step - 1] + step
    return costs
def fuel_rising_cost(crabs, position, costs):
    """Total fuel to reach *position* when each extra step costs one more,
    using the precomputed *costs* table indexed by distance."""
    return sum(costs[abs(crab - position)] for crab in crabs)
def find_lowest_rising_cost(crabs):
    """Return the minimum total fuel when the k-th step of a move costs k.

    A move of d steps costs the triangular number d*(d+1)//2, computed
    directly instead of via a precomputed table — this drops the
    dependency on step_costs and the O(max) table allocation while
    producing identical results.
    """
    best = math.inf
    for position in range(max(crabs) + 1):
        cost = 0
        for crab in crabs:
            distance = abs(crab - position)
            cost += distance * (distance + 1) // 2
        if cost < best:
            best = cost
    return best
# Part 2: linearly increasing per-step fuel cost (sample, then real input).
print(find_lowest_rising_cost(crabs(problem_data("day7_sample.txt"))))
print(find_lowest_rising_cost(crabs(problem_data("day7_problem.txt"))))
| firesock/advent-of-code | 2021/day7.py | day7.py | py | 1,343 | python | en | code | 0 | github-code | 13 |
5213671955 | #coding=utf-8
#Version: python3.6.0
#Tools: Pycharm 2017.3.2
_author_ = ' Hermione'
# Read three integers from one line, sort them, and print "a->b->c".
low, mid, high = sorted(int(token) for token in input().split())
print("{}->{}->{}".format(low, mid, high))
# Python's built-in sorted() does the ordering for us — no manual
# comparisons or element-by-element appends needed.
| Harryotter/zhedaPTApython | ZheDapython/z2/z2.9.py | z2.9.py | py | 363 | python | zh | code | 1 | github-code | 13 |
# Puzzle input: a single "low-high" range on one line.
with open("./input.txt") as puzzle_file:
    raw = puzzle_file.read().strip()
start, end = (int(part.strip()) for part in raw.split("-"))
def valid(num):
    """Part 1 rule: num lies strictly inside (start, end), its digits
    never decrease left-to-right, and at least one adjacent pair of
    digits is equal."""
    if not (start < num < end):
        return False
    digits = str(num)
    has_pair = False
    for left, right in zip(digits, digits[1:]):
        if left > right:
            return False
        if left == right:
            has_pair = True
    return has_pair
def valid2(num):
    """Part 2 rule: num contains at least one run of *exactly* two
    identical adjacent digits (longer runs do not count)."""
    digits = str(num)
    run_len = 1
    for prev, cur in zip(digits, digits[1:]):
        if prev == cur:
            run_len += 1
        else:
            if run_len == 2:
                return True
            run_len = 1
    # The final run ends at the end of the string.
    return run_len == 2
# Count passwords in the puzzle range satisfying each rule set.
# NOTE: range(start, end) excludes end; valid() additionally excludes
# start itself via its strict (start < num < end) check.
valid_nums = []
valid2_nums = []
for num in range(start, end):
    if valid(num):
        valid_nums.append(num)
    if valid2(num):
        valid2_nums.append(num)
print("Answer 1:", len(valid_nums))
print("Answer 2:", len(valid2_nums))
| korylprince/adventofcode | 2019/04/main.py | main.py | py | 936 | python | en | code | 1 | github-code | 13 |
20653680157 | """
This script makes a cross section plot of the density quintiles.
"""
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from astropy.io import fits
from densitysplit.pipeline import DensitySplit
def get_data_positions(data_fn, split='z', los='z'):
    """Load halo coordinates from the FITS catalogue at *data_fn*.

    In redshift space (split='z') the coordinate along the line of sight
    *los* is replaced by its RSD counterpart; otherwise the real-space
    X/Y/Z columns are returned. Returns an (N, 3) array.
    """
    with fits.open(data_fn) as hdul:
        catalogue = hdul[1].data
    if split == 'z':
        xgal = catalogue['X_RSD'] if los == 'x' else catalogue['X']
        ygal = catalogue['Y_RSD'] if los == 'y' else catalogue['Y']
        zgal = catalogue['Z_RSD'] if los == 'z' else catalogue['Z']
    else:
        xgal, ygal, zgal = catalogue['X'], catalogue['Y'], catalogue['Z']
    return np.c_[xgal, ygal, zgal]
if __name__ == '__main__':
# set the relevant parameters
split = 'r'
los = 'z'
boxsize = 500
smooth_ds = 20
nquantiles = 5
cellsize = 5
cross_section_dim = 'z'
cross_section_val = boxsize/2
for sim_num in range(3000, 3001):
data_fn = os.path.join(
'/home/jgmorawe/projects/rrg-wperciva/AbacusSummit/small',
'AbacusSummit_small_c000_ph{}/halos/z0.575'.format(sim_num),
'halos_small_c000_ph{}_z0.575_nden3.2e-04.fits'.format(sim_num))
if os.path.exists(data_fn):
save_path = os.path.join('/home/jgmorawe/results/plot_data/cross_sections',
'cross_section_sim{0}_z={1}.png'.format(
sim_num, int(cross_section_val)))
halo_positions = get_data_positions(data_fn=data_fn, split=split, los=los)
ds_object = DensitySplit(data_positions=halo_positions, boxsize=boxsize)
grid_dim = 500
fig, ax = plt.subplots(nrows=3, ncols=1, sharex=True, sharey=True,
figsize=(5*1.1, 15*1.1), dpi=500)
ax[0].set(aspect='equal')
ax[1].set(aspect='equal')
ax[2].set(aspect='equal')
#gridspec_kw={'width_ratios': [1, 1.2, 1]})
#fig.subplots_adjust(hspace=0.1)
if cross_section_dim == 'z':
x_edges = np.linspace(0, boxsize, grid_dim+1)
y_edges = np.linspace(0, boxsize, grid_dim+1)
x_centres = np.array([np.mean(x_edges[i:i+2]) for i in range(len(x_edges)-1)])
y_centres = np.array([np.mean(y_edges[i:i+2]) for i in range(len(y_edges)-1)])
xy_grid = np.meshgrid(x_centres, np.flip(y_centres))
sampling_x = xy_grid[0].flatten()
sampling_y = xy_grid[1].flatten()
sampling_z = np.array([cross_section_val for i in range(grid_dim**2)])
sampling_positions = np.vstack((sampling_x, sampling_y, sampling_z)).T
density = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=sampling_positions).reshape(grid_dim, grid_dim)
quantiles = ds_object.get_quantiles(nquantiles=nquantiles)
density1 = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=quantiles[0])
density2 = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=quantiles[1])
density3 = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=quantiles[2])
density4 = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=quantiles[3])
density5 = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=quantiles[4])
avg_dens1 = np.mean(density1)
avg_dens2 = np.mean(density2)
avg_dens3 = np.mean(density3)
avg_dens4 = np.mean(density4)
avg_dens5 = np.mean(density5)
dens_edges = np.percentile(density, np.arange(0, 120, 20))
avg_densities = [avg_dens1, avg_dens2, avg_dens3, avg_dens4, avg_dens5]
halo_positions_slice = halo_positions[
(halo_positions[:, 2] < cross_section_val + 25) &
(halo_positions[:, 2] > cross_section_val - 25)]
ax[0].plot(halo_positions_slice[:, 0], halo_positions_slice[:, 1], 'o',
color='black', markersize=0.5)
cmap = 'turbo'
im = ax[1].imshow(X=density,cmap=cmap, vmin=-1, vmax=4, aspect='equal',
extent=[0, 500, 0, 500])
# fig.colorbar(im, ax=ax[1], pad=0.025)
density_quantile = np.zeros(len(density.flatten()))
for i in range(len(density_quantile)):
dens_val = density.flatten()[i]
if dens_edges[0] <= dens_val < dens_edges[1]:
density_quantile[i] = avg_densities[0]
elif dens_edges[1] <= dens_val < dens_edges[2]:
density_quantile[i] = avg_densities[1]
elif dens_edges[2] <= dens_val < dens_edges[3]:
density_quantile[i] = avg_densities[2]
elif dens_edges[3] <= dens_val < dens_edges[4]:
density_quantile[i] = avg_densities[3]
elif dens_edges[4] <= dens_val < dens_edges[5]:
density_quantile[i] = avg_densities[4]
density_quantile = density_quantile.reshape(grid_dim, grid_dim)
im2 = ax[2].imshow(X=density_quantile, cmap=cmap, vmin=np.min(density), vmax=np.max(density), aspect='equal',
extent=[0, 500, 0, 500])
colors = [im.cmap(im.norm(avg_dens)) for avg_dens in avg_densities]
patches = [mpatches.Patch(color=colors[i], label="DS{}".format(i+1)) for i in range(len(avg_densities))]
ax[2].legend(handles=patches, bbox_to_anchor=(0.73, 0.99), loc=2, borderaxespad=0, framealpha=0.6)
#fig.colorbar(im2, ax=ax[2], pad=0.025)
ax[0].set_xlim(0, boxsize); ax[0].set_ylim(0, boxsize)
ax[1].set_xlim(0, boxsize); ax[1].set_ylim(0, boxsize)
ax[2].set_xlim(0, boxsize); ax[2].set_ylim(0, boxsize)
ax[0].set_ylabel('Y (Mpc/h)')
ax[0].set_xlabel('X (Mpc/h)')
ax[1].set_xlabel('X (Mpc/h)')
ax[2].set_xlabel('X (Mpc/h)')
ax[0].set_title('Halo Positions')
ax[1].set_title('Smoothed Overdensity')
ax[2].set_title('Quintiles')
fig.savefig(save_path)
print('Job {} done.'.format(sim_num))
else:
print('Must plot for a z cross section.')
else:
print('Doesnt exist')
"""
x_centres = np.linspace(0, boxsize, grid_dim)
y_centres = np.linspace(0, boxsize, grid_dim)
xy_grid = np.meshgrid(x_centres, y_centres)
sampling_x = xy_grid[0].flatten()
sampling_y = xy_grid[1].flatten()
sampling_z = np.array([cross_section_val for i in range(grid_dim**2)])
sampling_positions = np.vstack((sampling_x, sampling_y, sampling_z)).T
density = ds_object.get_density(
smooth_radius=smooth_ds, cellsize=cellsize,
sampling_positions=sampling_positions)
quantiles = ds_object.get_quantiles(nquantiles=nquantiles)
avg_densities = [avg_dens1, avg_dens2, avg_dens3, avg_dens4, avg_dens5]
halo_positions_slice = halo_positions[
(halo_positions[:, 2] < cross_section_val + smooth_ds/2) &
(halo_positions[:, 2] > cross_section_val - smooth_ds/2)]
ax[0].plot(halo_positions_slice[:, 0], halo_positions_slice[:, 1], 'o',
color='black', markersize=2)
cmap='turbo'
im=ax[1].scatter(x=sampling_x, y=sampling_y, s=2, c=density, cmap=cmap,
vmin=-1, vmax=4, marker='s')
fig.colorbar(im, ax=ax[1], pad=0.025)
for i, mean_dens in zip(range(0, 5), avg_densities):
size = len(quantiles[i][:,0])
ax[2].scatter(x=quantiles[i][:, 0], y=quantiles[i][:, 1], s=2,
c=np.full(size, mean_dens),
cmap=cmap, label='DS{}'.format(i+1), vmin=-1, vmax=4,
marker='s')
ax[0].set_xlim(0, boxsize); ax[0].set_ylim(0, boxsize)
ax[1].set_xlim(0, boxsize); ax[1].set_ylim(0, boxsize)
ax[2].set_xlim(0, boxsize); ax[2].set_ylim(0, boxsize)
ax[0].set_ylabel('Y (Mpc/h)')
ax[0].set_xlabel('X (Mpc/h)')
ax[1].set_xlabel('X (Mpc/h)')
ax[2].set_xlabel('X (Mpc/h)')
ax[0].set_title('Halo Positions')# Z $\in$ ({0},{1})'.format(
# int(cross_section_val - smooth_ds/2),
# int(cross_section_val + smooth_ds/2)))
ax[1].set_title('Smoothed Overdensity')#'Z={0} h^{-1}Mpc, $R_s$ = {1}'.format(
# int(cross_section_val), int(smooth_ds)))
ax[2].set_title('Quintiles')
ax[2].legend()
fig.savefig(save_path)
print('Job {} done.'.format(sim_num))
else:
print('Must plot for a z cross section.')
""" | jgmorawetz/multitracer_densitysplit | scripts/cross_sections.py | cross_sections.py | py | 10,317 | python | en | code | 0 | github-code | 13 |
73546687057 | import itertools
import copy
# n items (powers of two) to be partitioned into x groups.
n = 12
x = 4
lst = [2**i for i in range(n)]
# Templates deep-copied per use: x empty buckets, and an n-digit zero array.
base_groups = [[] for i in itertools.repeat(None, x)]
base_num = [0 for i in itertools.repeat(None, n)]
def split_to_groups(mask):
    """Distribute lst's values into x buckets according to *mask*
    (mask[i] is the bucket index of lst[i]) and return each bucket's sum."""
    buckets = copy.deepcopy(base_groups)
    for index, bucket_id in enumerate(mask):
        buckets[bucket_id].append(lst[index])
    totals = []
    for bucket in buckets:
        totals.append(sum(bucket))
    return totals
# little-endian: ret[0] holds the *least*-significant base-`base` digit
# (the original "big endian" comment was wrong), zero-padded to n digits.
def num_base_n(num, base):
    """Return *num* as a fixed-width list of base-`base` digits."""
    ret = copy.deepcopy(base_num)
    i = 0
    while num > 0:
        mod = num % base
        ret[i] = mod
        num //= base
        i += 1
    return ret
def num_groups(mask):
    """Count how many distinct group labels appear in *mask*."""
    distinct = set()
    for label in mask:
        distinct.add(label)
    return len(distinct)
# Enumerate every assignment of the n items to x groups; keep only the
# assignments that actually use all x groups, and collect the distinct
# sorted tuples of group sums.
final = set()
for code in range(x ** n):
    mask = num_base_n(code, x)
    if num_groups(mask) != x:
        continue
    final.add(tuple(sorted(split_to_groups(mask))))
print(len(final))
31565622110 | import math
def taxi_distance(n):
    """Manhattan ("taxi") distance from square *n* to square 1 on an
    Ulam spiral (Advent of Code 2017, day 3).

    Square n sits on the ring whose outer corner is c**2 for the
    smallest odd c with c*c >= n; the distance is the ring radius r
    plus the offset of n from the nearest side midpoint.
    """
    if n == 1:
        return 0
    # Smallest c with c*c >= n, then bump to odd.
    c = math.ceil(math.sqrt(n))
    # Guard against floating-point undershoot of sqrt for large n
    # (the original trusted the float result unconditionally).
    if c * c < n:
        c += 1
    if c % 2 == 0:
        c += 1
    r = (c - 1) // 2            # ring radius, kept as an int (was float)
    ep = (c * c - n) % (2 * r)  # position along this ring side
    # Distance = radius + |offset from the side's midpoint|
    # (collapses the original two-branch computation).
    return r + abs(ep - r)
n = int(input('n? '))
print('taxi distance of %s is %s' % (n, taxi_distance(n)))
| galgeek/advent2017 | 3-1.py | 3-1.py | py | 511 | python | en | code | 0 | github-code | 13 |
8668121724 | #!/usr/bin/env python
import scapy.all as scapy
def scan(ip):
    """ARP-scan *ip* (a host or CIDR range) on the local network.

    Returns a list of {'ip': ..., 'mac': ...} dicts, one per reply.
    """
    arp = scapy.ARP(pdst=ip)
    ether = scapy.Ether(dst="ff:ff:ff:ff:ff:ff")
    answered = scapy.srp(ether / arp, verbose=False, timeout=10)[0]
    return [{"ip": reply[1].psrc, "mac": reply[1].hwsrc} for reply in answered]
def run_netscan(command):
    """Scan *command* (an IP/CIDR target) and print an IP/MAC table.

    BUG FIX: the original ``while True`` retried unconditionally on any
    exception, busy-looping forever printing "Error in Execution" when
    the scan failed persistently (e.g. missing root privileges). Retries
    are now bounded.
    """
    max_attempts = 3
    for _ in range(max_attempts):
        try:
            result = scan(command)
        except Exception:
            print("Error in Execution")
            continue
        print("IP\t\t\tMAC Address\n-------------------------------------")
        for client in result:
            print((client["ip"] + "\t\t" + client["mac"]))
        break
| alr0cks/alsploit-mitm-toolkit | networkscan/netscan.py | netscan.py | py | 834 | python | en | code | 0 | github-code | 13 |
def script_create_table_especiais(dados=None):
    """Return the SQL that (re)creates the Especiais table.

    Was a lambda assigned to a name (PEP 8 E731) with a mutable ``{}``
    default; *dados* is unused and kept only for interface compatibility.
    """
    return """
DROP TABLE IF EXISTS Especiais;
CREATE TABLE Especiais (
    id int NOT NULL PRIMARY KEY,
    nome text NOT NULL,
    idioma INTEGER NOT NULL,
    ref int NOT NULL,
    FOREIGN KEY (idioma)
        REFERENCES Idiomas(id)
);
"""
def script_insert_table_especiais(dados=None):
    """Return the parameterized SQL for inserting one Especiais row.

    Was a lambda assigned to a name (PEP 8 E731) with a mutable ``{}``
    default; *dados* is unused and kept only for interface compatibility.
    """
    return """INSERT INTO Especiais (id, nome, idioma, ref) VALUES (?, ?, ?, ?);"""
def dados_padrao_tabela_especiais(dados=None):
    """Return the default Especiais rows as (id, nome, idioma, ref) tuples.

    BUG FIX: the original list was missing the comma after the
    ``(2, 'specials', 1, 1)`` tuple, which Python parsed as *calling*
    that tuple with the next one — a TypeError at runtime. Ids 1-4 are
    now four distinct rows.
    """
    return [
        (1, 'special', 1, 1),
        (2, 'specials', 1, 1),
        (3, 'especial', 2, 1),
        (4, 'especiais', 2, 1),
    ]
31321394322 | # coding=utf-8
__author__ = "AstroPrint Product Team <product@astroprint.com>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2017 3DaGoGo, Inc - Released under terms of the AGPLv3 License"
# singleton
_instance = None
def printerProfileManager():
    """Return the process-wide PrinterProfileManager singleton,
    lazily constructing it on first use."""
    global _instance
    if _instance is None:
        _instance = PrinterProfileManager()
    return _instance
import os
import yaml
import logging
import shutil
import uuid
from octoprint.settings import settings
from astroprint.plugin import pluginManager
from astroprint.util import merge_dict
from astroprint.manufacturerpkg import manufacturerPkgManager
from astroprint.cloud import astroprintCloud, AstroPrintCloudNoConnectionException
class PrinterProfileManager(object):
def __init__(self):
self._settings = settings()
configDir = self._settings.getConfigFolder()
self._infoFile = "%s/printer-profile.yaml" % configDir
self._logger = logging.getLogger(__name__)
self.data = {
'last_definition_version': None,
'driver': "marlin",
'plugin': None,
'extruder_count': 1,
'check_clear_bed' : True,
'max_nozzle_temp': 280,
'max_bed_temp': 140,
'heated_bed': True,
'cancel_gcode': ['G28 X0 Y0'],
'invert_z': False,
'invert_x': False,
'invert_y': False,
'manufacturer_id': None,
'printer_model': {
'id': None,
'name': None
},
'filament': {
'color': None,
'name': None
},
'temp_presets' : {
'3e0fc9b398234f2f871310c1998aa000': {
'name' : "PLA",
'nozzle_temp' : 220,
'bed_temp' : 40
},
'2cc9df599f3e4292b379913f4940c000': {
'name' : "ABS",
'nozzle_temp' : 230,
'bed_temp' : 80
},
},
'last_presets_used' : {}
}
config = None
if not os.path.isfile(self._infoFile):
factoryFile = "%s/printer-profile.factory" % configDir
if os.path.isfile(factoryFile):
with open(factoryFile, "r") as f:
config = yaml.safe_load(f)
if not config:
config = {}
if config:
merge_dict(self.data, config)
self.save()
else:
with open(self._infoFile, "r") as f:
config = yaml.safe_load(f)
if config:
# remove old array formats
if 'temp_presets' in config and isinstance(config['temp_presets'], list) == True:
del config['temp_presets']
if 'last_presets_used'in config and isinstance(config['last_presets_used'], list) == True:
del config['last_presets_used']
merge_dict(self.data, config)
# check manufacturer definition update
version = manufacturerPkgManager().version
mfDefProfile = manufacturerPkgManager().printerProfile
mfConfig = {}
if version != self.data['last_definition_version']:
self._logger.info("A New update for manufacturer package has been found: %s" % (version))
mfDefVariant = manufacturerPkgManager().variant
for k in mfDefProfile.keys():
v = mfDefProfile[k]
if v is not None:
mfConfig[k] = v
if k == "temp_presets":
for mfPresetID in v.keys():
p = mfConfig[k][mfPresetID]
if self.data[k] is not None:
dKey = self._checkPresetExisted(k, mfPresetID)
if dKey:
# if manufacturer updates its preset and user it's not allowed to edit => REPLACE
if mfPresetID and mfDefVariant['temperature_presets_edit'] is False:
mfConfig[k][dKey] = {
"manufacturer_id": mfPresetID,
"name": p['name'],
"bed_temp": p['bed_temp'],
"nozzle_temp": p['nozzle_temp'],
}
del mfConfig[k][mfPresetID]
# if manfufacturer updates its preset and user it's allowed to edit => check if different ID. This way is user has edited a preset, and manufacturer update it after using same ID, it wont be overwritten but ignored it.
else:
matchedId = ""
for i in self.data['temp_presets']:
if "manufacturer_id" in self.data['temp_presets'][i]:
if self.data['temp_presets'][i]['manufacturer_id'] == mfPresetID:
matchedId = mfPresetID
if not matchedId:
mfConfig[k][dKey] = {
"manufacturer_id": mfPresetID,
"name": p['name'],
"bed_temp": p['bed_temp'],
"nozzle_temp": p['nozzle_temp'],
}
else:
del mfConfig[k][mfPresetID]
else:
# Add new attribute object with correct format
mfConfig[k][uuid.uuid4().hex] = {
"manufacturer_id": mfPresetID,
"name": p['name'],
"bed_temp": p['bed_temp'],
"nozzle_temp": p['nozzle_temp'],
}
del mfConfig[k][mfPresetID]
else:
mfConfig[k][uuid.uuid4().hex] = {
"manufacturer_id": mfPresetID,
"name": p['name'],
"bed_temp": p['bed_temp'],
"nozzle_temp": p['nozzle_temp'],
}
del mfConfig[k][mfPresetID]
# update version number
self.data['last_definition_version'] = version
if version or mfConfig:
if "temp_presets" in mfConfig.keys() or version:
self._removeDefaultTempPresets()
merge_dict(self.data, mfConfig)
self.save()
def save(self):
with open(self._infoFile, "wb") as infoFile:
yaml.safe_dump(self.data, infoFile, default_flow_style=False, indent=" ", allow_unicode=True)
def set(self, changes):
for k in changes:
if k in self.data:
if self.data[k] != changes[k]:
if k == 'driver':
#change printer object
from astroprint.printer.manager import printerManager
try:
printerManager(changes['driver'])
except Exception as e:
self._logger.error("Error selecting driver %s: %s" % (changes['driver'], e), exc_info=True)
#revent to previous driver
printerManager(self.data['driver'])
raise Exception("unable_to_change_driver")
elif k == 'printer_model':
data = {
"printerModel": changes[k]
}
astroprintCloud().updateBoxrouterData(data)
self.data[k] = self._clean(k, changes[k])
# Send astrobox event
from octoprint.events import eventManager, Events
if k == 'filament':
eventManager().fire(Events.FILAMENT_CHANGE, { k: self.data[k]})
eventManager().fire(Events.PRINTERPROFILE_CHANGE, { k: self.data[k]})
else:
self._logger.error("trying to set unkonwn printer profile field %s to %s" % (k, str(changes[k])))
def driverChoices(self):
plugins = pluginManager().getPluginsByProvider('printerComms')
result = { ("plugin:%s" % k) : { 'name': plugins[k].definition['name'], 'properties': plugins[k].settingsProperties } for k in plugins }
result.update({
'marlin': {'name': 'GCODE - Marlin / Repetier Firmware', 'properties': {'customCancelCommands': True}},
's3g': {'name': 'X3G - Sailfish / Makerbot Firmware', 'properties': {'customCancelCommands': False}}
})
return result
def createTempPreset(self, name, nozzle_temp, bed_temp):
id = uuid.uuid4().hex
temp_update = { 'name' : name, 'nozzle_temp' : int(nozzle_temp), 'bed_temp' : int(bed_temp)}
changes = self.data.copy()
changes['temp_presets'][id] = temp_update
self.set(changes)
self.save()
return id
def _clean(self, field, value):
if field in ['extruder_count', 'max_nozzle_temp', 'max_bed_temp']:
return int(value)
elif field == 'heated_bed':
return bool(value)
else:
return value
def _checkPresetExisted(self, key, presetID):
for dkey in self.data[key].keys():
if "manufacturer_id" in self.data[key][dkey]:
if self.data[key][dkey]['manufacturer_id'] == presetID:
return dkey
def _removeDefaultTempPresets(self):
if "3e0fc9b398234f2f871310c1998aa000" in self.data['temp_presets']:
del self.data['temp_presets']['3e0fc9b398234f2f871310c1998aa000']
if "2cc9df599f3e4292b379913f4940c000" in self.data['temp_presets']:
del self.data['temp_presets']['2cc9df599f3e4292b379913f4940c000']
| AstroPrint/AstroBox | src/astroprint/printerprofile/__init__.py | __init__.py | py | 7,896 | python | en | code | 158 | github-code | 13 |
def isPrime(n):
    """Return True if n is prime, via 6k±1 trial division.

    BUG FIX: the original had no lower-bound check, so isPrime(1) (and
    any n < 2 not divisible by 2 or 3, e.g. -1) incorrectly returned
    True. The caller's loop started at 2 so the script was unaffected,
    but the function itself was wrong.
    """
    if n < 2:
        return False
    if n == 2 or n == 3:
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    # Candidate divisors 5, 7, 11, 13, ... (alternating steps of 2 and 4
    # skip all multiples of 2 and 3).
    i = 5
    w = 2
    while i * i <= n:
        if n % i == 0:
            return False
        i += w
        w = 6 - w
    return True
# Sum all primes below two million (Project Euler #10).
# The original while-loop version shadowed the builtin ``sum``; this
# generator form produces the same printed result.
total = sum(n for n in range(2, 2000000) if isPrime(n))
print(total)
| ZacJoffe/competitive-programming | Python/Project Euler/sum_of_primes.py | sum_of_primes.py | py | 503 | python | en | code | 0 | github-code | 13 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.