seq_id string | text string | repo_name string | sub_path string | file_name string | file_ext string | file_size_in_byte int64 | program_lang string | lang string | doc_type string | stars int64 | dataset string | pt string | api list |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2080694356 | import WMD.calWMD as WMD
import calculatePrecision
import os
import utils
# Directory holding the per-request relevance files and the report file that
# getWMDPrecision appends its results to (Windows-style path under the repo root).
rqPredictPath = utils.RelevancePath
WMDPath = utils.rootPath + r'\WMD.txt'
def getWMDPrecision(topK=5):
    """Evaluate the WMD relevance predictions over every request file.

    For each non-directory file under ``rqPredictPath``, the top-K WMD
    predictions are scored at three relevance thresholds (high only,
    high+mid, high+mid+low) together with NDCG, and the per-file precisions
    are averaged.  The averaged figures are appended to ``WMDPath``.

    :param topK: number of top predictions to score per file
    :returns: tuple (high_precision, mid_precision, low_precision, NDCG)
    :raises ValueError: if no prediction files are found (previously this
        surfaced as an uninformative ZeroDivisionError)
    """
    high_precision = 0.0
    mid_precision = 0.0
    low_precision = 0.0
    sum_NDCG = 0.0
    count = 0
    for file in os.listdir(rqPredictPath):
        full_path = os.path.join(rqPredictPath, file)
        if os.path.isdir(full_path):
            continue
        # fresh per-file confusion matrices, shaped [[tp], [fp]]
        confusionMatrix1 = [[0], [0]]
        confusionMatrix2 = [[0], [0]]
        confusionMatrix3 = [[0], [0]]
        count += 1
        results = WMD.get_topK_relevance(file, topK=topK)
        topPredict, _ = zip(*results)
        confusionMatrix1, NDCG = calculatePrecision.calHighRelevancePrecision(
            file, topPredict, confusionMatrix1, topK)
        confusionMatrix2, _ = calculatePrecision.calHighAndMidPrecision(
            file, topPredict, confusionMatrix2, topK)
        confusionMatrix3, _ = calculatePrecision.calHighAndMidAndLowPrecision(
            file, topPredict, confusionMatrix3, topK)
        tp1, fp1 = confusionMatrix1[0][0], confusionMatrix1[1][0]
        tp2, fp2 = confusionMatrix2[0][0], confusionMatrix2[1][0]
        tp3, fp3 = confusionMatrix3[0][0], confusionMatrix3[1][0]
        # precision at K for each relevance threshold
        high_precision += tp1 / (tp1 + fp1)
        mid_precision += tp2 / (tp2 + fp2)
        low_precision += tp3 / (tp3 + fp3)
        sum_NDCG += NDCG
    if count == 0:
        # previously this fell through to a ZeroDivisionError below
        raise ValueError('no prediction files found in ' + str(rqPredictPath))
    high_precision = high_precision / count
    mid_precision = mid_precision / count
    low_precision = low_precision / count
    NDCG = sum_NDCG / count
    with open(WMDPath, 'a') as f:
        f.write(str(topK) + ':\n')
        f.write('high_precision\t'+str(high_precision) + '\n')
        f.write('mid_precision\t'+str(mid_precision)+'\n')
        f.write('low_precision\t'+str(low_precision)+'\n')
        f.write('NDCG\t'+str(NDCG) + '\n')
        f.write('--------------------------\n')
    return high_precision, mid_precision, low_precision, NDCG
if __name__ == '__main__':
    # Evaluate at several cutoff depths.  getWMDPrecision returns
    # (high_precision, mid_precision, low_precision, NDCG), so the starred
    # unpacking keeps only low_precision (p*) and NDCG (n*) from each run.
    *_, p1, n1 = getWMDPrecision(topK=5)
    *_, p2, n2 = getWMDPrecision(topK=10)
    *_, p3, n3 = getWMDPrecision(topK=15)
    *_, p4, n4 = getWMDPrecision(topK=20)
    # Append the cross-topK averages to the shared WMD result file.
    with open(WMDPath, 'a') as f:
        f.write(
            'ave-fin:\nprecision:{:.4},ndcg:{:.4}'.format((p1+p2+p3+p4)/4, (n1+n2+n3+n4)/4))
| Ylizin/RWSim | ylSim/WMDResult.py | WMDResult.py | py | 2,324 | python | en | code | 2 | github-code | 36 | [
{
"api_name": "utils.RelevancePath",
"line_number": 7,
"usage_type": "attribute"
},
{
"api_name": "utils.rootPath",
"line_number": 8,
"usage_type": "attribute"
},
{
"api_name": "os.listdir",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "os.path.join",
... |
11591654822 | import logging
import sys
import time
from contextlib import contextmanager
from loguru import logger
@contextmanager
def log(desc):
    """Context manager that logs the start, failure, and duration of *desc*.

    :param desc: human-readable description of the wrapped operation
    :raises: re-raises whatever the wrapped block raises, after logging it
    """
    logger.info(f"Function running: {desc}")
    start = time.time()
    try:
        yield
    except Exception:
        # logger.exception() already records the active exception and its
        # traceback; the old code additionally passed the exception object as
        # a spurious str.format() argument, which was silently ignored.
        logger.exception(f"Error encountered on: {desc}")
        raise
    finally:
        elapsed = time.time() - start
        logger.info(f"Time spent on {desc}: {elapsed}s")
def configure_logger():
    """Configure the logger. The logs are written to stderr, log.log and debug.log."""
    # Sink formats: DEBUG carries a timestamp and level column, INFO is the bare message.
    DEBUG = "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> | <level>{message}</level>" # noqa: E501
    INFO = "<level>{message}</level>"
    handlers = [
        {"sink": sys.stderr, "level": "INFO", "format": INFO},
        {"sink": "log.log", "level": "INFO", "format": DEBUG},
        {"sink": "debug.log", "level": "DEBUG", "format": DEBUG},
    ]
    if "pytest" in sys.modules:
        # Only activate stderr in unittest
        # (avoids the file sinks polluting the repo during test runs)
        handlers = handlers[:1]
    logger.configure(handlers=handlers)

    # Intercept standard logging messages toward your Loguru sinks
    # https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
    logging.basicConfig(handlers=[InterceptHandler()], level=logging.INFO)
def mute_logger():
    """Drop every configured sink and log only WARNING and above to stderr."""
    logger.remove()
    logger.add(sys.stderr, level="WARNING")
class InterceptHandler(logging.Handler):
    """Standard-logging handler that forwards every record to the Loguru sinks."""

    # Top-level logger names whose records are dropped entirely (over-verbose).
    IGNORED = ("tensorboard",)

    def emit(self, record):
        # Ignore some over-verbose useless logs.
        # NOTE: the original wrote `name in ("tensorboard")` — without the
        # comma that is a *substring* test against the string "tensorboard",
        # so e.g. a logger named "tensor" was silenced as well.
        name = record.name.split(".")[0]
        if name in self.IGNORED:
            return

        # Get corresponding Loguru level if it exists
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Find caller from where originated the logged message
        frame, depth = logging.currentframe(), 2
        while frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1

        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
| NicoLivesey/zemmourify | zemmourify/logs.py | logs.py | py | 2,135 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "loguru.logger.info",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "loguru.logger",
"line_number": 11,
"usage_type": "name"
},
{
"api_name": "time.time",
"line_number": 12,
"usage_type": "call"
},
{
"api_name": "loguru.logger.exception",
... |
28985800071 | import click
from aoc_2022_kws.cli import main
from aoc_2022_kws.config import config
from aocd import submit
snafu_chars = "=-012"
def snafu_val(value: str):
    """Map one SNAFU digit character ('=', '-', '0', '1', '2') to -2..2."""
    offset = snafu_chars.index(value)
    return offset - 2
def snafu_to_base10(value: str):
    """Convert a SNAFU numeral (base 5 with digits -2..2) to a base-10 int."""
    # Horner's scheme over the digits, most-significant first.
    total = 0
    for ch in value:
        total = total * 5 + snafu_val(ch)
    return total
rsnafu_chars = "012=-"
def base10_to_snafu(value: int):
    """Convert a non-negative base-10 integer to its SNAFU representation.

    SNAFU is base 5 with digit values -2..2 written as '=', '-', '0', '1',
    '2'.  Working from the least-significant digit, each step emits
    ``value % 5`` (where remainders 3 and 4 stand for -2 and -1, carrying one
    into the next place via the ``+2`` offset) until the remaining value fits
    in a single digit.

    Examples: 3 -> "1=", 10 -> "20", 2022 -> longer numeral.
    """
    digits = []
    while value + 2 >= 5:
        digits.append(rsnafu_chars[value % 5])
        value = (value + 2) // 5
    digits.append(rsnafu_chars[value])
    return "".join(reversed(digits))
@main.command()
@click.option("--sample", "-s", is_flag=True)
def day25(sample):
    # AoC 2022 day 25: sum the SNAFU numbers and report the total in SNAFU.
    # --sample switches to the worked example input instead of the puzzle input.
    if sample:
        input_data = (config.SAMPLE_DIR / "day25.txt").read_text()
    else:
        input_data = (config.USER_DIR / "day25.txt").read_text()

    # Echo each line with its decoded base-10 value (debug aid).
    for line in input_data.splitlines():
        print(line, snafu_to_base10(line))

    answer = sum([snafu_to_base10(line) for line in input_data.splitlines()])
    print(f"Answer: {answer} {base10_to_snafu(answer)}")
    # Only submit real answers to adventofcode.com, never the sample run.
    if not sample:
        submit(base10_to_snafu(answer), part="a", day=25, year=2022)
| SocialFinanceDigitalLabs/AdventOfCode | solutions/2022/kws/aoc_2022_kws/day_25.py | day_25.py | py | 1,557 | python | en | code | 2 | github-code | 36 | [
{
"api_name": "aoc_2022_kws.config.config.SAMPLE_DIR",
"line_number": 62,
"usage_type": "attribute"
},
{
"api_name": "aoc_2022_kws.config.config",
"line_number": 62,
"usage_type": "name"
},
{
"api_name": "aoc_2022_kws.config.config.USER_DIR",
"line_number": 64,
"usage_typ... |
25628618035 | import matplotlib.pyplot as plt
def diagram(data, epsilon, beta):
    """Plot *data* with an epsilon confidence band and an optional beta line.

    :param data: indexed series (values are plotted against ``data.index``)
    :param epsilon: half-width of the shaded confidence band around the data
    :param beta: horizontal reference level to draw as a dashed red line,
        or None to omit it
    """
    plt.figure()
    plt.fill_between(data.index, data - epsilon, data + epsilon, alpha=0.3)
    plt.plot(data.index, data, linewidth=0.5)
    if beta is not None:
        # Span the reference line across the actual data range instead of the
        # previously hard-coded x = 0..199 (which assumed exactly 200 samples).
        plt.plot([data.index[0], data.index[-1]], [beta, beta],
                 color="r", linestyle="--", linewidth=0.5)
    plt.xlabel('n')
    plt.ylabel('mV')
    plt.show()
| tronyaginaa/math_statistics | lab4/diagram.py | diagram.py | py | 378 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "matplotlib.pyplot.figure",
"line_number": 4,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot",
"line_number": 4,
"usage_type": "name"
},
{
"api_name": "matplotlib.pyplot.fill_between",
"line_number": 5,
"usage_type": "call"
},
{
"api_name":... |
14438215512 | import psycopg2
read_sql = "SELECT num, data FROM test"
conn = None
try:
# connect to the PostgreSQL database
conn = psycopg2.connect(
dbname='spacedys',
host='localhost',
user='spacedys',
password='password')
# create a new cursor
cur = conn.cursor()
# execute the SELECT statement
cur.execute(read_sql)
results = cur.fetchmany(10)
for result in results:
print(result)
# commit the changes to the database
conn.commit()
# close communication with the database
cur.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close() | nicolacammillini/spacedys | docs/demos/db/read-pg.py | read-pg.py | py | 693 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "psycopg2.connect",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "psycopg2.DatabaseError",
"line_number": 26,
"usage_type": "attribute"
}
] |
8473942040 | #!/usr/bin/env python
# https://gist.github.com/tigercosmos/a5af5359b81b99669ef59e82839aed60
##
##
##
# coding: utf-8
import numpy as np
import cv2
import os
import math
from cyvlfeat.kmeans import kmeans
from scipy import ndimage
from scipy.spatial import distance
from tqdm import tqdm
import pickle
from cyvlfeat.kmeans import kmeans
from cyvlfeat.sift.dsift import dsift
from libsvm.svmutil import *
"""
Image Hierarchy:
data
- train
- class1
- class2
- ...
- test
- class1
- class2
- ...
"""
def get_images(path, size):
    """Load every .jpg under each class sub-directory, grayscale and resized.

    :param path: root directory containing one sub-directory per class
    :param size: images are resized to (size, size)
    :returns: tuple (dict of class name -> list of images,
        flat list of integer class ids, one per loaded image)
    """
    total_pic = {}
    labels = []
    for i, doc in enumerate(os.listdir(path)):
        tmp = []
        for file in os.listdir(os.path.join(path, doc)):
            if file.endswith(".jpg"):
                img = cv2.imread(os.path.join(path, doc, file), cv2.IMREAD_GRAYSCALE)
                if img is None:
                    # cv2.imread returns None for unreadable/corrupt files;
                    # skip them instead of crashing inside cv2.resize.
                    continue
                pic = cv2.resize(img, (size, size))
                tmp.append(pic)
                labels.append(i)
        total_pic[doc] = tmp
    return total_pic, labels
# get images with resize
# Module-level side effect: both splits are loaded eagerly when this script runs.
train, train_digit_labels = get_images('./data/train/', 256)
test, test_digit_labels = get_images('./data/test/', 256)
# visual_words
def sift_features(images, size):
    """Build the visual vocabulary: extract dense-SIFT descriptors from every
    image and cluster them into *size* k-means centroids.

    :param images: dict mapping class name -> list of grayscale images
    :param size: number of k-means clusters (vocabulary size)
    :returns: array of cluster centres, one row per cluster
    """
    print("feature number", size)
    bag_of_features = []

    print("Extract SIFT features...")
    for key, value in tqdm(images.items()):
        for img in value:
            # orb = cv2.xfeatures2d.SIFT_create(500)
            # orb = cv2.ORB_create()
            # keypoints, descriptors = orb.detectAndCompute(img, None)
            # dense SIFT on a 5x5 step grid; `fast` trades accuracy for speed
            _, descriptors = dsift(img, step=[5,5], fast=True)
            if descriptors is not None:
                for des in descriptors:
                    bag_of_features.append(des)
    print("Compute kmeans in dimensions:", size)
    km = kmeans(np.array(bag_of_features).astype('float32'), size, initialization="PLUSPLUS")
    return km
# Build the visual vocabulary (15 k-means centroids) from the training split.
features = sift_features(train, size=15)
def image_class(images, features):
    """Encode each image as a normalised histogram over the visual vocabulary.

    :param images: dict mapping class name -> list of grayscale images
    :param features: cluster centres produced by ``sift_features``
    :returns: (n_images x n_clusters) numpy array, one histogram per image
    """
    image_feats = []
    print("Construct bags of sifts...")
    for key, value in tqdm(images.items()):
        empty = [0 for i in range(0, len(features))]
        for img in value:
            # orb = cv2.ORB_create()
            # orb = cv2.xfeatures2d.SIFT_create()
            # keypoints, descriptors = orb.detectAndCompute(img, None)
            _, descriptors = dsift(img, step=[5,5], fast=True)
            if descriptors is not None:
                # assign each descriptor to its nearest cluster centre ...
                dist = distance.cdist(features, descriptors, metric='euclidean')
                idx = np.argmin(dist, axis=0)
                # ... then histogram the assignments, normalised to sum to 1
                hist, bin_edges = np.histogram(idx, bins=len(features))
                hist_norm = [float(i)/sum(hist) for i in hist]
                image_feats.append(hist_norm)
            else:
                # no descriptors extracted: fall back to an all-zero histogram
                print("NONE")
                image_feats.append(empty)
    image_feats = np.asarray(image_feats)
    return image_feats
# Encode every image of both splits as a bag-of-visual-words histogram.
bovw_train = image_class(train, features)
bovw_test = image_class(test, features)
def nearest_neighbor_classify(train_image_feats, train_labels, test_image_feats, K=50):
    """Classify each test histogram by majority vote of its K nearest
    training histograms (euclidean distance).

    :param train_image_feats: (n_train x d) array of BoVW histograms
    :param train_labels: length-n_train sequence of class labels
    :param test_image_feats: (n_test x d) array of BoVW histograms
    :param K: number of nearest neighbours that vote
    :returns: list of predicted labels, one per test row
    """
    dist = distance.cdist(test_image_feats, train_image_feats, metric='euclidean')
    test_predicts = []

    for row in dist:
        # Initialise one counter per class, in first-seen label order.  The
        # original read the module-level `train` dict here, which made the
        # function unusable outside this script; deriving the classes from
        # train_labels preserves the same keys and ordering.
        label_count = {key: 0 for key in dict.fromkeys(train_labels)}
        idx = np.argsort(row)
        for i in range(K):
            cat = train_labels[idx[i]]
            label_count[cat] += 1
        # pick the class with the highest vote (first seen wins ties)
        final = ""
        max_value = 0
        for key in label_count:
            if label_count[key] > max_value:
                final = key
                max_value = label_count[key]
        test_predicts.append(final)

    return test_predicts
# In[112]:
# Each training class holds 100 images and each test class 10, so expand the
# class names into one label per image, then run the KNN classifier.
train_labels = np.array([item for item in train.keys() for i in range(100)])
test_labels = np.array([item for item in test.keys() for i in range(10)])
knn = nearest_neighbor_classify(bovw_train, train_labels, bovw_test)
# In[114]:
def accuracy(results, test_labels):
    """Return the fraction of predictions in *results* matching *test_labels*."""
    hits = sum(1 for i, predicted in enumerate(results) if predicted == test_labels[i])
    return hits / len(results)
# Report the KNN classifier's accuracy as a percentage.
print("Bag of SIFT representation & nearest neighbor classifier \nAccuracy score: {:.1%}".format(accuracy(knn, test_labels)))
# -e: tolerance of termination criterion
# -t 0: linear kernel
# -c: parameter C of C-SVC
m = svm_train(train_digit_labels, bovw_train, '-c 700 -e 0.0001 -t 0')
p_label, p_acc, p_val = svm_predict(test_digit_labels, bovw_test, m)
# svm_predict returns p_acc as the tuple (accuracy%, MSE, SCC); formatting the
# whole tuple with "{:.1%}" raised TypeError, so format the accuracy fraction.
print("Bag of SIFT representation and linear SVM classifier\nAccuracy score: {:.1%}".format(p_acc[0] / 100))
##
##
| babywyrm/sysadmin | vectorize/sift_wordbag_svm_.py | sift_wordbag_svm_.py | py | 4,666 | python | en | code | 10 | github-code | 36 | [
{
"api_name": "os.listdir",
"line_number": 39,
"usage_type": "call"
},
{
"api_name": "os.listdir",
"line_number": 41,
"usage_type": "call"
},
{
"api_name": "os.path.join",
"line_number": 41,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 41,
... |
21628424114 | """backend URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.contrib import admin
from django.urls import include, path, re_path
from rest_framework import routers, permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from drf_yasg.generators import OpenAPISchemaGenerator
## Utility Classes
class BothHttpAndHttpsSchemaGenerator(OpenAPISchemaGenerator):
    """Schema generator that advertises both http and https (used in DEBUG)."""

    def get_schema(self, request=None, public=False):
        # Take the default schema and override its scheme list.
        schema = super().get_schema(request, public)
        schema.schemes = ["http", "https"]
        return schema
class HttpsOnlySchemaGenerator(OpenAPISchemaGenerator):
    """Schema generator that advertises https only (used in production)."""

    def get_schema(self, request=None, public=False):
        # Take the default schema and override its scheme list.
        schema = super().get_schema(request, public)
        schema.schemes = ["https"]
        return schema
# DRF router; registered viewsets are exposed at the site root.
router = routers.DefaultRouter()
# Register the viewsets

# Add the swagger view
schema_view = get_schema_view(
    openapi.Info(
        title="Chitralekha API Docs",
        default_version="v1",
        description="API documentation for Chitralekha Platform.",
        terms_of_service="https://www.google.com/policies/terms/",
        contact=openapi.Contact(email="contact@snippets.local"),
        license=openapi.License(name="BSD License"),
    ),
    # http+https schemes in DEBUG, https-only otherwise
    generator_class=BothHttpAndHttpsSchemaGenerator
    if settings.DEBUG
    else HttpsOnlySchemaGenerator,
    public=True,
    permission_classes=[permissions.AllowAny],
)

urlpatterns = [
    path("", include(router.urls)),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls")),
    path("organization/", include("organization.urls")),
    path("project/", include("project.urls")),
    path("video/", include("video.urls")),
    path("task/", include("task.urls")),
    path("translation/", include("translation.urls")),
    path("transcript/", include("transcript.urls")),
    path("voiceover/", include("voiceover.urls")),
    path("youtube/", include("youtube.urls")),
    # raw OpenAPI schema, as .json or .yaml
    re_path(
        r"^swagger(?P<format>\.json|\.yaml)$",
        schema_view.without_ui(cache_timeout=0),
        name="schema-json",
    ),
    # interactive API docs
    path(
        "swagger/",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
    re_path(
        r"^redoc/$", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"
    ),
]
| AI4Bharat/Chitralekha-Backend | backend/backend/urls.py | urls.py | py | 2,939 | python | en | code | 18 | github-code | 36 | [
{
"api_name": "drf_yasg.generators.OpenAPISchemaGenerator",
"line_number": 25,
"usage_type": "name"
},
{
"api_name": "drf_yasg.generators.OpenAPISchemaGenerator",
"line_number": 32,
"usage_type": "name"
},
{
"api_name": "rest_framework.routers.DefaultRouter",
"line_number": 3... |
74979913065 | from django.urls import path, re_path
from . import views
urlpatterns= [
path("<int:id>", views.index, name="index"),
path("", views.home, name="home"),
path("upload/", views.upload, name="upload"),
re_path(
r'^delete-image/(?P<id>\d+)/(?P<loc>[a-zA-Z]+)/$',
views.delete_image,
name="delete_image"),
path('search/', views.search, name='search'),
] | kevqyzhu/imagerepo | main/urls.py | urls.py | py | 396 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "django.urls.path",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "django.urls.path",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "django.urls.path",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "django.urls.re_path",... |
35414698228 | from app.server_process import ServerProcess
from flask import Flask, request
import os
app = Flask(__name__)
def main():
    """Run the Flask app on 0.0.0.0, using the PORT env var (default 5000)."""
    listen_port = int(os.environ.get("PORT", 5000))
    app.run(host="0.0.0.0", port=listen_port)
@app.route("/", methods=["POST"])
def process_move():
    """Handle a POSTed move: parse the JSON body and delegate to ServerProcess."""
    request_data = request.get_json()
    return ServerProcess.server_process(request_data)
if __name__ == "__main__":
    # Start the development server when this module is executed directly.
    main()
| bmraubo/TicTacToe | server.py | server.py | py | 398 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "app.server_process",
"line_number": 6,
"usage_type": "name"
},
{
"api_name": "flask.Flask",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "os.environ.get",
"line_number": 10,
"usage_type": "call"
},
{
"api_name": "os.environ",
"line_nu... |
26376483834 | #!/usr/bin/env python
# coding: utf-8
# In[22]:
# Question 1 d)
# Author: Ilyas Sharif
# Importing required packages
import numpy as np
from scipy.interpolate import RegularGridInterpolator
import matplotlib.pyplot as plt
from random import random
from matplotlib import cm
# Loading in the land data
loaded = np.load('Earth.npz')
data = loaded['data']
lon_array = loaded['lon']
lat_array = loaded['lat']
# Converting to Radians: longitude is shifted from [-180, 180] degrees to
# [0, 2*pi] radians and latitude from [-90, 90] degrees to [0, pi] radians,
# matching the ranges produced by fphi/ftheta below.
lon_array = ((lon_array+180)/180)*np.pi
lat_array = ((lat_array+90)/180)*np.pi
# Defining our functions that calculate the angles theta, phi
# given some random number z
def ftheta(z):
    """Polar angle from a uniform sample: theta = arccos(1 - 2z), z in [0, 1]."""
    cos_theta = 1 - 2 * z
    return np.arccos(cos_theta)
def fphi(z):
    """Azimuthal angle from a uniform sample: phi = 2*pi*z, z in [0, 1]."""
    return np.pi * 2 * z
# Number of sample points and empty arrays for 2D plot
N = 50000 ### Note: to get the different plots, just change this value ###
land_points = []
water_points = []

# Create nearest interpolator over the longitude/latitude grid
interp = RegularGridInterpolator((lon_array, lat_array), data, method='nearest')

# Creating points on the "globe"
for i in range(N):
    # Create two random numbers to feed into our theta and phi functions
    # (inverse-CDF sampling, so the points are uniform on the sphere)
    theta = ftheta(random())
    phi = fphi(random())
    # clamp values above the maximum longitude so the interpolator
    # stays inside its grid
    if phi > lon_array[len(lon_array)-1]:
        phi = lon_array[len(lon_array)-1]
    # Computing the interpolation of the points
    # determines if they are near land (data value 1) or not
    delta = interp([phi, theta])
    if delta == 1:
        land_points.append([phi, theta])
    else:
        water_points.append([phi, theta])

# Changing to numpy arrays to get more functionality
land_points = np.array(land_points)
water_points = np.array(water_points)

# Printing the land fraction (land samples over all samples)
print("The land fraction is = " + str(len(land_points)/(len(land_points) + len(water_points))) + " for N = " + str(N))

# Creating the 2D plots: land in green, water in blue
plt.scatter(land_points[:,0], land_points[:,1], color = 'g', marker = '.')
plt.scatter(water_points[:,0], water_points[:,1], color = 'b' , marker = '.')
plt.xlabel("Longitude (Radians)")
plt.ylabel("Latitude (Radians)")
plt.title("Random Location Generator - Land / Water points")
plt.show()
# In[ ]:
| SpencerKi/Computational-Methods | Monte Carlo Methods/Lab10_Q1.py | Lab10_Q1.py | py | 2,147 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "numpy.load",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "numpy.pi",
"line_number": 24,
"usage_type": "attribute"
},
{
"api_name": "numpy.pi",
"line_number": 25,
"usage_type": "attribute"
},
{
"api_name": "numpy.arccos",
"line_numbe... |
74049983784 | import math
import torch
import torch.nn as nn
from torch.nn.parameter import Parameter
from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence
import torch.nn.functional as F
from parlai.utils.torch import neginf
from parlai.core.torch_generator_agent import TorchGeneratorModel
def _transpose_hidden_state(hidden_state):
    """
    Transpose the hidden state so that batch is the first dimension.

    RNN modules produce (num_layers x batchsize x dim) hidden state, but
    DataParallel expects batch size to be first. This helper is used to ensure
    that we're always outputting batch-first, in case DataParallel tries to
    stitch things back together.
    """
    if torch.is_tensor(hidden_state):
        return hidden_state.transpose(0, 1)
    if isinstance(hidden_state, tuple):
        # LSTM-style (hidden, cell) pairs: transpose each member.
        return tuple(_transpose_hidden_state(part) for part in hidden_state)
    raise ValueError("Don't know how to transpose {}".format(hidden_state))
def opt_to_kwargs(opt):
    """
    Get kwargs for seq2seq from opt.

    Copies the recognised seq2seq options out of *opt*; options absent from
    *opt* are simply skipped.
    """
    recognised = (
        'numlayers',
        'dropout',
        'bidirectional',
        'rnn_class',
        'lookuptable',
        'decoder',
        'numsoftmax',
        'attention',
        'attention_length',
        'attention_time',
        'input_dropout',
    )
    return {key: opt[key] for key in recognised if key in opt}
class Seq2seq(TorchGeneratorModel):
    """
    Sequence to sequence parent module.

    Wires together an RNNEncoder, an RNNDecoder and an OutputLayer,
    optionally sharing the embedding table and the RNN between encoder
    and decoder.
    """

    # maps the `rnn_class` string option to the torch RNN module to construct
    RNN_OPTS = {'rnn': nn.RNN, 'gru': nn.GRU, 'lstm': nn.LSTM}

    def __init__(
        self,
        num_features,
        embeddingsize,
        hiddensize,
        numlayers=2,
        dropout=0,
        bidirectional=False,
        rnn_class='lstm',
        lookuptable='unique',
        decoder='same',
        numsoftmax=1,
        attention='none',
        attention_length=48,
        attention_time='post',
        padding_idx=0,
        start_idx=1,
        end_idx=2,
        unknown_idx=3,
        input_dropout=0,
        longest_label=1,
    ):
        """
        Initialize seq2seq model.

        See cmdline args in Seq2seqAgent for description of arguments.
        """
        super().__init__(
            padding_idx=padding_idx,
            start_idx=start_idx,
            end_idx=end_idx,
            unknown_idx=unknown_idx,
            input_dropout=input_dropout,
            longest_label=longest_label,
        )
        self.attn_type = attention

        rnn_class = Seq2seq.RNN_OPTS[rnn_class]
        self.decoder = RNNDecoder(
            num_features,
            embeddingsize,
            hiddensize,
            padding_idx=padding_idx,
            rnn_class=rnn_class,
            numlayers=numlayers,
            dropout=dropout,
            attn_type=attention,
            attn_length=attention_length,
            attn_time=attention_time,
            bidir_input=bidirectional,
        )

        shared_lt = (
            self.decoder.lt  # share embeddings between rnns
            if lookuptable in ('enc_dec', 'all')
            else None
        )
        shared_rnn = self.decoder.rnn if decoder == 'shared' else None
        self.encoder = RNNEncoder(
            num_features,
            embeddingsize,
            hiddensize,
            padding_idx=padding_idx,
            rnn_class=rnn_class,
            numlayers=numlayers,
            dropout=dropout,
            bidirectional=bidirectional,
            shared_lt=shared_lt,
            shared_rnn=shared_rnn,
            unknown_idx=unknown_idx,
            input_dropout=input_dropout,
        )

        shared_weight = (
            self.decoder.lt  # use embeddings for projection
            if lookuptable in ('dec_out', 'all')
            else None
        )
        self.output = OutputLayer(
            num_features,
            embeddingsize,
            hiddensize,
            dropout=dropout,
            numsoftmax=numsoftmax,
            shared_weight=shared_weight,
            padding_idx=padding_idx,
        )

    def reorder_encoder_states(self, encoder_states, indices):
        """
        Reorder encoder states according to a new set of indices.

        :param encoder_states: (enc_out, hidden, attn_mask) tuple produced by
            the encoder (with hidden batch-first, see _transpose_hidden_state)
        :param indices: batch indices (tensor or sequence) to select
        :returns: the reordered (enc_out, hidden, attn_mask) tuple
        """
        enc_out, hidden, attn_mask = encoder_states

        # make sure we swap the hidden state around, apropos multigpu settings
        hidden = _transpose_hidden_state(hidden)

        # LSTM or GRU/RNN hidden state?
        if isinstance(hidden, torch.Tensor):
            hid, cell = hidden, None
        else:
            hid, cell = hidden

        if not torch.is_tensor(indices):
            # cast indices to a tensor if needed
            indices = torch.LongTensor(indices).to(hid.device)

        hid = hid.index_select(1, indices)
        if cell is None:
            hidden = hid
        else:
            cell = cell.index_select(1, indices)
            hidden = (hid, cell)

        if self.attn_type != 'none':
            # attention consumes the full encoder output, so reorder it too
            enc_out = enc_out.index_select(0, indices)
            attn_mask = attn_mask.index_select(0, indices)

        # and bring it back to multigpu friendliness
        hidden = _transpose_hidden_state(hidden)

        return enc_out, hidden, attn_mask

    def reorder_decoder_incremental_state(self, incremental_state, inds):
        """
        Reorder the decoder's incremental hidden state along the batch dim.

        NOTE(review): an argument that is neither a tensor nor a tuple falls
        through and returns None implicitly — presumably those two shapes are
        the only ones the decoder produces; confirm against RNNDecoder.
        """
        if torch.is_tensor(incremental_state):
            # gru or vanilla rnn
            return torch.index_select(incremental_state, 0, inds).contiguous()
        elif isinstance(incremental_state, tuple):
            return tuple(
                self.reorder_decoder_incremental_state(x, inds)
                for x in incremental_state
            )
class UnknownDropout(nn.Module):
    """
    With set frequency, replaces tokens with unknown token.

    This layer can be used right before an embedding layer to make the model
    more robust to unknown words at test time.
    """

    def __init__(self, unknown_idx, probability):
        """
        Initialize layer.

        :param unknown_idx: index of unknown token, replace tokens with this
        :param probability: during training, replaces tokens with unknown token
            at this rate.
        """
        super().__init__()
        self.unknown_idx = unknown_idx
        self.prob = probability

    def forward(self, input):
        """
        If training and dropout rate > 0, masks input with unknown token.
        """
        if not self.training or self.prob <= 0:
            # evaluation mode / dropout disabled: pass tokens through untouched
            return input
        drop = input.new(input.size()).float().uniform_(0, 1) < self.prob
        # note: mutates `input` in place and returns it
        input.masked_fill_(drop, self.unknown_idx)
        return input
class RNNEncoder(nn.Module):
    """
    RNN Encoder.
    """

    def __init__(
        self,
        num_features,
        embeddingsize,
        hiddensize,
        padding_idx=0,
        rnn_class='lstm',
        numlayers=2,
        dropout=0.1,
        bidirectional=False,
        shared_lt=None,
        shared_rnn=None,
        input_dropout=0,
        unknown_idx=None,
        sparse=False,
    ):
        """
        Initialize recurrent encoder.

        :param num_features: vocabulary size for the embedding table
        :param embeddingsize: dimension of the token embeddings
        :param hiddensize: dimension of the RNN hidden state
        :param shared_lt: optional embedding table shared with the decoder
        :param shared_rnn: optional RNN module shared with the decoder
            (incompatible with bidirectional)
        :param input_dropout: rate at which input tokens are replaced by the
            unknown token during training (requires unknown_idx)
        """
        super().__init__()

        self.dropout = nn.Dropout(p=dropout)
        self.layers = numlayers
        self.dirs = 2 if bidirectional else 1
        self.hsz = hiddensize

        if input_dropout > 0 and unknown_idx is None:
            raise RuntimeError('input_dropout > 0 but unknown_idx not set')
        self.input_dropout = UnknownDropout(unknown_idx, input_dropout)

        if shared_lt is None:
            self.lt = nn.Embedding(
                num_features, embeddingsize, padding_idx=padding_idx, sparse=sparse
            )
        else:
            self.lt = shared_lt

        if shared_rnn is None:
            self.rnn = rnn_class(
                embeddingsize,
                hiddensize,
                numlayers,
                dropout=dropout if numlayers > 1 else 0,
                batch_first=True,
                bidirectional=bidirectional,
            )
        elif bidirectional:
            raise RuntimeError('Cannot share decoder with bidir encoder.')
        else:
            self.rnn = shared_rnn

    def forward(self, xs):
        """
        Encode sequence.

        :param xs: (bsz x seqlen) LongTensor of input token indices

        :returns: encoder outputs, hidden state, attention mask

            encoder outputs are the output state at each step of the encoding.
            the hidden state is the final hidden state of the encoder
            (batch-first, see _transpose_hidden_state).
            the attention mask is a mask of which input values are nonzero.
        """
        bsz = len(xs)

        # embed input tokens
        xs = self.input_dropout(xs)
        xes = self.dropout(self.lt(xs))
        attn_mask = xs.ne(0)
        try:
            # nonzero tokens per row; pack to skip computation over padding
            x_lens = torch.sum(attn_mask.int(), dim=1).cpu()
            xes = pack_padded_sequence(xes, x_lens, batch_first=True)
            packed = True
        except ValueError:
            # packing failed, don't pack then
            packed = False

        encoder_output, hidden = self.rnn(xes)
        if packed:
            # total_length to make sure we give the proper length in the case
            # of multigpu settings.
            # https://pytorch.org/docs/stable/notes/faq.html#pack-rnn-unpack-with-data-parallelism
            encoder_output, _ = pad_packed_sequence(
                encoder_output, batch_first=True, total_length=xs.size(1)
            )
        if self.dirs > 1:
            # project to decoder dimension by taking sum of forward and back
            if isinstance(self.rnn, nn.LSTM):
                hidden = (
                    hidden[0].view(-1, self.dirs, bsz, self.hsz).sum(1),
                    hidden[1].view(-1, self.dirs, bsz, self.hsz).sum(1),
                )
            else:
                hidden = hidden.view(-1, self.dirs, bsz, self.hsz).sum(1)

        return encoder_output, _transpose_hidden_state(hidden), attn_mask
class RNNDecoder(nn.Module):
    """
    Recurrent decoder module.

    Can be used as a standalone language model or paired with an encoder.
    """

    def __init__(
        self,
        num_features,
        embeddingsize,
        hiddensize,
        padding_idx=0,
        rnn_class='lstm',
        numlayers=2,
        dropout=0.1,
        bidir_input=False,
        attn_type='none',
        attn_time='pre',
        attn_length=-1,
        sparse=False,
    ):
        """
        Initialize recurrent decoder.

        :param num_features: vocabulary size for the embedding table
        :param embeddingsize: dimension of the token embeddings
        :param hiddensize: dimension of the RNN hidden state
        :param attn_type: attention variant handed to AttentionLayer
        :param attn_time: 'pre' applies attention to the input embeddings,
            'post' applies it to the RNN output
        """
        super().__init__()
        self.dropout = nn.Dropout(p=dropout)
        self.layers = numlayers
        self.hsz = hiddensize
        self.esz = embeddingsize

        self.lt = nn.Embedding(
            num_features, embeddingsize, padding_idx=padding_idx, sparse=sparse
        )
        self.rnn = rnn_class(
            embeddingsize,
            hiddensize,
            numlayers,
            dropout=dropout if numlayers > 1 else 0,
            batch_first=True,
        )

        self.attn_type = attn_type
        self.attn_time = attn_time
        self.attention = AttentionLayer(
            attn_type=attn_type,
            hiddensize=hiddensize,
            embeddingsize=embeddingsize,
            bidirectional=bidir_input,
            attn_length=attn_length,
            attn_time=attn_time,
        )

    def forward(self, xs, encoder_output, incremental_state=None):
        """
        Decode from input tokens.

        :param xs: (bsz x seqlen) LongTensor of input token indices
        :param encoder_output: output from RNNEncoder. Tuple containing
            (enc_out, enc_hidden, attn_mask) tuple.
        :param incremental_state: most recent hidden state to the decoder.
            If None, the hidden state of the encoder is used as initial state,
            and the full sequence is computed. If not None, computes only the
            next forward in the sequence.

        :returns: (output, hidden_state) pair from the RNN.

            - output is a bsz x time x latentdim matrix. If incremental_state is
              given, the time dimension will be 1. This value must be passed to
              the model's OutputLayer for a final softmax.
            - hidden_state depends on the choice of RNN
        """
        enc_state, enc_hidden, attn_mask = encoder_output
        # in case of multi gpu, we need to transpose back out the hidden state
        attn_params = (enc_state, attn_mask)

        if incremental_state is not None:
            # we're doing it piece by piece, so we have a more important hidden
            # seed, and we only need to compute for the final timestep
            hidden = _transpose_hidden_state(incremental_state)
            # only need the last timestep then
            xs = xs[:, -1:]
        else:
            # starting fresh, or generating from scratch. Use the encoder hidden
            # state as our start state
            hidden = _transpose_hidden_state(enc_hidden)

        # RNN modules require contiguous hidden state tensors
        if isinstance(hidden, tuple):
            hidden = tuple(x.contiguous() for x in hidden)
        else:
            hidden = hidden.contiguous()

        # sequence indices => sequence embeddings
        seqlen = xs.size(1)
        xes = self.dropout(self.lt(xs))

        if self.attn_time == 'pre':
            # modify input vectors with attention
            # attention module requires we do this one step at a time
            new_xes = []
            for i in range(seqlen):
                nx, _ = self.attention(xes[:, i : i + 1], hidden, attn_params)
                new_xes.append(nx)
            xes = torch.cat(new_xes, 1).to(xes.device)

        if self.attn_time != 'post':
            # no attn, we can just trust the rnn to run through
            output, new_hidden = self.rnn(xes, hidden)
        else:
            # uh oh, post attn, we need run through one at a time, and do the
            # attention modifications
            new_hidden = hidden
            output = []
            for i in range(seqlen):
                o, new_hidden = self.rnn(xes[:, i, :].unsqueeze(1), new_hidden)
                o, _ = self.attention(o, new_hidden, attn_params)
                output.append(o)
            output = torch.cat(output, dim=1).to(xes.device)

        return output, _transpose_hidden_state(new_hidden)
class Identity(nn.Module):
    """No-op module: returns its input unchanged (used as a null projection)."""

    def forward(self, x):
        return x
class OutputLayer(nn.Module):
"""
Takes in final states and returns distribution over candidates.
"""
def __init__(
self,
num_features,
embeddingsize,
hiddensize,
dropout=0,
numsoftmax=1,
shared_weight=None,
padding_idx=-1,
):
"""
Initialize output layer.
:param num_features: number of candidates to rank
:param hiddensize: (last) dimension of the input vectors
:param embeddingsize: (last) dimension of the candidate vectors
:param numsoftmax: (default 1) number of softmaxes to calculate.
see arxiv.org/abs/1711.03953 for more info.
increasing this slows down computation but can
add more expressivity to the embeddings.
:param shared_weight: (num_features x esz) vector of weights to use as
the final linear layer's weight matrix. default
None starts with a new linear layer.
:param padding_idx: model should output a large negative number for
score at this index. if set to -1 (default),
this is disabled. if >= 0, subtracts one from
num_features and always outputs -1e20 at this
index. only used when shared_weight is not None.
setting this param helps protect gradient from
entering shared embedding matrices.
"""
super().__init__()
self.dropout = nn.Dropout(p=dropout)
self.padding_idx = padding_idx
rng = 1.0 / math.sqrt(num_features)
self.bias = Parameter(torch.Tensor(num_features).uniform_(-rng, rng))
# embedding to scores
if shared_weight is None:
# just a regular linear layer
self.shared = False
self.weight = Parameter(
torch.Tensor(num_features, embeddingsize).normal_(0, 1)
)
else:
# use shared weights and a bias layer instead
self.shared = True
self.weight = shared_weight.weight
self.numsoftmax = numsoftmax
if numsoftmax > 1:
self.esz = embeddingsize
self.softmax = nn.Softmax(dim=1)
self.prior = nn.Linear(hiddensize, numsoftmax, bias=False)
self.latent = nn.Linear(hiddensize, numsoftmax * embeddingsize)
self.activation = nn.Tanh()
else:
# rnn output to embedding
if hiddensize != embeddingsize:
# learn projection to correct dimensions
self.o2e = nn.Linear(hiddensize, embeddingsize, bias=True)
else:
# no need for any transformation here
self.o2e = Identity()
def forward(self, input):
    """
    Compute scores from inputs.
    :param input: (bsz x seq_len x num_directions * hiddensize) tensor of
                  states, e.g. the output states of an RNN
    :returns: (bsz x seqlen x num_cands) scores for each candidate
    """
    # next compute scores over dictionary
    if self.numsoftmax > 1:
        bsz = input.size(0)
        seqlen = input.size(1) if input.dim() > 1 else 1
        # first compute different softmax scores based on input vec
        # hsz => numsoftmax * esz
        latent = self.latent(input)
        active = self.dropout(self.activation(latent))
        # esz => num_features
        logit = F.linear(active.view(-1, self.esz), self.weight, self.bias)
        # calculate priors: distribution over which softmax scores to use
        # hsz => numsoftmax
        prior_logit = self.prior(input).view(-1, self.numsoftmax)
        # softmax over numsoftmax's
        prior = self.softmax(prior_logit)
        # now combine priors with logits: a prior-weighted mixture of the
        # per-component softmaxes, returned in log space as the scores
        prob = self.softmax(logit).view(bsz * seqlen, self.numsoftmax, -1)
        probs = (prob * prior.unsqueeze(2)).sum(1).view(bsz, seqlen, -1)
        scores = probs.log()
    else:
        # hsz => esz, good time for dropout
        e = self.dropout(self.o2e(input))
        # esz => num_features
        scores = F.linear(e, self.weight, self.bias)
    if self.padding_idx >= 0:
        # force a huge negative score at the padding index so it is never
        # selected and no gradient flows into shared embeddings through it
        scores[:, :, self.padding_idx] = neginf(scores.dtype)
    return scores
class AttentionLayer(nn.Module):
    """
    Computes attention between hidden and encoder states.

    See arxiv.org/abs/1508.04025 for more info on each attention type.
    """

    def __init__(
        self,
        attn_type,
        hiddensize,
        embeddingsize,
        bidirectional=False,
        attn_length=-1,
        attn_time='pre',
    ):
        """
        Initialize attention layer.

        :param attn_type: one of 'none', 'local', 'concat', 'dot', 'general'
        :param hiddensize: hidden size of the decoder RNN
        :param embeddingsize: dimension of the token embeddings
        :param bidirectional: True if the encoder is bidirectional (its
                              outputs are then twice hiddensize wide)
        :param attn_length: fixed window size, required (> 0) for 'local'
        :param attn_time: 'pre' attends over the input embeddings, 'post'
                          over the RNN output
        :raises RuntimeError: on bad attn_time, or 'local' without a window
        """
        super().__init__()
        self.attention = attn_type

        if self.attention != 'none':
            hsz = hiddensize
            hszXdirs = hsz * (2 if bidirectional else 1)
            if attn_time == 'pre':
                # attention happens on the input embeddings
                input_dim = embeddingsize
            elif attn_time == 'post':
                # attention happens on the output of the rnn
                input_dim = hsz
            else:
                raise RuntimeError('unsupported attention time')

            # linear layer for combining applied attention weights with input
            self.attn_combine = nn.Linear(hszXdirs + input_dim, input_dim, bias=False)

            if self.attention == 'local':
                # local attention over fixed set of output states
                if attn_length < 0:
                    raise RuntimeError('Set attention length to > 0.')
                self.max_length = attn_length
                # combines input and previous hidden output layer
                self.attn = nn.Linear(hsz + input_dim, attn_length, bias=False)
                # combines attention weights with encoder outputs
            elif self.attention == 'concat':
                self.attn = nn.Linear(hsz + hszXdirs, hsz, bias=False)
                self.attn_v = nn.Linear(hsz, 1, bias=False)
            elif self.attention == 'general':
                # equivalent to dot if attn is identity
                self.attn = nn.Linear(hsz, hszXdirs, bias=False)

    def forward(self, xes, hidden, attn_params):
        """
        Compute attention over attn_params given input and hidden states.

        :param xes:         input state. will be combined with applied
                            attention.
        :param hidden:      hidden state from model. will be used to select
                            states to attend to in from the attn_params.
        :param attn_params: tuple of encoder output states and a mask showing
                            which input indices are nonzero.
        :returns: output, attn_weights
                  output is a new state of same size as input state `xes`.
                  attn_weights are the weights given to each state in the
                  encoder outputs.
        """
        if self.attention == 'none':
            # do nothing, no attention
            return xes, None

        if type(hidden) == tuple:
            # for lstms use the "hidden" state not the cell state
            hidden = hidden[0]
        last_hidden = hidden[-1]  # select hidden state from last RNN layer
        enc_out, attn_mask = attn_params
        bsz, seqlen, hszXnumdir = enc_out.size()
        numlayersXnumdir = last_hidden.size(1)

        if self.attention == 'local':
            # local attention weights aren't based on encoder states
            h_merged = torch.cat((xes.squeeze(1), last_hidden), 1)
            attn_weights = F.softmax(self.attn(h_merged), dim=1)

            # adjust state sizes to the fixed window size
            if seqlen > self.max_length:
                offset = seqlen - self.max_length
                enc_out = enc_out.narrow(1, offset, self.max_length)
                seqlen = self.max_length
            if attn_weights.size(1) > seqlen:
                attn_weights = attn_weights.narrow(1, 0, seqlen)
        else:
            hid = last_hidden.unsqueeze(1)
            if self.attention == 'concat':
                # concat hidden state and encoder outputs
                hid = hid.expand(bsz, seqlen, numlayersXnumdir)
                h_merged = torch.cat((enc_out, hid), 2)
                # then do linear combination of them with activation
                # (torch.tanh: F.tanh is deprecated, and line below in this
                # class already uses torch.tanh)
                active = torch.tanh(self.attn(h_merged))
                attn_w_premask = self.attn_v(active).squeeze(2)
            elif self.attention == 'dot':
                # dot product between hidden and encoder outputs
                if numlayersXnumdir != hszXnumdir:
                    # enc_out has two directions, so double hid
                    hid = torch.cat([hid, hid], 2)
                enc_t = enc_out.transpose(1, 2)
                attn_w_premask = torch.bmm(hid, enc_t).squeeze(1)
            elif self.attention == 'general':
                # before doing dot product, transform hidden state with linear
                # same as dot if linear is identity
                hid = self.attn(hid)
                enc_t = enc_out.transpose(1, 2)
                attn_w_premask = torch.bmm(hid, enc_t).squeeze(1)

            # calculate activation scores, apply mask if needed
            if attn_mask is not None:
                # remove activation from NULL symbols
                attn_w_premask.masked_fill_((~attn_mask), neginf(attn_w_premask.dtype))
            attn_weights = F.softmax(attn_w_premask, dim=1)

        # apply the attention weights to the encoder states
        attn_applied = torch.bmm(attn_weights.unsqueeze(1), enc_out)
        # concatenate the input and encoder states
        merged = torch.cat((xes.squeeze(1), attn_applied.squeeze(1)), 1)
        # combine them with a linear layer and tanh activation
        output = torch.tanh(self.attn_combine(merged).unsqueeze(1))

        return output, attn_weights
| facebookresearch/ParlAI | parlai/agents/seq2seq/modules.py | modules.py | py | 24,551 | python | en | code | 10,365 | github-code | 36 | [
{
"api_name": "torch.is_tensor",
"line_number": 23,
"usage_type": "call"
},
{
"api_name": "parlai.core.torch_generator_agent.TorchGeneratorModel",
"line_number": 52,
"usage_type": "name"
},
{
"api_name": "torch.nn.RNN",
"line_number": 57,
"usage_type": "attribute"
},
... |
74065450024 | import discord
from discord.ext import commands
import os
import errno
import datetime as dt
import time
import sys
import traceback
# channel ids whose messages are never logged (bot/command channels)
BOT_CHANNELS = [803372255777914911, 803375064816287814, 803380541230940161]
class Events(commands.Cog):
    """Cog holding global event listeners: startup, message logging and
    command error handling."""

    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # find the guild named by the GUILD env var
        # NOTE(review): if no name matches, `guild` is left as the last
        # guild in the list (and this raises NameError when the bot is in
        # no guilds at all) — confirm intended.
        for guild in self.client.guilds:
            if guild.name == os.getenv('GUILD'):
                break

        print(
            f'{self.client.user.name} has connected to '
            f'{guild.name} (id: {guild.id})'
        )

        await self.client.change_presence(activity=discord.Activity(
            type=discord.ActivityType.watching, name="you."))

    @commands.Cog.listener()
    async def on_message(self, message):
        # skip the bot's own messages and the designated bot channels
        if message.author == self.client.user \
                or message.channel.id in BOT_CHANNELS:
            return

        log_message(message)

    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        """Global command error handler: reply to known user errors,
        print a traceback for everything else."""
        if hasattr(ctx.command, 'on_error'):
            return

        if isinstance(error, commands.CommandNotFound):
            return

        # BUGFIX: these checks were independent `if` statements, so a
        # MissingRequiredArgument error also fell through into the final
        # `else` and dumped a spurious traceback. Use one elif chain.
        if isinstance(error, commands.MissingRequiredArgument):
            await ctx.send('Please include required arguments\nFor help use `.help [command]`.')
        elif isinstance(error, commands.CommandOnCooldown):
            time_left = (error.retry_after)/60/60
            await ctx.send(f'This command is on cooldown, you can use it in {round(time_left, 1)} hours!')
        else:
            print('Ignoring exception in command {}:'.format(
                ctx.command), file=sys.stderr)
            traceback.print_exception(
                type(error), error, error.__traceback__, file=sys.stderr)
# Logs all messages to the logs file in bot directory
def log_message(message):
    """Append one chat message to the daily log file.

    Files are organized as logs/YYYY-MM/YYYY-MM-DD.txt relative to the
    working directory; missing directories are created on demand.

    :param message: object exposing .author, .content, .attachments and
                    .channel.name (e.g. a discord.Message)
    """
    # discord usernames look like "name#1234"; keep only the name part
    username = str(message.author).split('#')[0]
    # indent continuation lines so multi-line messages stay readable
    user_message = str(message.content).replace('\n', '\n\t\t ')
    if message.attachments:
        user_message = '<Attachment> ' + user_message
    channel = str(message.channel.name)

    today = dt.datetime.today()
    current_time = time.strftime("%H:%M:%S", time.localtime())
    log_file = f'logs/{today.strftime("%Y-%m")}/{dt.date.today()}.txt'

    # exist_ok avoids the check-then-create race the old errno guard
    # worked around
    os.makedirs(os.path.dirname(log_file), exist_ok=True)

    # context manager guarantees the handle is closed even if write fails
    with open(log_file, 'a', encoding='utf-8') as chat_log:
        chat_log.write(f'{current_time} [#{channel}] {username}: {user_message}\n')
def setup(client):
    """Extension entry point used by discord.py's load_extension."""
    cog = Events(client)
    client.add_cog(cog)
    print(f'Loaded {os.path.basename(__file__)} successfully')
| Abearican/Discord-Bot | cogs/events.py | events.py | py | 2,859 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "discord.ext.commands.Cog",
"line_number": 13,
"usage_type": "attribute"
},
{
"api_name": "discord.ext.commands",
"line_number": 13,
"usage_type": "name"
},
{
"api_name": "os.getenv",
"line_number": 21,
"usage_type": "call"
},
{
"api_name": "discord.... |
13624054640 | import io
import os
import sys
from setuptools import setup
# refuse to install on unsupported interpreters
if sys.version_info < (3, 6):
    sys.exit("Sorry, Python < 3.6.0 is not supported")

DESCRIPTION = "Simple Logger for MPI"

here = os.path.abspath(os.path.dirname(__file__))

# use the README as long description when present, otherwise fall back
try:
    with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
        long_description = "\n" + f.read()
except FileNotFoundError:
    long_description = DESCRIPTION

# load __version__ without importing the package; the context manager
# closes the handle that the previous bare exec(open(...).read()) leaked
with open(os.path.join(here, "mpi_logger", "_version.py")) as version_file:
    exec(version_file.read())

setup(
    name="mpi_logger",
    version=__version__,
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Kazuhiro Serizawa",
    author_email="nserihiro@gmail.com",
    url="https://github.com/serihiro/mpi_logger",
    license="MIT",
    packages=["mpi_logger"],
    install_requires=["mpi4py"],
)
| serihiro/mpi_logger | setup.py | setup.py | py | 886 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "sys.version_info",
"line_number": 7,
"usage_type": "attribute"
},
{
"api_name": "sys.exit",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "os.path.abspath",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_numb... |
22489393422 | import pymongo
from pymongo import MongoClient, TEXT
import json
def inputJsonName():
    """Prompt the user for the name of the JSON file to load.

    Return: the file name as entered (str).
    """
    return input("Input the json file name you would like to insert. \n")
def inputPortNum():
    """Prompt the user for a MongoDB port number.

    Return: the port number (int); raises ValueError on non-numeric input.
    """
    return int(input("Please input a port number. \n"))
def main():
    """Load a JSON-lines file into the 291db MongoDB database.

    Prompts for a port number and a file name, (re)creates the collection,
    bulk-inserts the documents and builds a text index over the
    searchable fields.

    Returns:
        bool: True on success, False on any failure (so callers can retry).
    """
    # Prompt user for portNum and insert it into client name
    portNum = inputPortNum()
    try:
        client = MongoClient('mongodb://localhost:' + str(portNum))

        # Checks if the database name exists; if so, clear the old data
        dbs = client.list_database_names()
        if '291db' in dbs:
            print("Database already exists.")
            db = client["291db"]
            # NOTE(review): this drops the `data` collection while the rest
            # of the function works with `dplb` — confirm which is intended.
            db.data.drop()
            print("Updating database....")

        # Connects database 291db to client and create collection
        db = client["291db"]
        collection = db["dplb"]

        # Prompt user for json fileName and process items one row at a time
        # (the input is JSON-lines: one document per line)
        collection_list = []
        fileName = inputJsonName()
        with open(fileName) as file:
            for item in file:
                collection_list.append(json.loads(item))

        # insert collection list into database *deletes collection if it already exists
        collection.delete_many({})
        collection.insert_many(collection_list)
        collection.drop_indexes()
        print("Collection Created!")

        # store year as a string so it can participate in the text index
        collection.update_many({}, [{"$set": {"year": {"$toString": "$year"}}}])
        collection.create_index([('title', TEXT), ('authors', TEXT), ('abstract', TEXT), ('venue', TEXT), ('year', TEXT)], default_language="english")
        print(db.dplb.index_information())

        # close client when done
        client.close()
        # BUGFIX: was `return true;` — `true` is a NameError in Python
        return True
    except Exception as e:
        # BUGFIX: was `"..." + e`, which raises TypeError for non-str e
        print("An error has occured with creating the Database: " + str(e))
        print("You can try again.....please ensure database is running: \n")
        # BUGFIX: was `return false;` — NameError
        return False
if __name__ == "__main__":
    # keep retrying until a load attempt reports success
    while not main():
        pass
| JFong5/Mini-Project2 | load-json.py | load-json.py | py | 2,393 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "pymongo.MongoClient",
"line_number": 24,
"usage_type": "call"
},
{
"api_name": "json.loads",
"line_number": 45,
"usage_type": "call"
},
{
"api_name": "pymongo.TEXT",
"line_number": 60,
"usage_type": "name"
}
] |
8615782978 | # reference code example from: https://github.com/martin-gorner/tensorflow-mnist-tutorial
from matplotlib import pyplot as plt
from matplotlib.animation import FuncAnimation
import matplotlib.animation as animation
import numpy as np
import datetime
class Visualization:
    """Matplotlib-based live view of a trading portfolio during training.

    NOTE(review): `dpi` is accepted but never used; `self.step` appears
    only in commented-out code.
    """

    # shared state: portfolio source, pause flag, animation handle
    port = None
    ispause = False
    am = None

    def __init__(self, title, dpi=70):
        """Build the figure, the three plot lines and the keyboard handler."""
        fig, ax1 = plt.subplots()
        plt.gcf().canvas.set_window_title('Deep Q-learning')
        #fig = plt.figure()
        #fig.set_facecolor('#FFFFFF')
        #ax1 = fig.add_subplot(1,2,1)
        #ax2 = fig.add_subplot(1,2,2)
        ax1.set_title(title)
        # green dots: buys; red triangles: sells; dashed cyan: close price
        lineGreen, = ax1.plot([], [], 'go', linewidth=0.5)
        lineRed, = ax1.plot([], [], 'r^', linewidth=0.5)
        lineClose, = ax1.plot([], [], 'c--', linewidth=0.5)
        plt.tight_layout()
        def init():
            # FuncAnimation init callback: returns the artists to redraw
            #ax1.set_ylim(0, 1)
            #ax1.set_xlim(0, 1)
            """
            ax1.set_title(title)
            #margin = int( (min(datasetX) - max(datasetX))/10 )
            ax1.set_xlim(min(datasetX) , max(datasetX) )
            #margin = int( (min(datasetY) - max(datasetY))/10 )
            ax1.set_ylim(min(datasetY) , max(datasetY) )
            ax2.set_title("Accuracy")
            ax2.set_xlim(0, len(accuracyList)) # initial value only, autoscaled after that
            ax2.set_ylim(0, max(accuracyList) +5) # not autoscaled
            ax3.set_title("Loss")
            ax3.set_xlim(0, len(lossList)) # initial value only, autoscaled after that
            ax3.set_ylim(0, max(lossList) + 0.1) # not autoscaled
            """
            #ax2.set_xlim(0, 10) # initial value only, autoscaled after that
            #plt.tight_layout()
            return lineGreen, lineRed, lineClose
        def update():
            # refresh the price/buy/sell lines from the current portfolio
            # for ax1
            buy, sell, price, date = self.port.getAllData()
            margin = (max(price) - min(price) ) * 0.1
            ax1.set_ylim(min(price) - margin , max(price) + margin)
            #ax1.set_ylim(min(price) , max(price))
            ax1.set_xlim(date[0], date[-1:])
            lineRed.set_data(date, sell)
            lineGreen.set_data(date, buy)
            lineClose.set_data(date, price)
            """
            if self.step == 0 :
                # clear all graph
                lineGreen.set_data([], [])
                lineRed.set_data([], [])
                lineClose.set_data([], [])
                lineGain.set_data([], [])
                return init()
            """
            return lineGreen, lineRed, lineClose
        def key_event_handler(event):
            if len(event.key) == 0:
                return
            else:
                keycode = event.key
            # pause/resume with space bar
            if keycode == ' ':
                self.ispause = not self.ispause
                #if not self.ispause:
                #    update()
                return
            # other matplotlib keyboard shortcuts:
            # 'o' box zoom
            # 'p' mouse pan and zoom
            # 'h' or 'home' reset
            # 's' save
            # 'g' toggle grid (when mouse is over a plot)
            # 'k' toggle log/lin x axis
            # 'l' toggle log/lin y axis
            #plt.draw()
            #update()
        fig.canvas.mpl_connect('key_press_event', key_event_handler)
        # keep the closures so train() can hand them to FuncAnimation
        self._fig = fig
        self._init = init
        self._update = update
    def updatePortfolio(self, port):
        """Set the portfolio object whose getAllData() feeds the plot."""
        self.port = port
    def train(self, total_episode, training_agent=None, save_movie=False) if False else None
    def train(self, training_agent, total_episode, save_movie=False):
        """Drive training through the animation loop; one episode per frame.

        When save_movie is True the run is written to ./video.mp4 via
        ffmpeg instead of being shown interactively.
        """
        def animate_func(step):
            #self.step = step
            if step > total_episode:
                print("Finish training ")
                #plt.close()
            else:
                training_agent(episode=step, visual=self)
            plt.pause(1.001) # makes the UI a little more responsive
            if not self.ispause:
                return self._update()
        self.am = animation.FuncAnimation(self._fig, animate_func, total_episode, init_func=self._init, interval=16, repeat=False, blit=False)
        if save_movie:
            mywriter = animation.FFMpegWriter(fps=24, codec='libx264', extra_args=['-pix_fmt', 'yuv420p', '-profile:v', 'high', '-tune', 'animation', '-crf', '18'])
            self.am.save("./video.mp4", writer=mywriter)
        else:
            plt.show(block=True)
| adminho/trading-stock-thailand | deep_q/animation.py | animation.py | py | 3,682 | python | en | code | 64 | github-code | 36 | [
{
"api_name": "matplotlib.pyplot.subplots",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot",
"line_number": 15,
"usage_type": "name"
},
{
"api_name": "matplotlib.pyplot.gcf",
"line_number": 16,
"usage_type": "call"
},
{
"api_name": "ma... |
35535613818 | import os
import pickle
import pandas as pd
from flask import Flask, request
from flasgger import Swagger
app = Flask(__name__)
Swagger(app)

current_path = os.path.dirname(os.path.realpath(__file__))
# load the trained model once at import time; the context manager closes
# the file handle the previous bare open() left dangling
with open(f"{current_path}/model.pkl", "rb") as pickle_in:
    rf = pickle.load(pickle_in)
# NOTE: flasgger renders this docstring in the Swagger UI — keep it short
@app.route("/")
def index() -> str:
    """
    Home
    """
    return "Welcome All!!"
@app.route("/predict", methods=["GET"])
def predict_note():
    """Let's Authenticate the Banks Note
    ---
    parameters:
      - name: variance
        in: query
        type: number
        required: true
      - name: skewness
        in: query
        type: number
        required: true
      - name: curtosis
        in: query
        type: number
        required: true
      - name: entropy
        in: query
        type: number
        required: true
    responses:
        200:
            description: The output values
    """
    # BUGFIX: query-string values arrive as strings; cast to float so the
    # model receives a numeric feature vector
    variance = float(request.args.get("variance"))
    skewness = float(request.args.get("skewness"))
    curtosis = float(request.args.get("curtosis"))
    entropy = float(request.args.get("entropy"))
    prediction = rf.predict([[variance, skewness, curtosis, entropy]])
    return "This is my prediction" + str(prediction)
@app.route("/predict_file", methods=["POST"])
def predict_file():
    """Predict File Output
    ---
    parameters:
      - name: test
        in: formData
        type: file
        required: true
    responses:
        200:
            description: The output values
    """
    # read the uploaded CSV and run the model over every row
    uploaded = request.files.get("test")
    frame = pd.read_csv(uploaded)
    predictions = rf.predict(frame)
    return "Predcted values of test file is" + str(list(predictions))
if __name__ == "__main__":
    # 0.0.0.0 makes the dev server reachable from outside localhost
    app.run(host="0.0.0.0")
{
"api_name": "flask.Flask",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "flasgger.Swagger",
"line_number": 9,
"usage_type": "call"
},
{
"api_name": "os.path.dirname",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number... |
1437566397 | from collections import OrderedDict
# create dummy _ fct (so that gettext can parse dict)

# UI language options: language code -> display name
langOption = OrderedDict(en="English", fr="Français", de="Deutsch")

# summary table headers and title per language
# NOTE(review): the 'de' entries are still empty / untranslated
summary_info = {'en': ['filename', '# users', 'file size'], 'fr': ['nom de fichier', 'nb utilisateurs', 'taille du fichier'], 'de': []}
summary_title = {'en': "Summary", 'fr': "Résumé", 'de': ''}

# short or long attrakdiff: item order for the two questionnaire variants
# (a trailing '*' presumably marks reversed items — confirm with scoring code)
order_short = ["QP2*", 'ATT2', 'QP3*', 'QHI3*', 'QP5*', "QHI4", "QHS2", "ATT5*", "QP6", "QHS5"]
order_long = [
    "QP1*", "QHI1", "ATT1*", "QHS1*", "QP2*", "QHI2*", "ATT2", "QP3", "ATT3*", "QP4",
    "QHI3*", "QP5*", "QHI4", "QHI5", "QHI6*", "QHI7", "ATT4", "QHS2", "ATT5*",
    'QP6', "ATT6", "QHS3*", "QHS4*", "QHS5", "QHS6", "ATT7*", "QHS7*", "QP7"
]

# few words translated
i18n_average = {'en': "Average value", 'fr': "Valeur moyenne", 'de': ""}
i18n_dim = {'en': "Dimension", 'fr': "Dimension", 'de': ""}
plt_avrg = {'en': "Diagram of average values", 'fr': "Graphique des valeurs moyennes", 'de': ""}
plt_pair = {'en': "Description of Word-pairs", 'fr': "Graphique des paires de mots", 'de': ""}
plt_attr = {'en': "Portfolio-presentation", 'fr': "Portfolio des résultats", 'de': ""}

# categories title (the four AttrakDiff dimensions)
# NOTE(review): "Attrativeness" looks like a typo for "Attractiveness" —
# it is a displayed string, fix where rendered if confirmed
categories = ["QP", "QHS", "QHI", "ATT"]
titles = {
    "QP": {'en': "Pragmatic Quality", 'fr': "Qualité Pragmatique", 'de': ""},
    "QHS": {'en': "Hedonic Quality - Stimulation", 'fr': "Qualité hédonique - stimulation", 'de': ""},
    "QHI": {'en': "Hedonic Quality - Identify", 'fr': "Qualité hédonique - identification", 'de': ""},
    "QH": {'en': "Hedonic Quality", 'fr': "Qualité hédonique", 'de': ""},
    "ATT": {'en': "Attrativeness", 'fr': "Attractivité", 'de': ""}
}
# attrakdiff cases: keys are presumably (pragmatic, hedonic) grid
# coordinates on the portfolio chart, values the localized region label
# NOTE(review): "taks-" looks like a typo for "task-" in a displayed string
QPQH = {
    (-2, 2): {'en': "too\nself-\noriented", 'fr': "trop\norienté\nvers le soi", 'de': ""},
    (0, 2): {'en': "self-\noriented", 'fr': "orienté\nvers le soi", 'de': ""},
    (2, 2): {'en': "desired", 'fr': "désiré", 'de': ""},
    (0, 0): {'en': "neutral", 'fr': "neutre", 'de': ""},
    (2, 0): {'en': "taks-\noriented", 'fr': "orienté tâche", 'de': ""},
    (-2, -2): {'en': "superfluous", 'fr': "superflu", 'de': ""},
    (2, -2): {'en': "too\ntask-\noriented", 'fr': "trop\norienté\ntâche", 'de': ""},
}
# pairs of word, ordered in the order we want them on the graph (QP, QHS, QHI and APP)
# Each entry maps a question id to its (negative, positive) anchor words
# per language. NOTE(review): 'de' anchors are still untranslated.
pairs = OrderedDict(
    QP1={
        'en': ("Technical", "Human"),
        'fr': ("Technique", "Humain"),
        'de': ("", "")
    },
    QP2={
        'en': ("Complicated", "Simple"),
        'fr': ("Compliqué", "Simple"),
        'de': ("", "")
    },
    QP3={
        'en': ("Impractical", "Pratical"),
        'fr': ("Pas pratique", "Pratique"),
        'de': ("", "")
    },
    QP4={
        'en': ("Cumbersome", "Straightforward"),
        'fr': ("Fastidieux", "Efficace"),
        'de': ("", "")
    },
    QP5={
        'en': ("Unpredictable", "Predictable"),
        'fr': ("Imprévisible", "Prévisible"),
        'de': ("", "")
    },
    QP6={
        'en': ("Confusing", "Clearly structured"),
        'fr': ("Confus", "Clair"),
        'de': ("", "")
    },
    QP7={
        'en': ("Unruly", "Manageable"),
        'fr': ("Incontrôlable", "Maîtrisable"),
        'de': ("", "")
    },
    QHS1={
        'en': ("Conventional", "Inventive"),
        'fr': ("Conventionnel", "Original"),
        'de': ("", "")
    },
    QHS2={
        'en': ("Unimaginative", "Creative"),
        'fr': ("Sans imagination", "Créatif"),
        'de': ("", "")
    },
    QHS3={
        'en': ("Cautious", "Bold"),
        'fr': ("Prudent", "Audacieux"),
        'de': ("", "")
    },
    QHS4={
        'en': ("Conservative", "Innovative"),
        'fr': ("Conservateur", "Novateur"),
        'de': ("", "")
    },
    QHS5={
        'en': ("Dull", "Captivating"),
        'fr': ("Ennuyeux", "Captivant"),
        'de': ("", "")
    },
    QHS6={
        'en': ("Undemanding", "Challenging"),
        'fr': ("Peu exigeant", "Challenging"),
        'de': ("", "")
    },
    QHS7={
        'en': ("Ordinary", "Novel"),
        'fr': ("Commun", "Nouveau"),
        'de': ("", "")
    },
    QHI1={
        'en': ("Isolating", "Connective"),
        'fr': ("M’isole", "Me sociabilise"),
        'de': ("", "")
    },
    QHI2={
        'en': ("Unprofessional", "Professional"),
        'fr': ("Amateur", "Professionnel"),
        'de': ("", "")
    },
    QHI3={
        'en': ("Tacky", "Stylish"),
        'fr': ("De mauvais goût", "De bon goût"),
        'de': ("", "")
    },
    QHI4={
        'en': ("Cheap", "Premium"),
        'fr': ("Bas de gamme", "Haut de gamme"),
        'de': ("", "")
    },
    QHI5={
        'en': ("Alienating", "Integrating"),
        'fr': ("M’exclut", "M’intègre"),
        'de': ("", "")
    },
    QHI6={
        'en': ("Separates me", "Bring me closer"),
        'fr': ("Me sépare des autres", "Me rapproche des autres"),
        'de': ("", "")
    },
    QHI7={
        'en': ("Unpresentable", "Presentable"),
        'fr': ("Non présentable", "Présentable"),
        'de': ("", "")
    },
    ATT1={
        'en': ("Unpleasant", "Pleasant"),
        'fr': ("Déplaisant", "Plaisant"),
        'de': ("", "")
    },
    ATT2={
        'en': ("Ugly", "Attractive"),
        'fr': ("Laid", "Beau"),
        'de': ("", "")
    },
    ATT3={
        'en': ("Disagreeable", "Likeable"),
        'fr': ("Désagréable", "Agréable"),
        'de': ("", "")
    },
    ATT4={
        'en': ("Rejecting", "Inviting"),
        'fr': ("Rebutant", "Attirant"),
        'de': ("", "")
    },
    ATT5={
        'en': ("Bad", "Good"),
        'fr': ("Mauvais", "Bon"),
        'de': ("", "")
    },
    ATT6={
        'en': ("Repelling", "appealing"),
        'fr': ("Repoussant", "Attrayant"),
        'de': ("", "")
    },
    ATT7={
        'en': ("Discouraging", "Motivating"),
        'fr': ("Décourageant", "Motivant"),
        'de': ("", "")
    }
)
| thilaire/PLADIF | pladif/naming.py | naming.py | py | 5,236 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "collections.OrderedDict",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "collections.OrderedDict",
"line_number": 50,
"usage_type": "call"
}
] |
11469135915 | """
sample hyperparameters
sample maximum values by gumble, random features
all in python
optimization the criterion
1 hyperparameter
feed: Xsamples, ysamples, l, sigma, sigma0, initialxs
multiple hyperparameters
feed: Xsamples, ysamples, ls, sigmas, sigma0s, initialxs
return
train operation
x to query
!covmatrix computed once and cached for each optimization
GPflow
optimize for the correct hyperparameters
samples of hyperparameters
Implement:
feed: Xsamples, ysamples
return
samples of maximum values
by gumble
by random features
optimize for maximum from Xsamples, and some random initialization
x to query
optimize a function with multiple initializations
list of x variables (initialized with initializations)
list of optimizers for each x variables
feed: initial values
Xsamples, ysamples
l, sigma, sigma0
"""
import tensorflow as tf
import numpy as np
import scipy as sp
import time
import scipy.stats as spst
import sys
import utils
import matplotlib.pyplot as plt
# draw random features, and their weights
def draw_random_init_weights_features(
        xdim, n_funcs, n_features,
        xx, # (nobs, xdim)
        yy, # (nobs, 1)
        l, sigma, sigma0,
        # (1,xdim), (), ()
        dtype=tf.float32,
        name='random_features'):
    """
    sigma, sigma0: scalars
    l: 1 x xdim
    xx: n x xdim
    yy: n x 1
    n_features: a scalar
    different from draw_random_weights_features,
    this function set W, b, noise as Variable that is initialized randomly
    rather than sample W, b, noise from random function
    """
    n = tf.shape(xx)[0]
    # replicate the observed data once per sampled function
    xx = tf.tile( tf.expand_dims(xx, axis=0), multiples=(n_funcs,1,1) )
    yy = tf.tile( tf.expand_dims(yy, axis=0), multiples=(n_funcs,1,1) )
    idn = tf.tile(tf.expand_dims(tf.eye(n, dtype=dtype), axis=0), multiples=(n_funcs,1,1))
    # draw weights for the random features.
    W = tf.get_variable(name="{}_W".format(name),
            shape=(n_funcs, n_features,xdim),
            dtype=dtype,
            initializer=tf.random_normal_initializer()) \
        * tf.tile( tf.expand_dims(tf.sqrt(l), axis=0),
            multiples=(n_funcs,n_features,1) )
    # W = tf.random.normal(shape=(n_funcs, n_features,xdim), dtype=dtype) * tf.tile( tf.expand_dims(tf.sqrt(l), axis=0), multiples=(n_funcs,n_features,1) )
    # n_funcs x n_features x xdim
    # random phases in [0, 2*pi)
    b = 2.0 * np.pi \
        * tf.get_variable(
            name="{}_b".format(name),
            shape=(n_funcs,n_features,1),
            dtype=dtype,
            initializer=tf.random_uniform_initializer(minval=0., maxval=1.))
    # b = 2.0 * np.pi * tf.random.uniform(shape=(n_funcs,n_features,1), dtype=dtype)
    # n_funcs x n_features x 1
    # compute the features for xx.
    Z = tf.cast(tf.sqrt(2.0 * sigma / n_features), dtype=dtype)\
        * tf.cos( tf.matmul(W, xx, transpose_b=True)
            + tf.tile(b, multiples=(1,1,n) ))
    # n_funcs x n_features x n
    # draw the coefficient theta.
    noise = tf.get_variable(
        name="{}_noise".format(name),
        shape=(n_funcs,n_features,1),
        dtype=dtype,
        initializer=tf.random_normal_initializer())
    # noise = tf.random.normal(shape=(n_funcs,n_features,1))
    # n_funcs x n_features x 1
    def true_clause():
        # branch intended for n < n_features: solve in the n-dim data space
        Sigma = tf.matmul(Z, Z, transpose_a=True) + sigma0 * idn
        # n_funcs x n x n of rank n or n_features
        mu = tf.matmul(tf.matmul(Z, utils.multichol2inv(Sigma, n_funcs, dtype=dtype)), yy)
        # n_funcs x n_features x 1
        # tf.linalg.eigh returns None sometimes!!!
        e, v = tf.linalg.eigh(Sigma)
        # e = tf.linalg.eigvalsh(Sigma)
        e = tf.expand_dims(e, axis=-1)
        # n_funcs x n x 1
        r = tf.reciprocal(tf.sqrt(e) * (tf.sqrt(e) + tf.sqrt(sigma0)))
        # n_funcs x n x 1
        theta = noise \
            - tf.matmul(Z,
                tf.matmul(v,
                    r * tf.matmul(v,
                        tf.matmul(Z, noise, transpose_a=True),
                        transpose_a=True))) \
            + mu
        # n_funcs x n_features x 1
        return theta
    def false_clause():
        # branch intended for n >= n_features: solve in feature space
        Sigma = utils.multichol2inv( tf.matmul(Z, Z, transpose_b=True) / sigma0
                    + tf.tile(tf.expand_dims(tf.eye(n_features, dtype=dtype), axis=0), multiples=(n_funcs,1,1)),
                n_funcs, dtype=dtype)
        mu = tf.matmul(tf.matmul(Sigma, Z), yy) / sigma0
        theta = mu + tf.matmul(tf.cholesky(Sigma), noise)
        return theta
    # theta = tf.cond(
    #     pred=tf.less(n, n_features),
    #     true_fn=true_clause,
    #     false_fn=false_clause
    # )
    print("Need to debug the sampling of theta, W, b in optfunc.py:draw_random_init_weights_features")
    # NOTE(review): the tf.cond above is disabled, so the feature-space
    # branch is always taken regardless of n vs n_features — confirm.
    theta = false_clause()
    return theta, W, b
def make_function_sample(x, n_features, sigma, theta, W, b, dtype=tf.float32):
    """Evaluate the random-feature function sample(s) at x.

    x must be a 2-D tensor; the result has shape n_funcs x tf.shape(x)[0],
    squeezed to (tf.shape(x)[0],) when n_funcs == 1.
    """
    features = tf.cos(tf.matmul(W, x, transpose_b=True) + b)
    amplitude = tf.sqrt(2.0 * sigma / n_features)
    return tf.squeeze(amplitude * tf.matmul(theta, features, transpose_a=True))
def duplicate_function_with_multiple_inputs(f, n_inits, xmin=-np.infty, xmax=np.infty, dtype=tf.float32, xdim=1, name='dup_func'):
    """Create n_inits clipped variables and evaluate f on each.

    BUGFIX: `xdim` and `name` were referenced without being defined
    anywhere (NameError at runtime); they are now keyword parameters with
    defaults, keeping the original positional interface intact.

    Returns:
        xs:      (n_inits, xdim) stacked variables
        xs_list: list of n_inits variables of shape (1, xdim)
        fvals:   squeezed tensor of f evaluated at each variable
    """
    xs_list = [None] * n_inits
    fvals = [None] * n_inits
    for i in range(n_inits):
        # the clip constraint keeps each variable inside the [xmin, xmax] box
        xs_list[i] = tf.get_variable(shape=(1, xdim), dtype=dtype, name='{}_{}'.format(name, i),
                constraint=lambda x: tf.clip_by_value(x, xmin, xmax))
        fvals[i] = f(xs_list[i])
    fvals = tf.squeeze(tf.stack(fvals))
    xs = tf.stack(xs_list)
    return xs, xs_list, fvals
# find maximum of a function with multiple initializers
# a function is a tensor, so this function can be used in the above function
def find_maximum_with_multiple_init_tensor(xs_list, fvals, n_inits, xdim, optimizer, dtype=tf.float32):
    """Build per-initializer maximization ops for a tensor-valued function.

    xs_list: list of n_inits variables of shape (1, xdim)
    fvals:   tensor of shape (n_inits,), the function value at each variable

    Returns (trains, max_idx): one minimize op per initializer (gradient
    ascent implemented by minimizing -f) and the argmax over fvals.
    """
    trains = [optimizer.minimize(-fvals[i], var_list=[xs_list[i]])
              for i in range(n_inits)]
    return trains, tf.argmax(fvals)
def find_maximum_list_of_funcs(xdim, n_inits, n_funcs, xs, xs_list, fvals, optimizer, dtype=tf.float32):
    """Build maximization ops for n_funcs functions, each with n_inits starts.

    xs:      (n_funcs, n_inits, xdim)
    xs_list: list of n_funcs lists of n_inits variables of shape (1, xdim)
    fvals:   tensor of shape (n_funcs, n_inits)

    Returns (train_all, max_val_arr, max_input_arr, max_idx_arr) where the
    arrays have shapes (n_funcs,), (n_funcs, xdim) and (n_funcs,).
    """
    train_all = []
    best_vals = []
    best_inputs = []
    best_idxs = []

    for j in range(n_funcs):
        trains, best = find_maximum_with_multiple_init_tensor(
            xs_list[j], fvals[j, ...], n_inits, xdim, dtype=dtype, optimizer=optimizer)
        train_all.extend(trains)
        best_idxs.append(best)
        best_inputs.append(xs[j, best, ...])
        best_vals.append(fvals[j, best])

    max_val_arr = tf.reshape(tf.stack(best_vals), shape=(n_funcs,))
    max_input_arr = tf.reshape(tf.stack(best_inputs), shape=(n_funcs, xdim))
    max_idx_arr = tf.reshape(tf.stack(best_idxs), shape=(n_funcs,))
    return train_all, max_val_arr, max_input_arr, max_idx_arr
def gen_fval_xs(funcs, n_inits, xdim, xmin, xmax, dtype=tf.float32, name='test'):
    """
    if funcs is a list of functions
        return xs: nfuncs x n_inits x xdim
            xs_list: list of nfuncs lists of n_inits tensors of size (1,xdim)
            fvals: nfuncs x n_inits
    else:
        return xs: n_inits x xdim
            xs_list: list of n_inits tensors of size (1,xdim)
            fvals: n_inits,
    """
    if isinstance(funcs, list):
        print("List of functions")
        n_funcs = len(funcs)
        # one clipped variable per (function, initializer) pair; the clip
        # constraint keeps each variable inside the [xmin, xmax] box
        xs_list = [[tf.get_variable(shape=(1,xdim), dtype=dtype, name='{}_{}_{}'.format(name, i, j),
                constraint=lambda x: tf.clip_by_value(x, xmin, xmax)) for i in range(n_inits)] for j in range(n_funcs)]
        xs = []
        for i in range(n_funcs):
            xs.append( tf.stack(xs_list[i]) )
        xs = tf.stack(xs)
        fvals = []
        for i in range(n_funcs):
            fvals_i = []
            for j in range(n_inits):
                fvals_i.append( tf.squeeze(funcs[i](xs_list[i][j])) )
            fvals.append( tf.squeeze(tf.stack(fvals_i)) )
        fvals = tf.stack(fvals)
    else: # funcs is a function
        print("A function")
        # NOTE(review): this branch hard-codes 'test_func_mul_init' for the
        # variable names instead of using the `name` parameter — confirm.
        xs_list = [tf.get_variable(shape=(1,xdim), dtype=dtype, name='test_func_mul_init_{}'.format(i),
                constraint=lambda x: tf.clip_by_value(x, xmin, xmax)) for i in range(n_inits)]
        fvals = [funcs(x) for x in xs_list]
        xs = tf.reshape(tf.concat(xs_list, axis=0), shape=(n_inits, xdim))
        fvals = tf.squeeze(tf.concat(fvals, axis=0))
    return xs, xs_list, fvals
# draw random features, and their weights
def draw_random_init_weights_features_np(
        xdim, n_funcs, n_features,
        xx, # (nobs, xdim)
        yy, # (nobs, 1)
        l, sigma, sigma0):
    """NumPy counterpart of draw_random_init_weights_features.

    Draws n_funcs random-feature posterior function samples.

    :param xdim: input dimensionality
    :param n_funcs: number of function samples to draw
    :param n_features: number of random Fourier features per sample
    :param xx: (n, xdim) observed inputs
    :param yy: (n, 1) observed values
    :param l: (1, xdim) per-dimension spectral scales
    :param sigma: signal variance scalar
    :param sigma0: noise variance scalar
    :returns: (theta, W, b) with shapes (n_funcs, n_features, 1),
              (n_funcs, n_features, xdim), (n_funcs, n_features, 1)
    """
    n = xx.shape[0]
    l = l.reshape(1, xdim)
    xx = np.tile(xx.reshape(1, n, xdim), reps=(n_funcs, 1, 1))
    yy = np.tile(yy.reshape(1, n, 1), reps=(n_funcs, 1, 1))
    idn = np.tile(np.eye(n).reshape(1, n, n), reps=(n_funcs, 1, 1))

    # random Fourier frequencies, scaled by the kernel spectral scales
    W = np.random.randn(n_funcs, n_features, xdim) \
        * np.tile(np.sqrt(l).reshape(1, 1, xdim),
                  reps=(n_funcs, n_features, 1))
    # random phases in [0, 2*pi)
    b = 2.0 * np.pi * np.random.rand(n_funcs, n_features, 1)

    # feature matrix for the observed inputs: (n_funcs, n_features, n)
    Z = np.sqrt(2.0 * sigma / n_features) \
        * np.cos(np.matmul(W, np.transpose(xx, (0, 2, 1)))
                 + np.tile(b, reps=(1, 1, n)))

    # draw the coefficient theta
    noise = np.random.randn(n_funcs, n_features, 1)

    if n < n_features:
        # solve in the n-dimensional data space (cheaper when n < n_features)
        Sigma = np.matmul(np.transpose(Z, (0, 2, 1)), Z) + sigma0 * idn
        mu = np.matmul(np.matmul(Z, np.linalg.inv(Sigma)), yy)
        # BUGFIX: Sigma is symmetric positive definite, so use eigh rather
        # than eig — eig can emit complex eigenpairs from rounding noise,
        # which would propagate complex values into theta. (The TF version
        # of this routine already uses tf.linalg.eigh.)
        e, v = np.linalg.eigh(Sigma)
        e = e.reshape(n_funcs, n, 1)
        r = 1.0 / (np.sqrt(e) * (np.sqrt(e) + np.sqrt(sigma0)))
        theta = noise \
            - np.matmul(Z,
                        np.matmul(v,
                                  r * np.matmul(np.transpose(v, (0, 2, 1)),
                                                np.matmul(np.transpose(Z, (0, 2, 1)),
                                                          noise)))) \
            + mu
    else:
        # solve in feature space
        Sigma = np.linalg.inv(
            np.matmul(Z, np.transpose(Z, (0, 2, 1))) / sigma0
            + np.tile(np.eye(n_features).reshape(1, n_features, n_features), reps=(n_funcs, 1, 1))
        )
        mu = np.matmul(np.matmul(Sigma, Z), yy) / sigma0
        theta = mu + np.matmul(np.linalg.cholesky(Sigma), noise)

    return theta, W, b
# for testing draw_random_init_weights_features_np
def make_function_sample_np(x, n_features, sigma, theta, W, b):
    """Evaluate one random-feature function sample at the inputs x (NumPy).

    x must be a 2d array of shape (m, xdim); theta is (n_features, 1),
    W is (n_features, xdim) and b is (n_features, 1).
    Returns np.squeeze of the values: shape (n_funcs, m), or (m,) when
    n_funcs = 1.
    """
    scale = np.sqrt(2.0 * sigma / n_features)
    # Random cosine features evaluated at every input point: (n_features, m).
    features = np.cos(np.matmul(W, x.T) + b)
    return np.squeeze(scale * np.matmul(theta.T, features))
########################## TEST FUNCTIONS ##########################
def test_find_maximum_with_multiple_init_tensor(ntrain=10, n_inits=5, dtype = tf.float32):
    """
    Adam with 1000 iterations

    Smoke test: maximizes sin(x x^T) with n_inits random restarts in
    [xmin, xmax]^xdim via find_maximum_with_multiple_init_tensor, printing
    the first restart's trajectory at every training step.
    """
    tf.reset_default_graph()
    xdim = 2
    xmin = -10.
    xmax = 10.
    func = lambda x: tf.sin( tf.matmul(x,x,transpose_b=True) )
    # One random starting point per restart, scaled into [xmin, xmax].
    initializers = tf.random.uniform(shape=(n_inits,xdim), dtype=dtype) * (xmax - xmin) + xmin
    xs, xs_list, fvals = gen_fval_xs(func, n_inits, xdim, xmin, xmax, dtype=dtype, name='test_max_f_mulinit')
    # Ops copying the random initializers into the optimization variables.
    assign_inits = []
    for i in range(n_inits):
        assign_inits.append( tf.assign(xs_list[i], tf.reshape(initializers[i,:], shape=(1,xdim))) )
    optimizer = tf.train.AdamOptimizer()
    trains, max_idx = find_maximum_with_multiple_init_tensor(xs_list, fvals, n_inits, xdim, dtype=dtype, name='find_maximum_multiple_inputs', optimizer=optimizer)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(assign_inits)
        # Print the state before any optimization step.
        xs_val, xs_list_val, fvals_val = sess.run([xs, xs_list, fvals])
        print('')
        print('input = ', xs_val[0,...])
        print(xs_list_val[0])
        print('output = ', fvals_val[0])
        for i in range(ntrain):
            # Each sess.run(trains) performs one Adam step on every restart.
            _, max_idx_val, xs_val, xs_list_val, fvals_val = sess.run([trains, max_idx, xs, xs_list, fvals])
            print('')
            print('input = ', xs_val[0,...])
            print(xs_list_val[0])
            print('output = ', fvals_val[0])
def test_find_maximum_list_of_funcs(ntrain, n_inits=5, dtype = tf.float32):
    """
    Adam with 1000 iterations

    Smoke test for find_maximum_list_of_funcs: jointly maximizes two toy
    functions, each with n_inits random restarts, printing intermediate
    (i == 1) and final maxima.
    """
    tf.reset_default_graph()
    xdim = 2
    xmin = -10.
    xmax = 10.
    funcs = [lambda x: tf.sin( tf.matmul(x,x,transpose_b=True) ),
             lambda x: tf.cos( tf.matmul(x,x,transpose_b=True)) + 2.0 ]
    n_funcs = len(funcs)
    # One random restart point per (function, init) pair, scaled into [xmin, xmax].
    initializers = tf.random.uniform(shape=(n_funcs, n_inits, xdim), dtype=dtype) * (xmax - xmin) + xmin
    xs, xs_list, fvals = gen_fval_xs(funcs, n_inits, xdim, xmin, xmax, dtype=dtype, name='test_max_listf')
    assign_inits = []
    for i in range(n_funcs):
        for j in range(n_inits):
            assign_inits.append( tf.assign(xs_list[i][j], tf.reshape(initializers[i,j,:], shape=(1,xdim))) )
    optimizer = tf.train.AdamOptimizer()
    trains, max_vals, max_inputs = find_maximum_list_of_funcs(xdim, n_inits, n_funcs, xs, xs_list, fvals, optimizer=optimizer, dtype=dtype, name="opt_list_funcs")
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(assign_inits)
        for i in range(ntrain):
            # One Adam step on every (function, restart) variable per run.
            _, max_vals_val, max_inputs_val, xs_val, fvals_val = sess.run([trains, max_vals, max_inputs, xs, fvals])
            if i == ntrain - 1 or i == 1:
                print('')
                print('max input = ', max_inputs_val)
                print('max output = ', max_vals_val)
                print('xs = ', xs_val)
                print('fvals = ', fvals_val)
def test_draw_random_weights_features(n_funcs=10, n_features=500, dtype=tf.float32, randomize_funcs=False, func_param_plc=False, plot=True):
    """Visual smoke test for draw_random_init_weights_features.

    Draws n_funcs GP posterior samples via random Fourier features on a toy
    1-D dataset, evaluates them on a grid, and optionally plots them against
    the observations.

    Args:
        n_funcs: number of function samples drawn per round.
        n_features: number of random Fourier features per sample.
        dtype: TF dtype used for all variables and placeholders.
        randomize_funcs: re-initialize the TF variables before each plotting
            round so every round shows different samples.
        func_param_plc: fetch theta/W/b once and evaluate the samples through
            placeholders, instead of wiring the sampled tensors into the graph.
        plot: show a matplotlib figure of the drawn samples.
    """
    tf.reset_default_graph()
    xdim = 1
    # Graph inputs: observations (xx, yy), hyperparameters (l, sigma, sigma0)
    # and the evaluation grid x.
    xx = tf.placeholder(shape=(None, xdim), dtype=dtype, name='xx')
    yy = tf.placeholder(shape=(None, 1), dtype=dtype, name='yy')
    l = tf.get_variable(shape=(1,xdim), dtype=dtype, name='l')
    sigma = tf.get_variable(shape=(), dtype=dtype, name='sigma')
    sigma0 = tf.get_variable(shape=(), dtype=dtype, name='sigma0')
    x = tf.placeholder(shape=(None, xdim), dtype=dtype, name='x')
    thetas, Ws, bs = draw_random_init_weights_features(xdim, n_funcs, n_features, xx, yy, l, sigma, sigma0, dtype=dtype, name='random_features')
    if func_param_plc:
        thetas_plc = tf.placeholder(shape=(n_funcs, n_features, 1), dtype=dtype, name='theta')
        Ws_plc = tf.placeholder(shape=(n_funcs, n_features, xdim), dtype=dtype, name='W')
        bs_plc = tf.placeholder(shape=(n_funcs, n_features, 1), dtype=dtype, name='b')
        fvals = []
        for i in range(n_funcs):
            fvals.append( make_function_sample(x, n_features, sigma, thetas_plc[i,...], Ws_plc[i,...], bs_plc[i,...], dtype=dtype) )
        fvals = tf.stack(fvals)
    else:
        fvals = []
        for i in range(n_funcs):
            fvals.append( make_function_sample(x, n_features, sigma, thetas[i,...], Ws[i,...], bs[i,...], dtype=dtype) )
        fvals = tf.stack(fvals)
    # Toy 1-D dataset and hyperparameter values.
    xx_val = np.array([[0.], [1.], [4.], [5.]])
    yy_val = np.array([[-5.], [0.5], [3.0], [0.3]])
    l_val = np.array([[10.0]])
    # xx_val = np.array([[0., 1.], [1.,1.], [4.,1.], [5.,1.1]])
    # yy_val = np.array([[-5.], [0.5], [3.0], [0.3]])
    # l_val = np.array([[10.0, 5.0]])
    sigma_val = 2.0
    sigma0_val = 1e-3
    x_val = np.linspace(0., 5., 100).reshape(-1,1)
    # x_val = np.array(list(zip( np.linspace(0., 5., 50), np.ones(50) )))
    n_plot_funcs = 3
    func_vals = []
    fixed_func_vals = []
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        l.load(l_val, sess)
        sigma.load(sigma_val, sess)
        sigma0.load(sigma0_val, sess)
        start = time.time()
        feed_dict = {xx: xx_val, yy: yy_val, x: x_val}
        if func_param_plc and not randomize_funcs:
            thetas_val, Ws_val, bs_val = sess.run([thetas, Ws, bs], feed_dict=feed_dict)
        for i in range(n_plot_funcs):
            if randomize_funcs:
                sess.run(tf.global_variables_initializer())
                if func_param_plc:
                    thetas_val, Ws_val, bs_val = sess.run([thetas, Ws, bs], feed_dict=feed_dict)
            if func_param_plc:
                # Fix: feed the fetched theta/W/b values back through the
                # placeholders. The original ran fvals with only feed_dict,
                # which fails because the placeholders were never fed.
                func_val = sess.run(fvals, feed_dict={x: x_val,
                                                      thetas_plc: thetas_val,
                                                      Ws_plc: Ws_val,
                                                      bs_plc: bs_val})
            else:
                func_val = sess.run(fvals, feed_dict=feed_dict)
            func_vals.append(func_val)
    print("End in {:.4f}s".format(time.time() - start))
    if plot:
        fig, axs = plt.subplots(n_plot_funcs,2)
        for j in range(n_plot_funcs):
            for i in range(n_funcs):
                axs[j,0].plot(np.squeeze(x_val), np.squeeze(func_vals[j][i,...]))
            axs[j,0].scatter(xx_val, yy_val)
        plt.show()
def test_maximize_random_weights_features(n_funcs=10, ntrain=100, n_inits=5, n_features=500, dtype=tf.float32, plot=True):
    """Draw GP posterior function samples and maximize each with Adam.

    Samples theta/W/b once, copies them into fixed variables (thetas_plc,
    Ws_plc, bs_plc), then optimizes every function sample from n_inits random
    restarts and optionally plots the samples and the found maxima.

    Fix: the per-function lambdas previously captured the loop variable `i`
    late (all of them evaluated the LAST sampled function — matching the old
    TODO "only optimize for 1 functions"); `i` is now bound as a default
    argument so each lambda evaluates its own sample.
    """
    tf.reset_default_graph()
    xdim = 1
    xmin = 0.
    xmax = 5.
    # Graph inputs: observations, hyperparameters, and the evaluation grid.
    xx = tf.placeholder(shape=(None, xdim), dtype=dtype, name='xx')
    yy = tf.placeholder(shape=(None, 1), dtype=dtype, name='yy')
    l = tf.get_variable(shape=(1,xdim), dtype=dtype, name='l')
    sigma = tf.get_variable(shape=(), dtype=dtype, name='sigma')
    sigma0 = tf.get_variable(shape=(), dtype=dtype, name='sigma0')
    x = tf.placeholder(shape=(None, xdim), dtype=dtype, name='x')
    thetas, Ws, bs = draw_random_init_weights_features(xdim, n_funcs, n_features, xx, yy, l, sigma, sigma0, dtype=dtype, name='random_features')
    # Variables holding one fixed draw of the function-sample parameters.
    thetas_plc = tf.get_variable(shape=(n_funcs, n_features, 1), dtype=dtype, name='theta')
    Ws_plc = tf.get_variable(shape=(n_funcs, n_features, xdim), dtype=dtype, name='W')
    bs_plc = tf.get_variable(shape=(n_funcs, n_features, 1), dtype=dtype, name='b')
    # funcs = [lambda x: tf.sin( x ),
    #         lambda x: tf.cos(x) + 2.0 ]
    # funcs_np = [lambda x: np.sin( x ),
    #         lambda x: np.cos(x) + 2.0 ]
    # n_funcs = len(funcs)
    # optimizing function samples
    initializers = tf.random.uniform(shape=(n_funcs, n_inits, xdim), dtype=dtype) * (xmax - xmin) + xmin
    funcs = []
    for i in range(n_funcs):
        # i=i binds the current index eagerly (late-binding closure fix).
        funcs.append( (lambda x, i=i: make_function_sample(x, n_features, sigma, thetas_plc[i,...], Ws_plc[i,...], bs_plc[i,...], dtype=dtype)) )
    fvals = []
    for i in range(n_funcs):
        fvals.append( funcs[i](x) )
    fvals = tf.stack(fvals)
    print("# of funcs: ", len(funcs))
    sys.stdout.flush()
    # xs, xs_list, opt_fvals = gen_fval_xs(funcs, n_inits, xdim, xmin, xmax, dtype=dtype, name='test_max_listf')
    # Build the restart variables inline instead of via gen_fval_xs.
    name = 'test_max_listf'
    xs_list = [[tf.get_variable(shape=(1,xdim), dtype=dtype, name='{}_{}_{}'.format(name, i, j),
                constraint=lambda x: tf.clip_by_value(x, xmin, xmax)) for i in range(n_inits)] for j in range(n_funcs)]
    xs = []
    for i in range(n_funcs):
        xs.append( tf.stack(xs_list[i]) )
    xs = tf.stack(xs)
    # Objective values of every restart of every function sample, written
    # out explicitly against the fixed parameter variables.
    opt_fvals = []
    for i in range(n_funcs):
        fvals_i = []
        for j in range(n_inits):
            fval = tf.squeeze( tf.sqrt(2.0 * sigma / n_features) \
                    * tf.matmul(thetas_plc[i,...],
                        tf.cos( tf.matmul(Ws_plc[i,...],
                                    xs_list[i][j],
                                    transpose_b=True)
                                + bs_plc[i,...] ),
                        transpose_a=True) )
            fvals_i.append( fval )
        opt_fvals.append( tf.squeeze(tf.stack(fvals_i)) )
    opt_fvals = tf.stack(opt_fvals)
    # Ops copying the random initializers into the restart variables.
    assign_inits = []
    for i in range(n_funcs):
        for j in range(n_inits):
            assign_inits.append( tf.assign(xs_list[i][j], tf.reshape(initializers[i,j,:], shape=(1,xdim))) )
    optimizer = tf.train.AdamOptimizer()
    trains, max_vals, max_inputs, max_idx_arr = find_maximum_list_of_funcs(xdim, n_inits, n_funcs, xs, xs_list, opt_fvals, optimizer=optimizer, dtype=dtype, name="opt_list_funcs")
    # Toy 1-D dataset and hyperparameter values.
    xx_val = np.array([[0.], [1.], [4.], [5.]])
    yy_val = np.array([[-5.], [0.5], [3.0], [0.3]])
    l_val = np.array([[10.0]])
    sigma_val = 2.0
    sigma0_val = 1e-3
    x_val = np.linspace(xmin, xmax, 100).reshape(-1,1)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        start = time.time()
        l.load(l_val, sess)
        sigma.load(sigma_val, sess)
        sigma0.load(sigma0_val, sess)
        # Fetch one draw of theta/W/b and freeze it into the variables.
        thetas_val, Ws_val, bs_val = sess.run([thetas, Ws, bs], feed_dict={xx: xx_val,
                                                                           yy: yy_val,
                                                                           x: x_val})
        thetas_plc.load(thetas_val, sess)
        Ws_plc.load(Ws_val, sess)
        bs_plc.load(bs_val, sess)
        func_val = sess.run(fvals, feed_dict={x: x_val})
        print("End evaluating functions in {:.4f}s".format(time.time() - start))
        sys.stdout.flush()
        start = time.time()
        sess.run(assign_inits)
        xs_val, opt_fvals_val = sess.run([xs, opt_fvals])
        for i in range(ntrain):
            _, max_vals_val, max_inputs_val, max_idx_arr_val = sess.run([trains, max_vals, max_inputs, max_idx_arr])
            if i == ntrain - 1 or i == 1:
                print('')
                print('max input = ', max_inputs_val)
                print('max output = ', max_vals_val)
                print('max_idx_arr = ', max_idx_arr_val)
                # for j in range(n_funcs):
                #     print('  ', xs_val[j,max_idx_arr_val[j],...])
                #     print('  f:', opt_fvals_val[j,max_idx_arr_val[j],...])
        print("End optimizing in {:.4f}s".format(time.time() - start))
        sys.stdout.flush()
    if plot:
        fig, axs = plt.subplots()
        for i in range(n_funcs):
            axs.plot(np.squeeze(x_val), np.squeeze(func_val[i,...]), zorder=0)
        axs.scatter(xx_val, yy_val, zorder=3)
        axs.scatter(np.squeeze(max_inputs_val), max_vals_val, zorder=5, c='r')
        plt.show()
| ZhaoxuanWu/Trusted-Maximizers-Entropy-Search-BO | optfunc.py | optfunc.py | py | 25,020 | python | en | code | 3 | github-code | 36 | [
{
"api_name": "tensorflow.float32",
"line_number": 61,
"usage_type": "attribute"
},
{
"api_name": "tensorflow.shape",
"line_number": 74,
"usage_type": "call"
},
{
"api_name": "tensorflow.tile",
"line_number": 76,
"usage_type": "call"
},
{
"api_name": "tensorflow.e... |
26707445232 | from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
String,
Date,
DateTime,
UniqueConstraint,
)
from sqlalchemy.orm import relationship, backref
from sqlalchemy.sql import func
from app.db.base_class import Base
class Saving(Base):
    """A member's saving contribution for a given month/year.

    At most one saving exists per (month, year, member) — enforced by the
    month_year_user_unique constraint. A late contribution may be linked
    to a Fine via fine_id.
    """

    id = Column(Integer, primary_key=True, index=True)
    # Contribution period (month kept as a string, e.g. its name).
    month = Column(String, nullable=False)
    year = Column(Integer, nullable=False)
    amount = Column(Integer, nullable=False)
    # Date the member actually sent the money.
    date_sent = Column(Date, nullable=False)
    is_late = Column(Boolean(), default=False)
    # Optional fine applied when the saving is late.
    fine_id = Column(Integer, ForeignKey("fine.id"), nullable=True)
    member_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
    # One-to-one link to the Fine record (uselist=False on the backref).
    fine = relationship("Fine", backref=backref("saving", uselist=False))
    __table_args__ = (
        UniqueConstraint("month", "year", "member_id", name="month_year_user_unique"),
    )
| boswellgathu/chama | backend/app/models/saving.py | saving.py | py | 1,033 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "app.db.base_class.Base",
"line_number": 17,
"usage_type": "name"
},
{
"api_name": "sqlalchemy.Column",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "sqlalchemy.Integer",
"line_number": 18,
"usage_type": "argument"
},
{
"api_name": "sqlal... |
22565786888 | import itertools
import json
import zipfile
from typing import BinaryIO, List, Tuple
import numpy as np
from PIL import Image
from shap_e.rendering.view_data import Camera, ProjectiveCamera, ViewData
class BlenderViewData(ViewData):
    """
    Interact with a dataset zipfile exported by view_data.py.

    The archive contains a global "info.json", one "{i:05}.json" per view
    with camera parameters, and per-view images: "{i:05}.png" (RGBA) plus
    optional "{i:05}_depth.png" and "{i:05}_MatAlpha.png".
    """

    def __init__(self, f_obj: BinaryIO):
        self.zipfile = zipfile.ZipFile(f_obj, mode="r")
        self.infos = []
        with self.zipfile.open("info.json", "r") as f:
            self.info = json.load(f)
        self.channels = list(self.info.get("channels", "RGBAD"))
        assert set("RGBA").issubset(
            set(self.channels)
        ), "The blender output should at least have RGBA images."
        # Collect per-view metadata: 00000.json, 00001.json, ... stopping at
        # the first index with no matching entry in the archive.
        names = set(x.filename for x in self.zipfile.infolist())
        for i in itertools.count():
            name = f"{i:05}.json"
            if name not in names:
                break
            with self.zipfile.open(name, "r") as f:
                self.infos.append(json.load(f))

    @property
    def num_views(self) -> int:
        # One info dict was loaded per view.
        return len(self.infos)

    @property
    def channel_names(self) -> List[str]:
        return list(self.channels)

    def load_view(self, index: int, channels: List[str]) -> Tuple[Camera, np.ndarray]:
        """Load one view as (camera, HxWxC float array), with the array's
        channels stacked in the caller-requested order."""
        for ch in channels:
            if ch not in self.channel_names:
                raise ValueError(f"unsupported channel: {ch}")

        # Gather (a superset of) the requested channels.
        channel_map = {}
        if any(x in channels for x in "RGBA"):
            # 8-bit RGBA scaled to [0, 1].
            with self.zipfile.open(f"{index:05}.png", "r") as f:
                rgba = np.array(Image.open(f)).astype(np.float32) / 255.0
                channel_map.update(zip("RGBA", rgba.transpose([2, 0, 1])))
        if "D" in channels:
            with self.zipfile.open(f"{index:05}_depth.png", "r") as f:
                # Decode a 16-bit fixed-point number.
                fp = np.array(Image.open(f))
                # 0xFFFF is the sentinel for "ray hit nothing" -> infinity.
                inf_dist = fp == 0xFFFF
                channel_map["D"] = np.where(
                    inf_dist,
                    np.inf,
                    self.infos[index]["max_depth"] * (fp.astype(np.float32) / 65536),
                )
        if "MatAlpha" in channels:
            # 16-bit alpha scaled to [0, 1).
            with self.zipfile.open(f"{index:05}_MatAlpha.png", "r") as f:
                channel_map["MatAlpha"] = np.array(Image.open(f)).astype(np.float32) / 65536

        # The order of channels is user-specified.
        combined = np.stack([channel_map[k] for k in channels], axis=-1)
        h, w, _ = combined.shape
        return self.camera(index, w, h), combined

    def camera(self, index: int, width: int, height: int) -> ProjectiveCamera:
        """Build the projective camera for view *index* from its JSON info."""
        info = self.infos[index]
        return ProjectiveCamera(
            origin=np.array(info["origin"], dtype=np.float32),
            x=np.array(info["x"], dtype=np.float32),
            y=np.array(info["y"], dtype=np.float32),
            z=np.array(info["z"], dtype=np.float32),
            width=width,
            height=height,
            x_fov=info["x_fov"],
            y_fov=info["y_fov"],
        )
| openai/shap-e | shap_e/rendering/blender/view_data.py | view_data.py | py | 3,109 | python | en | code | 10,619 | github-code | 36 | [
{
"api_name": "shap_e.rendering.view_data.ViewData",
"line_number": 12,
"usage_type": "name"
},
{
"api_name": "typing.BinaryIO",
"line_number": 17,
"usage_type": "name"
},
{
"api_name": "zipfile.ZipFile",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "j... |
38027952617 | from typing import Dict, List
from twin_runtime.twin_runtime_core import TwinRuntime
from twin_runtime.twin_runtime_core import LogLevel
class TwinBuilderSimulator():
    """Wraps an Ansys Twin Builder runtime model as a Bonsai-style simulator.

    Exposes the episode_start / episode_step / get_state / halted /
    episode_finish lifecycle expected by the Bonsai connector.
    """

    def __init__(self, twin_model_file, state_variable_names: List,
                 action_variable_names: List,
                 number_of_warm_up_steps, warm_up_action_variable_values: List):
        """Load the twin model and run the initial warm-up steps.

        twin_model_file: path to the .twin model file.
        state_variable_names: twin outputs reported in the state.
        action_variable_names: twin inputs set from each action.
        number_of_warm_up_steps: steps run with the warm-up action at reset.
        warm_up_action_variable_values: action values (same order as
            action_variable_names) used during warm-up.
        """
        self.state = {}
        self.twin_runtime = None  # assigned in reset
        self.done = False
        self.twin_model_file = twin_model_file
        self.state_variable_names = state_variable_names
        self.action_variable_names = action_variable_names
        self.number_of_warm_up_steps = number_of_warm_up_steps
        self.warm_up_action_variable_values = warm_up_action_variable_values
        self.step_size = 0.5
        self.time_index = 0
        self.reset(self.step_size)

    def reset(self, step_size):
        """Re-instantiate the twin runtime, zero the state, and warm up."""
        self.done = False
        runtime_log = self.twin_model_file.replace('.twin', '.log')
        self.step_size = step_size
        # Close any previous runtime before creating a new one.
        if self.twin_runtime is not None:
            self.twin_runtime.twin_close()
            self.twin_runtime = None
        # Load Twin, set the parameters values, initialize (and generate snapshots, output)
        self.twin_runtime = TwinRuntime(self.twin_model_file, runtime_log, log_level=LogLevel.TWIN_LOG_ALL)
        self.twin_runtime.twin_instantiate()
        self.twin_runtime.twin_initialize()
        for state_variable_name in self.state_variable_names:
            self.state[state_variable_name] = 0
        self.time_index = 0
        self.state['time_index'] = self.time_index
        # Run initial steps to "warm up" the simulation
        if self.number_of_warm_up_steps > 0:
            action = dict(zip(self.action_variable_names, self.warm_up_action_variable_values))
            for i in range(self.number_of_warm_up_steps):
                self.episode_step(action)

    def get_state(self) -> Dict[str, float]:
        """Called to retrieve the current state of the simulator."""
        print(f"returning state: {self.state}")
        return self.state

    def halted(self) -> bool:
        """
        Should return True if the simulator cannot continue for some reason
        """
        return self.done

    def episode_start(self, config: Dict = None) -> None:
        """Called at the start of each episode.

        Fix: defaults the step size to 0.5 when config is None or lacks a
        truthy 'step_size' entry — the original crashed (TypeError) on its
        own default of config=None.
        """
        step_size = (config or {}).get("step_size") or 0.5
        self.reset(step_size)

    def episode_step(self, action: Dict):
        """Apply an action, advance the twin one step, and refresh the state."""
        for f in action.keys():
            self.twin_runtime.twin_set_input_by_name(f, action[f])
        self.twin_runtime.twin_simulate(self.time_index)
        for state_variable_name in self.state_variable_names:
            value = self.twin_runtime.twin_get_output_by_name(state_variable_name).value
            print(value)
            self.state[state_variable_name] = value
        self.state['time_index'] = self.time_index
        # increase the index
        self.time_index = self.time_index + self.step_size

    def episode_finish(self):
        """Called at the end of each episode."""
        self.twin_runtime.twin_close()
        self.twin_runtime = None
| microsoft/bonsai-twin-builder | TwinBuilderConnector/TwinBuilderSimulator.py | TwinBuilderSimulator.py | py | 3,207 | python | en | code | 6 | github-code | 36 | [
{
"api_name": "typing.List",
"line_number": 7,
"usage_type": "name"
},
{
"api_name": "typing.List",
"line_number": 8,
"usage_type": "name"
},
{
"api_name": "typing.List",
"line_number": 9,
"usage_type": "name"
},
{
"api_name": "twin_runtime.twin_runtime_core.TwinR... |
22246405784 | import common.parse_util as cpu
from common.common_based import CommonBased
class AdaType(CommonBased):
    """Represents an Ada type declaration and tracks resolution of its
    size and range constraints against a parsing context (self.ctx)."""

    # Category tags used in self.ttype.
    RECORD_TYPE = "Record"
    ENUM_TYPE = "Enum"
    STR_TYPE = "String"
    ARRAY_TYPE = "Array"
    DERIVED_TYPE = "Derived"
    SUBTYPE = "Subtype"
    INT_TYPE = "Integer"
    REAL_TYPE = "Real"
    FIELD_TYPE = "Field"
    VAR_TYPE = "Var"
    ENUM_ITEM_TYPE = 'Enum_Item'
    ARRAY_INDEX_TYPE = 'Array_Index'

    def __init__(self, name, ttype, package=None, ctx=None, is_based=False):
        super(AdaType, self).__init__()
        # Ada identifiers are case-insensitive; normalize to upper case.
        self.name = name.upper() if name else None
        self.package = package.upper() if package else None
        self.ttype = ttype
        self.is_based = is_based
        self.discriminant = {}
        self.size = None
        self.ctx = ctx
        #self.ctx.cur_type = self
        # *_solved flags: whether the corresponding attribute is resolved.
        self.size_solved = False
        self.constraint_solved = False
        self.type_chain = []
        # Range bounds once the constraint is resolved.
        self.first = None
        self.last = None
        self.must_print = ['first', 'last', 'size']
        self.to_print = ['discriminant', 'size']
        self.leader_str = "'%s type [%s] in [%s]:' % (self.ttype, self.name, self.package)"
        # Attribute names that need solve_<attr>() / have <attr>_solved flags.
        self.to_solve = []
        self.to_check = []

    def add_discrim(self, fs):
        # Register record discriminant fields, keyed by field name.
        for f in fs:
            self.discriminant[f.name] = f

    def solve_type_chain(self):
        # Hook for subclasses; base types have no chain to resolve.
        pass

    def full_name(self):
        # Fully qualified name, e.g. "PKG.TYPE".
        return '.'.join([self.package, self.name])

    def solve_a_type_or_expr(self, attr_n, i_expr=None, solved_n=None, mandatory=True, is_type=True):
        """Resolve attribute *attr_n* (a type ref or expression) via the ctx.

        i_expr: new raw value to store before resolving, if given.
        solved_n: flag attribute name; defaults to "<attr_n>_solved".
        mandatory: when False, an empty/missing value counts as solved.
        is_type: resolve via cpu.solve_type, else cpu.solve_expr.
        """
        if not solved_n:
            solved_n = "%s_solved" % attr_n
        if not getattr(self, solved_n):
            if not mandatory and not i_expr and not getattr(self, attr_n):
                setattr(self, solved_n, True)
            else:
                if i_expr:
                    setattr(self, attr_n, i_expr)
                if is_type:
                    attr_v, solved_v = cpu.solve_type(self.ctx, getattr(self, attr_n))
                else:
                    attr_v, solved_v = cpu.solve_expr(self.ctx, getattr(self, attr_n))
                setattr(self, attr_n, attr_v)
                setattr(self, solved_n, solved_v)

    def solve_size(self, i_size=None):
        # Resolve the 'Size representation value.
        self.solve_a_type_or_expr('size', i_size)

    def solve_constraint(self, const=None):
        """Resolve the range constraint into self.first / self.last.

        Accepts either an explicit range ({'range': {'type': 'range', ...}})
        or a T'Range-style attribute reference on a base type
        ({'range': {'type': 'attr', 'base': ...}}).
        """
        if const is not None:
            setattr(self, 'constraint', const)
        const = getattr(self, 'constraint', None)
        if self.constraint_solved or not const:
            self.constraint_solved = True
            return
        if const['type'] == 'range':
            if const['range']['type'] == 'range':
                self.first, solved1 = cpu.solve_expr(self.ctx, const['range']['first'])
                self.last, solved2 = cpu.solve_expr(self.ctx, const['range']['last'])
                self.constraint_solved = solved1 and solved2
            elif const['range']['type'] == 'attr':
                solved = True
                # A string base is an unresolved type name; resolve it first.
                if isinstance(const['range']['base'], str):
                    const['range']['base'], solved = cpu.solve_type(self.ctx, const['range']['base'])
                if solved:
                    self.first, solved1 = cpu.solve_expr(self.ctx, const['range']['base'].first)
                    self.last, solved2 = cpu.solve_expr(self.ctx, const['range']['base'].last)
                    self.constraint_solved = solved1 and solved2
        #if self.constraint_solved:
        #    setattr(self, 'constraint', None)

    def check_solved(self):
        """Attempt all pending solve_<attr>() calls; return True when every
        checked attribute is resolved (also marks the type as based)."""
        for attr in self.to_solve:
            method_n = 'solve_%s' % attr
            solved_n = '%s_solved' % attr
            if not getattr(self, solved_n):
                getattr(self, method_n)()
        ret = True
        if not self.to_check:
            self.to_check = self.to_solve
        for attr in self.to_check:
            solved_n = '%s_solved' % attr
            ret = ret and getattr(self, solved_n)
        if ret:
            self.is_based = True
        return ret | idealegg/AdaReader | common/ada_type.py | ada_type.py | py | 4,011 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "common.common_based.CommonBased",
"line_number": 6,
"usage_type": "name"
},
{
"api_name": "common.parse_util.solve_type",
"line_number": 60,
"usage_type": "call"
},
{
"api_name": "common.parse_util",
"line_number": 60,
"usage_type": "name"
},
{
"api... |
126083869 | from django.db import models
from django.contrib.auth.models import User
import json
import re
class FileType(models.Model):
    """Lookup table of known file type names (the name is the primary key)."""
    name = models.CharField(max_length=64, primary_key=True)
def update_dict(base, changes):
    """Apply *changes* onto *base* in place and return *base*.

    A value of None in *changes* removes the corresponding key from
    *base* (if present) instead of storing None.
    """
    for key, value in changes.items():
        if value is None:
            base.pop(key, None)
        else:
            base[key] = value
    return base
class NodeImage(models.Model):
    """A Docker image registered as a Bio-Node node.

    Raw image metadata (labels, cmd, entrypoint, env) is stored as JSON
    strings; user overrides live separately in override_string and are
    merged over the raw values by the properties below. Inputs/outputs
    are declared through image labels ("input", "input_1"..., "input_n",
    and the matching "output*" labels) as comma-separated spec strings.
    """

    name = models.CharField(max_length=128, primary_key=True)
    # Raw values reported by the image, serialized to JSON.
    labels_string = models.TextField(default="{}")
    cmd_string = models.TextField(default="[]")
    entrypoint_string = models.TextField(default="[]")
    env_string = models.TextField(default="{}")
    # User-supplied overrides, merged over the raw values on access.
    override_string = models.TextField(default="{}")
    imported = models.BooleanField(default=False)
    imported_tag = models.CharField(max_length=128, default="", blank=True)
    imported_by = models.ForeignKey(
        User, on_delete=models.SET_NULL, blank=True, null=True
    )
    added_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    @property
    def tags(self):
        # Reverse relation populated by NodeImageTag.image.
        return self.tag_refs

    @property
    def override(self):
        return json.loads(self.override_string)

    @override.setter
    def override(self, override):
        """Store the override, first dropping entries that duplicate the
        stored raw values so only actual differences are kept."""
        labels = self.labels
        items = list(override.get("labels", {}).items())
        for k, v in items:
            try:
                assert labels[k] == v
                del override["labels"][k]
            except:
                pass
        env = self.env
        items = list(override.get("env", {}).items())
        for k, v in items:
            try:
                assert env[k] == v
                del override["env"][k]
            except:
                pass
        try:
            # NOTE(review): compares env["cmd"] to self.cmd — presumably this
            # was meant to read override["cmd"]; confirm before changing.
            assert env["cmd"] == self.cmd
            del override["cmd"]
        except:
            pass
        try:
            # NOTE(review): same suspicion here for "entrypoint".
            assert env["entrypoint"] == self.entrypoint
            del override["entrypoint"]
        except:
            pass
        self.override_string = json.dumps(override)

    @property
    def labels(self):
        """Raw labels with any "labels" override merged on top."""
        override = json.loads(self.override_string)
        if override.get("labels", None) is not None:
            return update_dict(json.loads(self.labels_string), override["labels"])
        return json.loads(self.labels_string)

    @labels.setter
    def labels(self, labels):
        self.labels_string = json.dumps(labels)

    @property
    def cmd(self):
        """Container command; a "cmd" override replaces it entirely."""
        override = json.loads(self.override_string)
        if override.get("cmd", None) is not None:
            return override["cmd"]
        return json.loads(self.cmd_string)

    @cmd.setter
    def cmd(self, cmd):
        self.cmd_string = json.dumps(cmd)

    @property
    def entrypoint(self):
        """Container entrypoint; an "entrypoint" override replaces it."""
        override = json.loads(self.override_string)
        if override.get("entrypoint", None) is not None:
            return override["entrypoint"]
        return json.loads(self.entrypoint_string)

    @entrypoint.setter
    def entrypoint(self, entrypoint):
        self.entrypoint_string = json.dumps(entrypoint)

    @property
    def env(self):
        """Environment variables with any "env" override merged on top."""
        override = json.loads(self.override_string)
        if override.get("env", None) is not None:
            return update_dict(json.loads(self.env_string), override["env"])
        return json.loads(self.env_string)

    @env.setter
    def env(self, env):
        self.env_string = json.dumps(env)

    @property
    def inputs_raw(self):
        """Raw input spec strings from the labels.

        Three layouts are supported: numbered labels input_1..input_k
        (multi-input), a single "input" label, or no inputs at all.
        """
        labels = self.labels
        if labels.get("input_1", False):
            # Multi-input mode
            inputs = []
            try:
                i = 1
                while True:
                    inputs.append(labels["input_" + str(i)])
                    i += 1
            except: # input_k+1 does not exist, throws
                pass
            return inputs
        single_input = labels.get("input", False)
        if single_input:
            # Single-input mode
            return [single_input]
        # No-input mode
        return []

    @classmethod
    def prepare_input(cls, raw):
        """Split a comma-separated input spec and fill in defaults.

        Fields: [type, source, required, pass-as, extension-filter].
        "stdin" sources default to passing content instead of a filename.
        """
        raw = re.sub(r",\s+", ",", raw)
        raw = raw.split(",")
        defaults = ["file", "", "required", "filename", "*"]
        if len(raw) >= 2 and raw[1] == "stdin":
            defaults[3] = "content"
        if len(raw) > 4:
            defaults[4] = raw[4]
        # Replace empty fields with defaults; extend if the spec is short.
        for i in range(len(defaults)):
            try:
                assert raw[i] != ""
            except:
                if len(raw) > i:
                    raw[i] = defaults[i]
                else:
                    raw.append(defaults[i])
        return raw

    @property
    def inputs_meta(self):
        """All input specs, parsed and default-filled."""
        inputs = self.inputs_raw
        result = []
        for i in inputs:
            result.append(NodeImage.prepare_input(i))
        return result

    @property
    def inputs(self):
        """Input types only, with any "consumable " prefix stripped."""
        inputs = [i[0] for i in self.inputs_meta]
        p = "consumable "
        inputs = [i[len(p) :] if i.startswith(p) else i for i in inputs]
        return inputs

    @property
    def add_input(self):
        """Type of the variable-arity extra input ("input_n"), or False."""
        labels = self.labels
        inp = labels.get("input_n", False)
        if not inp:
            return False
        return re.sub(r",\s+", ",", inp).split(",")[0]

    @property
    def add_input_meta(self):
        """Parsed spec of the variable-arity extra input, or False."""
        labels = self.labels
        inp = labels.get("input_n", False)
        if not inp:
            return False
        return NodeImage.prepare_input(inp)

    @property
    def add_output(self):
        """Type of the variable-arity extra output ("output_n"), or False."""
        labels = self.labels
        out = labels.get("output_n", False)
        if not out:
            return False
        return re.sub(r",\s+", ",", out).split(",")[0]

    @property
    def add_output_meta(self):
        """Parsed spec of the variable-arity extra output, or False."""
        labels = self.labels
        out = labels.get("output_n", False)
        if not out:
            return False
        return NodeImage.prepare_output(out)

    @property
    def outputs_raw(self):
        """Raw output spec strings from the labels (mirrors inputs_raw)."""
        labels = self.labels
        if labels.get("output_1", False):
            # Multi-output mode
            outputs = []
            try:
                i = 1
                while True:
                    outputs.append(labels["output_" + str(i)])
                    i += 1
            except: # output_k+1 does not exist, throws
                pass
            return outputs
        single_output = labels.get("output", False)
        if single_output:
            # Single-output mode
            return [single_output]
        # No-output mode
        return []

    @classmethod
    def prepare_output(cls, raw):
        """Split a comma-separated output spec and fill in defaults.

        Fields: [type, source, filename]. "workingdir" sources (and an
        explicitly empty filename) keep the filename empty so all created
        files are collected.
        """
        raw = re.sub(r",\s+", ",", raw)
        raw = raw.split(",")
        defaults = ["file", "stdout", "results.out"]
        if len(raw) >= 3 and raw[2] == "":
            # If the output filename is '', then don't override it. Foldername will be used as parameter.
            defaults[2] = ""
        if len(raw) >= 2 and raw[1] == "workingdir":
            # Default for workingdir: Move all created files
            defaults[2] = ""
        for i in range(len(defaults)):
            try:
                assert raw[i] != ""
            except:
                if len(raw) > i:
                    raw[i] = defaults[i]
                else:
                    raw.append(defaults[i])
        return raw

    @property
    def outputs_meta(self):
        """All output specs, parsed and default-filled."""
        outputs = self.outputs_raw
        result = []
        for o in outputs:
            result.append(NodeImage.prepare_output(o))
        return result

    @property
    def outputs(self):
        """Output types only."""
        return [i[0] for i in self.outputs_meta]
class NodeImageTag(models.Model):
    """A tag (name and/or digest) attached to a NodeImage."""

    image = models.ForeignKey(
        NodeImage, related_name="tag_refs", on_delete=models.CASCADE
    )
    # Content digest of the tagged image.
    sha = models.CharField(max_length=64)
    # Human-readable tag; may be empty, in which case the sha is shown.
    name = models.CharField(max_length=64, blank=True, default="")

    def __str__(self):
        return self.name if self.name else self.sha
| bromberglab/bio-node-webserver | django/app/models/node_image.py | node_image.py | py | 7,864 | python | en | code | 1 | github-code | 36 | [
{
"api_name": "django.db.models.Model",
"line_number": 7,
"usage_type": "attribute"
},
{
"api_name": "django.db.models",
"line_number": 7,
"usage_type": "name"
},
{
"api_name": "django.db.models.CharField",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "... |
30835468279 | from youtube_dl import YoutubeDL
import sys
# youtube_dl options: grab the best available audio stream and have
# FFmpeg transcode it to a 192 kbps MP3.
ydl_opts = {'format': 'bestaudio/best',
            'postprocessors': [{'key': 'FFmpegExtractAudio',
                                'preferredcodec': 'mp3',
                                'preferredquality': '192',
                                }]}

if __name__ == "__main__":
    # Fix: the original referenced the unimported name `youtube_dl.YoutubeDL`
    # (only `YoutubeDL` was imported) and the non-existent `sys.argvp`.
    with YoutubeDL(ydl_opts) as ydl:
        # Every command-line argument is treated as a video URL/id.
        filenames = sys.argv[1:]
        ydl.download(filenames)
| jordankraude/Personal-Projects | Youtube Downloaders/youtube_to_mp3.py | youtube_to_mp3.py | py | 332 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "youtube_dl.YoutubeDL",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "sys.argvp",
"line_number": 12,
"usage_type": "attribute"
}
] |
7659689792 | # Importing the OpenCV library.
import cv2
# Importing the numpy library.
import numpy as np
# Load the photo of the conference table that we want to rectify.
img = cv2.imread("../Resources/konferansebord.jpg")
# Target size of the rectified (top-down) output image.
width, height = 250, 350
# Four corners of the table in the source image (top-left, top-right,
# bottom-left, bottom-right), hand-picked in pixel coordinates.
pts1 = np.float32([[111, 219], [287, 188], [154, 482], [352, 440]])
# Where each of those corners should land in the output image.
pts2 = np.float32([[0, 0], [width, 0], [0, height], [width, height]])
# 3x3 homography mapping pts1 onto pts2.
matrix = cv2.getPerspectiveTransform(pts1, pts2)
# Warp the source image through the homography into the output size.
imgOutput = cv2.warpPerspective(img, matrix, (width, height))
# Show the original and the rectified result side by side.
cv2.imshow("Image", img)
cv2.imshow("Output Image", imgOutput)
# Block until any key is pressed so the windows stay open.
cv2.waitKey(0)
{
"api_name": "cv2.imread",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "numpy.float32",
"line_number": 14,
"usage_type": "call"
},
{
"api_name": "numpy.float32",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "cv2.getPerspectiveTransform",
... |
14994784583 | from django.urls import path
from . import views
from django.conf import settings
from django.contrib.staticfiles.urls import static
# Route table: public pages, dashboard, CRUD views for "paginas" (pages)
# and "usuarios" (users), and search. Media files are appended via static().
urlpatterns = [
    # Public site pages.
    path('',views.inicio, name='inicio'),
    path('nosotros_copy',views.nosotros_copy, name='nosotros_copy'),
    path('nosotros',views.nosotros, name='nosotros'),
    path('dashboard',views.dashboard, name='dashboard'),
    path('index',views.index, name='index'),
    path('index_copy',views.index_copy, name='index_copy'),
    path('viewuser',views.viewuser, name='viewuser'),
    # Page ("paginas") CRUD.
    path('paginas/editar',views.editarp, name='editarp'),
    path('paginas/crear',views.crearp, name='crearp'),
    path('paginas/eliminar/<int:id>',views.eliminarp, name='eliminarp'),
    path('paginas/editar/<int:id>',views.editarp, name='editarp'),
    path('paginas/agregar',views.agregarp, name='agregarp'),
    # User ("usuarios") CRUD.
    path('usuarios/editar',views.editaru, name='editaru'),
    path('usuarios/crear',views.crearu, name='crearu'),
    path('usuarios/listar',views.listar, name='listar'),
    path('usuarios/agregar',views.agregaru, name='agregaru'),
    path('usuarios/eliminar/<int:id>',views.eliminaru, name='eliminaru'),
    path('usuarios/editar/<int:id>',views.editaru, name='editaru'),
    path('buscar/', views.buscar, name='buscar'),
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
{
"api_name": "django.urls.path",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "django.urls.path",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "django.urls.path",
"line_number": 9,
"usage_type": "call"
},
{
"api_name": "django.urls.path",
... |
18830355953 | """File Type Utility Class."""
import logging
from pathlib import Path
from hdash.synapse.file_type import FileType
class FileTypeUtil:
    """Classifies Synapse file names into FileType categories."""

    # Old-style manifest name that should be excluded outright.
    LEGACY_META_FILE_NAME = "synapse_storage_manifest.csv"
    # Current manifests all share this prefix.
    META_FILE_PREFIX = "synapse_storage_manifest_"

    def __init__(self):
        """Create the utility and build the extension lookup table."""
        self._init_file_types()

    def get_file_type(self, file_name):
        """Return the FileType value string for *file_name*.

        Legacy manifests are excluded, current manifests count as metadata,
        and everything else is classified by extension; unknown extensions
        fall back to OTHER with a warning.
        """
        if file_name == self.LEGACY_META_FILE_NAME:
            return FileType.EXCLUDE.value
        if file_name.startswith(self.META_FILE_PREFIX):
            return FileType.METADATA.value
        extension = self._get_file_extension(Path(file_name))
        try:
            return self.file_type_map[extension].value
        except KeyError:
            logging.warning("Unrecognized: %s", extension)
            return FileType.OTHER.value

    def _get_file_extension(self, path):
        """Return the classifying extension; a trailing .gz is dropped."""
        if path.name.startswith("."):
            # Dot-files such as .DS_Store have no real extension.
            return path.name
        if path.suffix == ".gz":
            # e.g. "reads.fastq.gz" classifies as ".fastq".
            return path.suffixes[-2]
        return path.suffix

    def _init_file_types(self):
        """Build the extension -> FileType lookup table."""
        groups = {
            FileType.BAM: [".bam"],
            FileType.FASTQ: [".fastq", ".fasta", ".fq"],
            FileType.IMAGE: [".tif", ".tiff", ".svs", ".vsi", ".png",
                             ".raw", ".jpg", ".scn", ".s0001_e00"],
            FileType.MATRIX: [".csv", ".tsv", ".vcf", ".fcs", ".mtx",
                              ".txt", ".h5ad", ".h5", ".xlsx"],
            FileType.OTHER: [".pdf", ".rnk", ".json", ".bcf", ".bzcfg",
                             ".log", ".mzML", ".zstd"],
            FileType.EXCLUDE: [".DS_Store", ".vimrc", ".Rhistory"],
        }
        self.file_type_map = {
            ext: file_type
            for file_type, exts in groups.items()
            for ext in exts
        }
| ncihtan/hdash_air | hdash/synapse/file_type_util.py | file_type_util.py | py | 2,700 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "pathlib.Path",
"line_number": 19,
"usage_type": "call"
},
{
"api_name": "hdash.synapse.file_type.FileType.OTHER",
"line_number": 20,
"usage_type": "attribute"
},
{
"api_name": "hdash.synapse.file_type.FileType",
"line_number": 20,
"usage_type": "name"
},
... |
6270944947 | import os
import logging
import numpy as np
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
from .helpers import ROOT_DIR, DATA_DIR, RESULTS_DIR
from .helpers import GENRES, SUBSETS
from .helpers import start_experiment_log
from .helpers import relpath
from .classification import train_model
from .classification import get_scores
# Fix the global RNG so the randomized hyperparameter search is reproducible.
np.random.seed(0)

# Globals
# —————————————————————————————————————————————————————————————————————————————
# Feature-column names as they appear in the *-features.csv files:
# pitch frequencies per MIDI note, pitch-class frequencies, and per-note
# repetition scores.
PITCH_FEATURES = [f'freq_MIDI_{i}' for i in range(53, 87)]
PITCH_CLASS_FEATURES = [f'freq_pitch_class_{i}' for i in range(12)]
REPETITION_FEATURES = [f'repetition_score_MIDI_{i}' for i in range(53, 87)]
# Maps profile name -> list of feature columns used by that profile.
PROFILES = {
    'pitch': PITCH_FEATURES,
    'pitch_class': PITCH_CLASS_FEATURES,
    'repetition': REPETITION_FEATURES
}
# Helpers
# —————————————————————————————————————————————————————————————————————————————
def get_conditions(genres='all', subsets='all', profiles='all'):
    """Build the list of experimental conditions.

    Parameters
    ----------
    genres : str or list, optional
        Genres to include, by default 'all'
    subsets : str or list, optional
        Subsets to include, by default 'all'
    profiles : str or list, optional
        Profiles to include, by default 'all'

    Returns
    -------
    (list, dict)
        A list with all conditions, and a dictionary with keys 'genres',
        'subsets' and 'profiles' containing those values.
    """
    # Resolve the 'all' shorthands to the full module-level collections.
    if subsets == 'all':
        subsets = SUBSETS
    if genres == 'all':
        genres = GENRES
    if profiles == 'all':
        profiles = list(PROFILES.keys())
    # Cartesian product; iteration order (genre, subset, profile) matches
    # the original nested loops.
    conditions = [
        dict(genre=g, subset=s, profile=p)
        for g in genres
        for s in subsets
        for p in profiles
    ]
    parts = dict(subsets=subsets, genres=genres, profiles=profiles)
    return conditions, parts
def load_dataset(genre, subset, profile, split, data_dir=DATA_DIR):
    """Load a dataset for training the classifier. Returns a dataframe of
    features and an array of corresponding targets (modes)"""
    subset_dir = os.path.join(data_dir, genre, subset)
    # Keep only the columns belonging to the requested profile.
    columns = PROFILES[profile]
    features = pd.read_csv(
        os.path.join(subset_dir, f'{split}-features.csv'), index_col=0)
    data = features[columns]
    # Targets are the 'mode' column of the matching chants file.
    chants = pd.read_csv(
        os.path.join(subset_dir, f'{split}-chants.csv'), index_col=0)
    targets = chants['mode']
    assert len(targets) == len(data)
    return data, targets
# Experiment
# —————————————————————————————————————————————————————————————————————————————
def run_condition(genre, subset, profile,
                  data_dir, results_dir,
                  n_iter, n_splits):
    """Runs a single experimental condition: trains the classifier, stores
    all the model, cross-validation results, and evaluation scores."""
    # Announce the condition being trained.
    logging.info(f'Training model...')
    logging.info(f'* profile={profile}')
    logging.info(f'* genre={genre}')
    logging.info(f'* subset={subset}')

    # Per-condition output directory: results_dir/genre/subset
    output_dir = os.path.join(results_dir, genre, subset)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Load the train and test splits for this condition.
    shared = dict(genre=genre, subset=subset, profile=profile, data_dir=data_dir)
    train_data, train_targets = load_dataset(split='train', **shared)
    test_data, test_targets = load_dataset(split='test', **shared)
    logging.info(f'* Training/test size: {len(train_data)}/{len(test_data)}')
    logging.info(f'* Num. features: {train_data.shape[1]}')

    # Fixed kNN parameters and the randomized-search grid.
    fixed_params = dict(n_jobs=-1, p=2, metric='minkowski')
    tuned_params = dict(
        n_neighbors=np.arange(1, 50),
        weights=['uniform', 'distance'],
        algorithm=['ball_tree', 'kd_tree', 'brute'],
        leaf_size=np.arange(10, 100),
    )

    # Tune hyperparameters and train; train_model persists all artefacts
    # (model, CV results, predictions) under basepath.
    train_model(
        model=KNeighborsClassifier(**fixed_params),
        train_data=train_data,
        train_targets=train_targets,
        test_data=test_data,
        test_targets=test_targets,
        param_grid=tuned_params,
        n_splits=n_splits,
        n_iter=n_iter,
        basepath=os.path.join(output_dir, profile))
def run(experiment_name, description=None,
        genres='all', subsets='all', profiles='all',
        n_iter=100, n_splits=5,
        data_dir = DATA_DIR, results_dir = RESULTS_DIR):
    """Run a 'profile' mode classification experiment using pitch, pitch_class
    and repetition profiles.

    Trains one model per (genre, subset, profile) condition and stores all
    artefacts under ``results_dir/experiment_name``.
    """
    # Every experiment gets its own results subdirectory.
    results_dir = os.path.join(results_dir, experiment_name)
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)

    # Expand the requested genres/subsets/profiles into concrete conditions.
    conditions, parts = get_conditions(genres, subsets, profiles)

    # Record the experiment settings before any training starts.
    shared = dict(data_dir=data_dir, results_dir=results_dir,
                  n_iter=n_iter, n_splits=n_splits)
    start_experiment_log(
        name=experiment_name,
        description=description,
        num_conditions=len(conditions),
        **shared,
        **parts)

    # Train one model per condition.
    for condition in conditions:
        run_condition(**shared, **condition)
def evaluate(experiment_name,
             genres='all', subsets='all', profiles='all',
             data_dir = DATA_DIR, results_dir = RESULTS_DIR, **kwargs):
    """Evaluate an experiment and store accuracy and retrieval scores in a
    single CSV file that can be used to e.g. generate tables and figures."""
    logging.info('Evaluating experiment...')
    results_dir = os.path.join(results_dir, experiment_name)
    conditions, _ = get_conditions(genres, subsets, profiles)
    rows = []
    for condition in conditions:
        genre = condition['genre']
        subset = condition['subset']
        profile = condition['profile']
        # Predictions for this condition were written by run_condition.
        output_dir = os.path.join(results_dir, genre, subset)
        row = get_scores(
            test_pred_fn=os.path.join(output_dir, f'{profile}-test-pred.txt'),
            train_pred_fn=os.path.join(output_dir, f'{profile}-train-pred.txt'),
            genre=genre,
            subset=subset,
            data_dir=data_dir)
        # Keep the condition identifiers alongside the scores.
        row.update(condition)
        rows.append(row)
    scores_fn = os.path.join(results_dir, f'{experiment_name}-scores.csv')
    pd.DataFrame(rows).to_csv(scores_fn)
    logging.info(f'> Stored scores to {relpath(scores_fn)}')
| bacor/ISMIR2020 | src/profile_experiment.py | profile_experiment.py | py | 7,149 | python | en | code | 4 | github-code | 36 | [
{
"api_name": "numpy.random.seed",
"line_number": 14,
"usage_type": "call"
},
{
"api_name": "numpy.random",
"line_number": 14,
"usage_type": "attribute"
},
{
"api_name": "helpers.SUBSETS",
"line_number": 53,
"usage_type": "name"
},
{
"api_name": "helpers.GENRES",
... |
40857313291 | #!/usr/bin/env python
from __future__ import division, print_function
import argparse
import glob
from array import array
import numpy as np
import scipy as sp
import fitsio
from picca import constants
from picca.data import delta
from picca.Pk1D import (compute_cor_reso, compute_Pk_noise, compute_Pk_raw,
fill_masked_pixels, rebin_diff_noise, split_forest)
from picca.utils import print
def make_tree(tree,nb_bin_max):
    """Create the branches of the ROOT TTree used for 'root' output.

    ROOT binds each branch to the *address* of one of these array.array
    buffers, so the caller must keep these exact objects, write into them
    and then call tree.Fill() for every forest section.  Variable-length
    branches ('k', 'Pk', ...) are sized by the 'NbBin' branch.
    Returns all buffers in a fixed order.
    """
    # Scalar per-spectrum quantities (float buffers of length 1).
    zqso = array( 'f', [ 0. ] )
    mean_z = array( 'f', [ 0. ] )
    mean_reso = array( 'f', [ 0. ] )
    mean_SNR = array( 'f', [ 0. ] )
    nb_mask_pix = array( 'f', [ 0. ] )
    lambda_min = array( 'f', [ 0. ] )
    lambda_max= array( 'f', [ 0. ] )
    plate = array( 'i', [ 0 ] )
    mjd = array( 'i', [ 0 ] )
    fiber = array( 'i', [ 0 ] )
    # Per-bin arrays, pre-allocated at the maximum number of k bins.
    nb_r = array( 'i', [ 0 ] )
    k_r = array( 'f', nb_bin_max*[ 0. ] )
    Pk_r = array( 'f', nb_bin_max*[ 0. ] )
    Pk_raw_r = array( 'f', nb_bin_max*[ 0. ] )
    Pk_noise_r = array( 'f', nb_bin_max*[ 0. ] )
    Pk_diff_r = array( 'f', nb_bin_max*[ 0. ] )
    cor_reso_r = array( 'f', nb_bin_max*[ 0. ] )
    tree.Branch("zqso",zqso,"zqso/F")
    tree.Branch("mean_z",mean_z,"mean_z/F")
    tree.Branch("mean_reso",mean_reso,"mean_reso/F")
    tree.Branch("mean_SNR",mean_SNR,"mean_SNR/F")
    tree.Branch("lambda_min",lambda_min,"lambda_min/F")
    tree.Branch("lambda_max",lambda_max,"lambda_max/F")
    tree.Branch("nb_masked_pixel",nb_mask_pix,"nb_mask_pixel/F")
    tree.Branch("plate",plate,"plate/I")
    tree.Branch("mjd",mjd,"mjd/I")
    tree.Branch("fiber",fiber,"fiber/I")
    tree.Branch( 'NbBin', nb_r, 'NbBin/I' )
    tree.Branch( 'k', k_r, 'k[NbBin]/F' )
    tree.Branch( 'Pk_raw', Pk_raw_r, 'Pk_raw[NbBin]/F' )
    tree.Branch( 'Pk_noise', Pk_noise_r, 'Pk_noise[NbBin]/F' )
    tree.Branch( 'Pk_diff', Pk_diff_r, 'Pk_diff[NbBin]/F' )
    tree.Branch( 'cor_reso', cor_reso_r, 'cor_reso[NbBin]/F' )
    tree.Branch( 'Pk', Pk_r, 'Pk[NbBin]/F' )
    return zqso,mean_z,mean_reso,mean_SNR,lambda_min,lambda_max,plate,mjd,fiber,\
        nb_mask_pix,nb_r,k_r,Pk_r,Pk_raw_r,Pk_noise_r,cor_reso_r,Pk_diff_r
def compute_mean_delta(ll,delta,iv,zqso):
    """Fill the module-level debug histograms for one forest section.

    Relies on the globals (hdelta, hdelta_RF, hdelta_OBS, hivar, hsnr,
    hdelta_RF_we, hdelta_OBS_we) created in the __main__ block when
    --out-format root and --debug are set.

    NOTE(review): the `delta` parameter shadows the imported
    picca.data.delta module inside this function.
    """
    for i, _ in enumerate (ll):
        # ll is log10(lambda); convert to observed and rest-frame wavelength.
        ll_obs = np.power(10., ll[i])
        ll_rf = ll_obs/(1.+zqso)
        hdelta.Fill(ll_obs, ll_rf, delta[i])
        hdelta_RF.Fill(ll_rf, delta[i])
        hdelta_OBS.Fill(ll_obs, delta[i])
        hivar.Fill(iv[i])
        snr_pixel = (delta[i]+1)*np.sqrt(iv[i])
        hsnr.Fill(snr_pixel)
        # NOTE(review): hivar is filled a second time per pixel (also above);
        # looks accidental -- confirm before relying on its normalisation.
        hivar.Fill(iv[i])
        if (iv[i] < 1000):
            hdelta_RF_we.Fill(ll_rf, delta[i], iv[i])
            hdelta_OBS_we.Fill(ll_obs, delta[i], iv[i])
    return
if __name__ == '__main__':

    # ------------------------------------------------------------------
    # Command-line interface
    # ------------------------------------------------------------------
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter,
                                     description='Compute the 1D power spectrum')

    parser.add_argument('--out-dir', type=str, default=None, required=True,
                        help='Output directory')

    parser.add_argument('--out-format', type=str, default='fits', required=False,
                        help='Output format: root or fits (if root call PyRoot)')

    parser.add_argument('--in-dir', type=str, default=None, required=True,
                        help='Directory to delta files')

    parser.add_argument('--in-format', type=str, default='fits', required=False,
                        help=' Input format used for input files: ascii or fits')

    parser.add_argument('--SNR-min',type=float,default=2.,required=False,
                        help='Minimal mean SNR per pixel ')

    parser.add_argument('--reso-max',type=float,default=85.,required=False,
                        help='Maximal resolution in km/s ')

    parser.add_argument('--lambda-obs-min',type=float,default=3600.,required=False,
                        help='Lower limit on observed wavelength [Angstrom]' )

    parser.add_argument('--nb-part',type=int,default=3,required=False,
                        help='Number of parts in forest')

    parser.add_argument('--nb-pixel-min',type=int,default=75,required=False,
                        help='Minimal number of pixels in a part of forest')

    parser.add_argument('--nb-pixel-masked-max',type=int,default=40,required=False,
                        help='Maximal number of masked pixels in a part of forest')

    parser.add_argument('--no-apply-filling', action='store_true', default=False, required=False,
                        help='Dont fill masked pixels')

    parser.add_argument('--noise-estimate', type=str, default='mean_diff', required=False,
                        help='Estimate of Pk_noise pipeline/diff/mean_diff/rebin_diff/mean_rebin_diff')

    parser.add_argument('--forest-type', type=str, default='Lya', required=False,
                        help='Forest used: Lya, SiIV, CIV')

    parser.add_argument('--debug', action='store_true', default=False, required=False,
                        help='Fill root histograms for debugging')

    args = parser.parse_args()

    # Create root file
    if (args.out_format=='root') :
        from ROOT import TH1D, TFile, TTree, TProfile2D, TProfile
        storeFile = TFile(args.out_dir+"/Testpicca.root","RECREATE","PK 1D studies studies");
        nb_bin_max = 700
        tree = TTree("Pk1D","SDSS 1D Power spectrum Ly-a");
        zqso,mean_z,mean_reso,mean_SNR,lambda_min,lambda_max,plate,mjd,fiber,\
        nb_mask_pix,nb_r,k_r,Pk_r,Pk_raw_r,Pk_noise_r,cor_reso_r,Pk_diff_r = make_tree(tree,nb_bin_max)

        # control histograms
        # Rest-frame wavelength window [Angstrom] of the selected forest,
        # used for the debug profile histograms below.
        if (args.forest_type=='Lya'):
            forest_inf=1040.
            forest_sup=1200.
        elif (args.forest_type=='SiIV'):
            forest_inf=1270.
            forest_sup=1380.
        elif (args.forest_type=='CIV'):
            forest_inf=1410.
            forest_sup=1520.
        hdelta = TProfile2D( 'hdelta', 'delta mean as a function of lambda-lambdaRF', 36, 3600., 7200., 16, forest_inf, forest_sup, -5.0, 5.0)
        hdelta_RF = TProfile( 'hdelta_RF', 'delta mean as a function of lambdaRF', 320, forest_inf, forest_sup, -5.0, 5.0)
        hdelta_OBS = TProfile( 'hdelta_OBS', 'delta mean as a function of lambdaOBS', 1800, 3600., 7200., -5.0, 5.0)
        hdelta_RF_we = TProfile( 'hdelta_RF_we', 'delta mean weighted as a function of lambdaRF', 320, forest_inf, forest_sup, -5.0, 5.0)
        hdelta_OBS_we = TProfile( 'hdelta_OBS_we', 'delta mean weighted as a function of lambdaOBS', 1800, 3600., 7200., -5.0, 5.0)
        hivar = TH1D('hivar',' ivar ',10000,0.0,10000.)
        hsnr = TH1D('hsnr',' snr per pixel ',100,0.0,100.)
        hdelta_RF_we.Sumw2()
        hdelta_OBS_we.Sumw2()

    # Read deltas
    if (args.in_format=='fits') :
        fi = glob.glob(args.in_dir+"/*.fits.gz")
    elif (args.in_format=='ascii') :
        fi = glob.glob(args.in_dir+"/*.txt")
    data = {}
    ndata = 0

    # initialize randoms
    sp.random.seed(4)

    # loop over input files
    for i,f in enumerate(fi):
        if i%1==0:
            print("\rread {} of {} {}".format(i,len(fi),ndata),end="")

        # read fits or ascii file
        if (args.in_format=='fits') :
            hdus = fitsio.FITS(f)
            dels = [delta.from_fitsio(h,Pk1D_type=True) for h in hdus[1:]]
        elif (args.in_format=='ascii') :
            ascii_file = open(f,'r')
            dels = [delta.from_ascii(line) for line in ascii_file]

        ndata+=len(dels)
        print ("\n ndata = ",ndata)
        out = None

        # loop over deltas
        for d in dels:

            # Selection over the SNR and the resolution
            if (d.mean_SNR<=args.SNR_min or d.mean_reso>=args.reso_max) : continue

            # first pixel in forest
            for first_pixel,first_pixel_ll in enumerate(d.ll):
                if 10.**first_pixel_ll>args.lambda_obs_min : break

            # minimum number of pixel in forest
            nb_pixel_min = args.nb_pixel_min
            if ((len(d.ll)-first_pixel)<nb_pixel_min) : continue

            # Split in n parts the forest
            nb_part_max = (len(d.ll)-first_pixel)//nb_pixel_min
            nb_part = min(args.nb_part,nb_part_max)
            m_z_arr,ll_arr,de_arr,diff_arr,iv_arr = split_forest(nb_part,d.dll,d.ll,d.de,d.diff,d.iv,first_pixel)
            # NOTE(review): this loop variable `f` shadows the input file
            # name from the enumerate loop above.
            for f in range(nb_part):

                # rebin diff spectrum
                if (args.noise_estimate=='rebin_diff' or args.noise_estimate=='mean_rebin_diff'):
                    diff_arr[f]=rebin_diff_noise(d.dll,ll_arr[f],diff_arr[f])

                # Fill masked pixels with 0.
                ll_new,delta_new,diff_new,iv_new,nb_masked_pixel = fill_masked_pixels(d.dll,ll_arr[f],de_arr[f],diff_arr[f],iv_arr[f],args.no_apply_filling)
                if (nb_masked_pixel> args.nb_pixel_masked_max) : continue
                if (args.out_format=='root' and args.debug): compute_mean_delta(ll_new,delta_new,iv_new,d.zqso)

                lam_lya = constants.absorber_IGM["LYA"]
                z_abs = np.power(10.,ll_new)/lam_lya - 1.0
                mean_z_new = sum(z_abs)/float(len(z_abs))

                # Compute Pk_raw
                k,Pk_raw = compute_Pk_raw(d.dll,delta_new,ll_new)

                # Compute Pk_noise
                run_noise = False
                if (args.noise_estimate=='pipeline'): run_noise=True
                Pk_noise,Pk_diff = compute_Pk_noise(d.dll,iv_new,diff_new,ll_new,run_noise)

                # Compute resolution correction
                delta_pixel = d.dll*np.log(10.)*constants.speed_light/1000.
                cor_reso = compute_cor_reso(delta_pixel,d.mean_reso,k)

                # Compute 1D Pk
                if (args.noise_estimate=='pipeline'):
                    Pk = (Pk_raw - Pk_noise)/cor_reso
                elif (args.noise_estimate=='diff' or args.noise_estimate=='rebin_diff'):
                    Pk = (Pk_raw - Pk_diff)/cor_reso
                elif (args.noise_estimate=='mean_diff' or args.noise_estimate=='mean_rebin_diff'):
                    selection = (k>0) & (k<0.02)
                    if (args.noise_estimate=='mean_rebin_diff'):
                        selection = (k>0.003) & (k<0.02)
                    Pk_mean_diff = sum(Pk_diff[selection])/float(len(Pk_diff[selection]))
                    Pk = (Pk_raw - Pk_mean_diff)/cor_reso

                # save in root format
                if (args.out_format=='root'):
                    zqso[0] = d.zqso
                    mean_z[0] = m_z_arr[f]
                    mean_reso[0] = d.mean_reso
                    mean_SNR[0] = d.mean_SNR
                    lambda_min[0] = np.power(10.,ll_new[0])
                    lambda_max[0] = np.power(10.,ll_new[-1])
                    nb_mask_pix[0] = nb_masked_pixel
                    plate[0] = d.plate
                    mjd[0] = d.mjd
                    fiber[0] = d.fid
                    nb_r[0] = min(len(k),nb_bin_max)
                    # NOTE(review): this loop variable `i` shadows the input
                    # file index from the outer enumerate loop.
                    for i in range(nb_r[0]) :
                        k_r[i] = k[i]
                        Pk_raw_r[i] = Pk_raw[i]
                        Pk_noise_r[i] = Pk_noise[i]
                        Pk_diff_r[i] = Pk_diff[i]
                        Pk_r[i] = Pk[i]
                        cor_reso_r[i] = cor_reso[i]
                    tree.Fill()

                # save in fits format
                if (args.out_format=='fits'):
                    hd = [ {'name':'RA','value':d.ra,'comment':"QSO's Right Ascension [degrees]"},
                           {'name':'DEC','value':d.dec,'comment':"QSO's Declination [degrees]"},
                           {'name':'Z','value':d.zqso,'comment':"QSO's redshift"},
                           {'name':'MEANZ','value':m_z_arr[f],'comment':"Absorbers mean redshift"},
                           {'name':'MEANRESO','value':d.mean_reso,'comment':'Mean resolution [km/s]'},
                           {'name':'MEANSNR','value':d.mean_SNR,'comment':'Mean signal to noise ratio'},
                           {'name':'NBMASKPIX','value':nb_masked_pixel,'comment':'Number of masked pixels in the section'},
                           {'name':'PLATE','value':d.plate,'comment':"Spectrum's plate id"},
                           {'name':'MJD','value':d.mjd,'comment':'Modified Julian Date,date the spectrum was taken'},
                           {'name':'FIBER','value':d.fid,'comment':"Spectrum's fiber number"}
                    ]
                    cols=[k,Pk_raw,Pk_noise,Pk_diff,cor_reso,Pk]
                    names=['k','Pk_raw','Pk_noise','Pk_diff','cor_reso','Pk']
                    comments=['Wavenumber', 'Raw power spectrum', "Noise's power spectrum", 'Noise coadd difference power spectrum',\
                              'Correction resolution function', 'Corrected power spectrum (resolution and noise)']
                    units=['(km/s)^-1', 'km/s', 'km/s', 'km/s', 'km/s', 'km/s']

                    # On the first section, `out` is still None: create the
                    # output FITS file, then append one HDU per section.
                    # NOTE(review): the file name uses the loop variable `i`,
                    # which may have been overwritten by the root-output bin
                    # loop above; also the except branch passes `comment=`
                    # while the try branch passes `comments=` -- confirm
                    # which keyword fitsio expects here.
                    try:
                        out.write(cols,names=names,header=hd,comments=comments,units=units)
                    except AttributeError:
                        out = fitsio.FITS(args.out_dir+'/Pk1D-'+str(i)+'.fits.gz','rw',clobber=True)
                        out.write(cols,names=names,header=hd,comment=comments,units=units)

        if (args.out_format=='fits' and out is not None):
            out.close()

    # Store root file results
    if (args.out_format=='root'):
        storeFile.Write()

    print ("all done ")
| vserret/picca | bin/picca_Pk1D.py | picca_Pk1D.py | py | 13,158 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "array.array",
"line_number": 22,
"usage_type": "call"
},
{
"api_name": "array.array",
"line_number": 23,
"usage_type": "call"
},
{
"api_name": "array.array",
"line_number": 24,
"usage_type": "call"
},
{
"api_name": "array.array",
"line_number": ... |
13963392850 | #!/usr/bin/python3
import cv2
import numpy as np
import imutils
import argparse
# Command-line interface: input video, optional template image that fixes the
# output frame dimensions, and the output video path.
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", help="path to the video file", default="video.mp4")
ap.add_argument("-t", "--template", help="template png file with the wanted output dimensions")
ap.add_argument("-o", "--output", help="output video", default="output.mp4")
args = vars(ap.parse_args())
def findEdges(image):
    """Find the four corners of the page in an image.

    The image is downscaled to height 500 for faster, cleaner Canny edge
    detection; detected corner coordinates are scaled back to the original
    resolution.

    Returns a (4, 1, 2) int array of corner points for the largest
    4-vertex contour, or None when none of the five largest contours is a
    quadrilateral.
    """
    ratio = image.shape[0] / 500.0
    small = imutils.resize(image, height=500)
    # Grayscale + blur suppresses texture noise before edge detection.
    gray = cv2.cvtColor(small, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (5, 5), 0)
    edged = cv2.Canny(gray, 75, 200)

    cnts = cv2.findContours(edged.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    cnts = imutils.grab_contours(cnts)
    # Only the five largest contours can plausibly be the page outline.
    cnts = sorted(cnts, key=cv2.contourArea, reverse=True)[:5]
    for c in cnts:
        peri = cv2.arcLength(c, True)
        approx = cv2.approxPolyDP(c, 0.02 * peri, True)
        if len(approx) == 4:
            # Scale the corners back up to the full-resolution image.
            return (approx * ratio).astype(int)
    # Explicitly signal "no page found" instead of falling off the end.
    return None
def sortPoints(pts):
    """Order 4 corner points as top-left, top-right, bottom-right, bottom-left.

    The top-left corner minimizes x + y and the bottom-right maximizes it;
    the top-right minimizes y - x and the bottom-left maximizes it.
    """
    sums = pts.sum(axis=1)
    diffs = np.diff(pts, axis=1)
    ordered = np.zeros((4, 2), dtype="float32")
    ordered[0] = pts[np.argmin(sums)]   # top-left
    ordered[1] = pts[np.argmin(diffs)]  # top-right
    ordered[2] = pts[np.argmax(sums)]   # bottom-right
    ordered[3] = pts[np.argmax(diffs)]  # bottom-left
    return ordered
def part1(video, template, output):
    """Rectify the A4 page seen in `video` and write the result to `output`.

    The page is assumed not to move, so its corners are detected once on
    the first frame and a single homography is applied to every frame.
    `template` (optional) only provides the output frame dimensions.
    """
    # split video input frame by frame
    vidcap = cv2.VideoCapture(video)
    success, image = vidcap.read()
    count = 0
    inputFrames = []
    while success:
        inputFrames.append(image)
        success, image = vidcap.read()
        count += 1

    # open the template image
    # Default output size matches an A4 page at 150 dpi (height, width, ch).
    output_shape = cv2.imread(template).shape if template else (1650, 1275, 3)

    # start the video writer for the video output
    # NOTE(review): the writer assumes 30 fps rather than reading the input
    # frame rate from vidcap -- confirm the source clips are 30 fps.
    out_video = cv2.VideoWriter(output, cv2.VideoWriter_fourcc(
        *'mp4v'), 30, (output_shape[1], output_shape[0]))

    # since the a4 page doesn't change position we can simply grab the
    # edges position from the first video frame
    # NOTE(review): findEdges may return None when no quadrilateral is
    # found, which would make reshape() raise here.
    a4Edges = sortPoints(findEdges(inputFrames[0]).reshape(4, 2))

    # add a small margin to compensate for the not so perfect edge detection
    # *this margin was checked manually*
    a4Edges = np.array([a4Edges[0] + [4, -8],
                        a4Edges[1] + [9, -2],
                        a4Edges[2] + [4, 6],
                        a4Edges[3] + [0, 5]],
                       dtype="float32")

    outputEdges = np.array([[0, 0],
                            [output_shape[0] - 1, 0],
                            [output_shape[0] - 1, output_shape[1] - 1],
                            [0, output_shape[1] - 1]],
                           dtype="float32")

    # get the homography matrix using both images edges
    homography = cv2.getPerspectiveTransform(a4Edges, outputEdges)

    # apply the homography transformation to every frame from the input vid
    # NOTE(review): warpPerspective's dsize expects (width, height) but gets
    # output_shape[:2] == (height, width); this looks deliberately
    # compensated by the 90-degree rotation before writing -- confirm.
    for frame in inputFrames:
        transformed = cv2.warpPerspective(frame, homography, output_shape[:2])
        out_video.write(cv2.rotate(transformed, cv2.ROTATE_90_CLOCKWISE))

    out_video.release()
# Run the scanner with the CLI-supplied video, template and output paths.
part1(args["video"], args["template"], args["output"])
| rickerp/video-page-scanner | main.py | main.py | py | 3,233 | python | en | code | 1 | github-code | 36 | [
{
"api_name": "argparse.ArgumentParser",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "imutils.resize",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "cv2.cvtColor",
"line_number": 19,
"usage_type": "call"
},
{
"api_name": "cv2.COLOR_BGR2GRAY... |
36085086671 | import numpy as np
import time
from pipython import GCSDevice, pitools
from core.module import Base, Connector
from core.configoption import ConfigOption
from interface.confocal_scanner_interface import ConfocalScannerInterface
class ConfocalScannerPI_E727(Base, ConfocalScannerInterface):
    """ Confocal scanner for PI E727 controller.

    Positions exposed through this interface are in meters; the GCS
    controller itself works in micrometers, so values are converted by a
    factor of 1e6 at the hardware boundary.
    """
    _modclass = 'ConfocalScannerPI_E727'
    _modtype = 'hardware'

    # connectors
    fitlogic = Connector(interface='FitLogic')

    # config
    # clock_frequency = ConfigOption('clock_frequency', 100, missing='warn') # I have no idea if this is reqired for E727 or not...
    # The clock freq. sets the "resolution" for the picture along the scanned line...
    # ... it dosn't... maybe it is used for sync.-ing?
    E727_USBserial = ConfigOption('E727_USBserial', '0119019672', missing='warn')

    CONTROLLERNAME = 'E-727'
    STAGES = None # stage model not needed, but stage used is 'P-733.3CD'
    REFMODES = None

    def __init__(self, config, **kwargs):
        super().__init__(config=config, **kwargs)

        # Internal parameters
        self._line_length = None
        self._voltage_range = [-10., 10.]
        # Travel ranges in meters for x, y, z and the unused 4th channel a.
        self._position_range = [[0, 100e-6], [0, 100e-6], [0, 10e-6], [0, 1e-6]]
        # self._position_range = [[self.get_position_range()[0][0], self.get_position_range()[0][1]],
        #                         [self.get_position_range()[1][0], self.get_position_range()[1][1]],
        #                         [self.get_position_range()[2][0], self.get_position_range()[2][1]], [0, 1e-6]]
        self._current_position = [0, 0, 0, 0][0:len(self.get_scanner_axes())]

    def on_activate(self):
        """ Initialisation performed during activation of the module.

        Connect to a PIPython device, using GCSDevice as context manager with "with".
        Different options for establishing connection to E-727 controller module, are given below.
        """
        self.e727_controller = GCSDevice(self.CONTROLLERNAME) #.ConnectUSB(serialnum=self.E727_USBserial)
        # pidevice.ConnectTCPIP(ipaddress='192.168.178.42')
        self.e727_controller.ConnectUSB(serialnum=self.E727_USBserial)
        # pidevice.ConnectRS232(comport=1, baudrate=115200)
        # Servo on, references the stage as configured.
        pitools.startup(self.e727_controller, stages=self.STAGES, refmodes=self.REFMODES)

    def on_deactivate(self):
        """ Deactivate properly the confocal scanner dummy.
        """
        self.reset_hardware()

    def reset_hardware(self):
        """ Resets the hardware, so the connection is lost and other programs
            can access it.

        @return int: error code (0:OK, -1:error)
        """
        if self.e727_controller.IsConnected() == True:
            self.e727_controller.close()
        if self.e727_controller.IsConnected() == False:
            self.log.warning('Scanning Device will be reset.')
            return 0
        else:
            return -1

    def get_position_range(self):
        """ Returns the physical range of the scanner.

        Queries the controller's travel limits (qTMN/qTMX, in micrometers)
        and converts them to meters.

        @return float [4][2]: array of 4 ranges with an array containing lower
                              and upper limit
        """
        rangemin = self.e727_controller.qTMN()
        rangemax = self.e727_controller.qTMX()
        return [[round(rangemin['1']*1e-6, 9), round(rangemax['1']*1e-6, 9)],
                [round(rangemin['2']*1e-6, 9), round(rangemax['2']*1e-6, 9)],
                [round(rangemin['3']*1e-6, 9), round(rangemax['3']*1e-6, 9)], [0, 1e-6]]
        # return [[rangemin['1']* 1e-6, rangemax['1']*1e-6],
        #         [rangemin['2']* 1e-6, rangemax['2']*1e-6],
        #         [rangemin['3']* 1e-6, rangemax['3']*1e-6], [0, 1e-6]]
        #return self._position_range

    def set_position_range(self, myrange=None):
        """ Sets the physical range of the scanner.

        @param float [4][2] myrange: array of 4 ranges with an array containing
                                     lower and upper limit

        @return int: error code (0:OK, -1:error)
        """
        if myrange is None:
            myrange = [[0, 100e-6], [0, 100e-6], [0, 10e-6], [0, 1e-6]]

        if not isinstance(myrange, (frozenset, list, set, tuple, np.ndarray, )):
            self.log.error('Given range is no array type.')
            return -1

        if len(myrange) != 4:
            self.log.error('Given range should have dimension 4, but has '
                    '{0:d} instead.'.format(len(myrange)))
            return -1

        for pos in myrange:
            if len(pos) != 2:
                self.log.error('Given range limit {1:d} should have '
                        'dimension 2, but has {0:d} instead.'.format(
                            len(pos),
                            pos))
                return -1
            if pos[0]>pos[1]:
                self.log.error('Given range limit {0:d} has the wrong '
                        'order.'.format(pos))
                return -1

        self._position_range = myrange
        return 0

    def set_voltage_range(self, myrange=None):
        """ Sets the voltage range of the E727 controller.

        @param float [2] myrange: array containing lower and upper limit

        @return int: error code (0:OK, -1:error)
        """
        if myrange is None:
            myrange = [-10., 10.]

        if not isinstance(myrange, (frozenset, list, set, tuple, np.ndarray, )):
            self.log.error('Given range is no array type.')
            return -1

        if len(myrange) != 2:
            self.log.error('Given range should have dimension 2, but has '
                    '{0:d} instead.'.format(len(myrange)))
            return -1

        if myrange[0]>myrange[1]:
            self.log.error('Given range limit {0:d} has the wrong '
                    'order.'.format(myrange))
            return -1

        if self.module_state() == 'locked':
            self.log.error('A Scanner is already running, close this one '
                    'first.')
            return -1

        self._voltage_range = myrange
        return 0

    def get_scanner_axes(self):
        """ Scan axes of this scanner: 3 cartesian axes plus a 4th channel.
        """
        return ['x', 'y', 'z', 'a']

    def get_scanner_count_channels(self):
        """ Counting channels in confocal: normal, negative and a ramp."""
        return ['Norm']

    def set_up_scanner_clock(self, clock_frequency=None, clock_channel=None):
        """ Configures the hardware clock of the NiDAQ card to give the timing.

        @param float clock_frequency: if defined, this sets the frequency of the
                                      clock
        @param str clock_channel: if defined, this is the physical channel of
                                  the clock

        @return int: error code (0:OK, -1:error)
        """
        # Only stored for bookkeeping; the E-727 itself uses no clock here.
        if clock_frequency is not None:
            self._clock_frequency = float(clock_frequency)

        self.log.debug('ConfocalScanner_PI_E-727>set_up_scanner_clock')
        return 0

    def set_up_scanner(self, counter_channels=None, sources=None, clock_channel=None, scanner_ao_channels=None):
        """ Configures the actual scanner with a given clock.

        @param str counter_channel: if defined, this is the physical channel of
                                    the counter
        @param str photon_source: if defined, this is the physical channel where
                                  the photons are to count from
        @param str clock_channel: if defined, this specifies the clock for the
                                  counter
        @param str scanner_ao_channels: if defined, this specifies the analoque
                                        output channels

        @return int: error code (0:OK, -1:error)
        """
        self.log.debug('ConfocalScanner_PI_E-727>set_up_scanner')
        return 0

    def scanner_set_position(self, x=None, y=None, z=None, a=None):
        """ Move stage to x, y, z, a (where a is the fourth voltage channel).

        @param float x: postion in x-direction (volts)
        @param float y: postion in y-direction (volts)
        @param float z: postion in z-direction (volts)
        @param float a: postion in a-direction (volts)

        @return int: error code (0:OK, -1:error)
        """
        if self.module_state() == 'locked':
            self.log.error('A Scanner is already running, close this one first.')
            return -1

        # Convert m -> um for the GCS controller.  NOTE(review): leaving any
        # coordinate at its None default raises TypeError here (None*1e6).
        coord_list = [x*1e6, y*1e6, z*1e6, a*1e6]
        # t0 = time.clock()
        # Move the three hardware axes; the 4th channel 'a' is not sent.
        for axis, target in zip(self.e727_controller.axes, coord_list[:-1]):
            self.e727_controller.MOV(axis, target)
            # Blocks until on target after each single-axis move.
            pitools.waitontarget(self.e727_controller) #, axes=axis) # this takes up ca. 0.12 s...
        # wait_time = time.clock() - t0
        self._current_position = [x, y, z, a][0:len(self.get_scanner_axes())]
        # print('current_scanner_position: ' + str(self._current_position))
        return 0

    def get_scanner_position(self):
        """ Get the current position of the scanner hardware.

        Returns the cached software position; the hardware query (qPOS) is
        commented out below.

        @return float[]: current position in (x, y, z, a).
        """
        # curpos = self.e727_controller.qPOS()
        # self._current_position = [curpos['1']*1e-6, curpos['2']*1e-6, curpos['3']*1e-6, self._current_position[-1]*1e-6]
        # # self._current_position = [curpos['1'], curpos['2'], curpos['3'], self._current_position[-1]]
        return self._current_position[0:len(self.get_scanner_axes())]

    def _set_up_line(self, length=100):
        """ Sets up the analoque output for scanning a line.

        @param int length: length of the line in pixel

        @return int: error code (0:OK, -1:error)
        """
        self._line_length = length
        self.log.debug('ConfocalScannerPI_E-727>set_up_line')
        return 0

    def scan_line(self, line_path=None, pixel_clock=False):
        """ Scans a line and returns the counts on that line.

        NOTE(review): this is a stub -- it does not move the stage or count
        photons; it only updates the cached position to the line's end point
        and returns a dummy ramp, one value per pixel.

        @param float[][4] line_path: array of 4-part tuples defining the voltage points
        @param bool pixel_clock: whether we need to output a pixel clock for this line

        @return float[]: the photon counts per second
        """
        if not isinstance(line_path, (frozenset, list, set, tuple, np.ndarray, )):
            self.log.error('Given voltage list is no array type.')
            return np.array([[-1.]])

        if np.shape(line_path)[1] != self._line_length:
            self._set_up_line(np.shape(line_path)[1])

        self._current_position = list(line_path[:, -1])
        return np.array([[i] for i in range(self._line_length)])#.transpose()

    def close_scanner(self):
        """ Closes the scanner and cleans up afterwards.

        @return int: error code (0:OK, -1:error)
        """
        self.log.debug('ConfocalScannerDummy>close_scanner')
        return 0

    def close_scanner_clock(self, power=0):
        """ Closes the clock and cleans up afterwards.

        @return int: error code (0:OK, -1:error)
        """
        self.log.debug('ConfocalScannerDummy>close_scanner_clock')
        return 0

    def _volt_to_position(self, volts=None):
        """ Converts a set of position pixels to actual voltages.

        @param float[][n]: array of n-part tuples of corresponding voltages
        @return float[] positions: array of n-part tuples defining the pixels

        The positions is typically a matrix like
            [[x_values], [y_values], [z_values], [a_values]]
        but x, xy, xyz and xyza are allowed formats.
        """
        if not isinstance(volts, (frozenset, list, set, tuple, np.ndarray, )):
            self.log.error('Given voltage list is no array type.')
            return np.array([np.NaN])

        poslist = []
        # Linear map from the voltage range onto each axis' position range,
        # rounded to nanometer precision.
        for i, volt in enumerate(volts):
            poslist.append(
                round((self._position_range[i][1] - self._position_range[i][0])
                / (self._voltage_range[1] - self._voltage_range[0])
                * (volt - self._voltage_range[0])
                + self._position_range[i][0], 9)
            )
        positions = np.vstack(poslist)

        for i, pos in enumerate(positions):
            if pos.min() < self._position_range[i][0] or pos.max() > self._position_range[i][1]:
                self.log.error(
                    'Positions ({0}, {1}) exceed the limit, the positions have to '
                    'be adjusted to stay in the given range.'.format(pos.min(), pos.max()))
                return np.array([np.NaN])
        positions = [i[0] for i in positions]
        # print(positions)
        return positions
| chrberrig/qudi_from_lab | hardware/confocal_scanner_PI_E-727.py | confocal_scanner_PI_E-727.py | py | 12,609 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "core.module.Base",
"line_number": 9,
"usage_type": "name"
},
{
"api_name": "interface.confocal_scanner_interface.ConfocalScannerInterface",
"line_number": 9,
"usage_type": "name"
},
{
"api_name": "core.module.Connector",
"line_number": 18,
"usage_type": "ca... |
243273712 | import json
from flask import Flask, render_template, \
request, redirect, flash, \
url_for
def _initialize_clubs():
data = {'clubs': []}
with open('clubs.json' , 'w') as club_file:
json.dump(data, club_file, indent=4)
return []
def loadClubs():
    """Load the list of clubs from clubs.json.

    @return: list of club dicts (keys: name, email, points). If the file
        lacks a 'clubs' key the file is re-initialized and an empty list
        is returned.
    """
    with open('clubs.json') as c:
        try:
            # listOfClubs is a list of dicts with the keys name, email, points
            return json.load(c)['clubs']
        except KeyError:
            pass
    # Fix: re-initialize only after the 'with' block has closed the handle.
    # The original called c.close() inside the context manager (redundant)
    # and rewrote the file while it was still open for reading.
    return _initialize_clubs()
def _initialize_competitions():
data = {'competitions': []}
with open('competitions.json', 'w') as comp_file:
json.dump(data, comp_file, indent=4)
return []
def loadCompetitions():
    """Load the list of competitions from competitions.json.

    @return: list of competition dicts. If the file lacks a
        'competitions' key the file is re-initialized and an empty list
        is returned.
    """
    with open('competitions.json') as comps:
        try:
            return json.load(comps)['competitions']
        except KeyError:
            pass
    # Fix: re-initialize only after the 'with' block has closed the handle.
    # The original called comps.close() inside the context manager
    # (redundant) and rewrote the file while it was still open.
    return _initialize_competitions()
def clubs_with_comp_keys(the_clubs, competitions):
    """Attach to every club a 'reserved_places' dict mapping each
    competition name to a zeroed reservation counter.

    The clubs are mutated in place and the same list is returned.
    """
    for club in the_clubs:
        club['reserved_places'] = {comp['name']: 0 for comp in competitions}
    return the_clubs
# Flask application setup and in-memory "database": clubs and competitions
# are loaded once from their JSON files when the module is imported.
app = Flask(__name__)
app.secret_key = 'something_special'  # required by flash() (session signing)
competitions = loadCompetitions()
only_clubs = loadClubs()
# Enrich every club with a per-competition reservation counter.
clubs = clubs_with_comp_keys(only_clubs, competitions)
@app.route('/')
def index():
    """Render the site's index page."""
    return render_template('index.html')
@app.route('/showSummary', methods=['POST'])
def showSummary():
    """Look a club up by the posted email and show its welcome page.

    Unknown emails are flashed an error and sent back to the index.
    """
    matches = [c for c in clubs if c['email'] == request.form['email']]
    if not matches:
        flash("Sorry, this email doesn't exist in the database...")
        return redirect(url_for('index'))
    return render_template('welcome.html', club=matches[0], competitions=competitions)
@app.route('/book/<competition>/<club>')
def book(competition, club):
    """Show the booking page for the given club/competition pair."""
    matching_clubs = [c for c in clubs if c['name'] == club]
    matching_comps = [c for c in competitions if c['name'] == competition]
    if matching_clubs and matching_comps:
        return render_template('booking.html', club=matching_clubs[0],
                               competition=matching_comps[0])
    flash("Something went wrong-please try again")
    # NOTE(review): on this error path 'club' is the raw URL string, not a
    # club dict — confirm welcome.html tolerates that.
    return render_template('welcome.html', club=club, competitions=competitions)
@app.route('/purchasePlaces', methods=['POST'])
def purchasePlaces():
    """Book places in a competition for a club.

    Rejects the booking when the club or competition is unknown, when the
    competition lacks capacity, when the club lacks points, or when the
    club would exceed 12 places in total for that competition.
    """
    comp_matches = [c for c in competitions if c['name'] == request.form['competition']]
    club_matches = [c for c in clubs if c['name'] == request.form['club']]
    if not club_matches or not comp_matches:
        flash("Something went wrong: club does not exist \n \
            or there is no competitions")
        return render_template('welcome.html', club=club_matches, competitions=competitions)

    the_competition = comp_matches[0]
    the_club = club_matches[0]
    competition_name = the_competition['name']
    places_requested = int(request.form['places'])
    total_reserved = places_requested + the_club['reserved_places'][competition_name]
    # Points/places are stored as strings in the JSON data, hence int().
    remaining_points = int(the_club['points']) - places_requested
    remaining_seats = int(the_competition['numberOfPlaces']) - places_requested

    if remaining_seats < 0:
        flash("There's not enough places in this competition \n \
            to book all these places")
        return render_template('welcome.html',
                               club=the_club,
                               competitions=competitions)
    if remaining_points < 0:
        flash("You don't own enough points to book all these places")
        return render_template('booking.html',
                               club=the_club,
                               competition=the_competition)
    if total_reserved > 12:
        flash("You can't book more than 12 places for a competition !")
        return render_template('booking.html',
                               club=the_club,
                               competition=the_competition)

    # Commit the booking (stored back as strings, matching the JSON files).
    the_competition['numberOfPlaces'] = str(remaining_seats)
    the_club['points'] = str(remaining_points)
    the_club['reserved_places'][competition_name] = total_reserved
    flash('Great-booking complete!')
    return render_template('welcome.html',
                           club=the_club,
                           competitions=competitions)
@app.route('/showClubsPoints/<club>')
def showClubsPoints(club):
    """Show the points dashboard.

    'club' is 'offline' when the visitor arrives from the index page
    (not logged in); otherwise it is the name of the connected club,
    coming from /showSummary or /book/....
    """
    if club == 'offline':
        return render_template('dashboard.html',
                               actual_club={'name': 'offline'},
                               clubs=clubs)
    matches = [c for c in clubs if c['name'] == club]
    if matches:
        return render_template('dashboard.html',
                               actual_club=matches[0],
                               clubs=clubs)
    flash("Something went wrong-please try again")
    return redirect(url_for('index'))
@app.route('/logout')
def logout():
    """Flash a goodbye message and send the visitor back to the index."""
    goodbye = 'You are now disconnected, thank you for your visit here !'
    flash(goodbye)
    return redirect(url_for('index'))
| Arz4cordes/Projet11_OC | Python_Testing-master/server.py | server.py | py | 5,876 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "json.dump",
"line_number": 10,
"usage_type": "call"
},
{
"api_name": "json.load",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "json.dump",
"line_number": 29,
"usage_type": "call"
},
{
"api_name": "json.load",
"line_number": 36,
... |
36815278942 | from ckeditor_uploader.fields import RichTextUploadingField
from django.core.exceptions import ValidationError
from django.db import models
# Create your models here.
from django.utils.safestring import mark_safe
from extensions.utils import jalali_converter
class Setting(models.Model):
    """Site-wide settings: titles, contact details, social links and the
    rich-text contents of the static pages (about, contact, ...)."""
    STATUS = (
        ('True', 'فعال'),
        ('False', 'غیرغعال'),
    )
    title = models.CharField(max_length=150, verbose_name='عنوان وب سایت')
    keywords = models.CharField(max_length=255, verbose_name='کلمات کلیدی')
    description = models.CharField(max_length=255, verbose_name='عنوان')
    company = models.CharField(blank=True, max_length=50, verbose_name='شرکت')
    address = models.CharField(blank=True, max_length=255, verbose_name='آدرس')
    phone = models.CharField(blank=True, max_length=20,
                             verbose_name="تلفن همراه (جهت دریافت اطلاع رسانی پیامکی وبسایت)")
    phone2 = models.CharField(blank=True, max_length=20, verbose_name='شماره تماس ثابت')
    fax = models.CharField(blank=True, max_length=20, verbose_name='فکس')
    email = models.CharField(blank=True, max_length=70, verbose_name='ایمیل')
    # smtpserver = models.CharField(blank=True, max_length=20)
    # smtpemail = models.CharField(blank=True, max_length=50)
    # smtppassword = models.CharField(blank=True, max_length=50)
    # smtpport = models.CharField(blank=True, max_length=5)
    icon = models.ImageField(blank=True, upload_to='images/', verbose_name='لوگو')
    # NOTE(review): 'facecbook' is a typo, but renaming the field would
    # require a migration and template updates — left as-is.
    facecbook = models.CharField(blank=True, max_length=50, verbose_name='فیسبوک')
    instagram = models.CharField(blank=True, max_length=50, verbose_name='اینستاگرام')
    telegram = models.CharField(blank=True, max_length=50, verbose_name='تلگرام')
    youtube = models.CharField(blank=True, max_length=50, verbose_name='یوتیوب')
    twitter = models.CharField(blank=True, max_length=50, verbose_name='توییتر')
    aboutus = RichTextUploadingField(blank=True, verbose_name='درباره ما')
    contact = RichTextUploadingField(blank=True, verbose_name='تماس با ما')
    worktime = RichTextUploadingField(blank=True, verbose_name='ساعت کاری')
    customerservices = RichTextUploadingField(blank=True, verbose_name='خدمات مشتریان')
    notices = RichTextUploadingField(blank=True, verbose_name="اعلامیه وبسایت")
    status = models.CharField(max_length=20, choices=STATUS, verbose_name='وضعیت')
    create_at = models.DateTimeField(auto_now_add=True, verbose_name='تاریخ ایجاد')
    update_at = models.DateTimeField(auto_now=True, verbose_name='تاریخ بروزرسانی')

    def __str__(self):
        # BUG FIX: this was misspelled '__String__', a name Python never
        # calls, so the admin showed the default object representation.
        return self.title

    class Meta:
        verbose_name = 'اطلاعات وب سایت'
        verbose_name_plural = 'اطلاعات سایت'

    def j_date(self):
        # Jalali (Persian) calendar rendering of the creation date.
        return jalali_converter(self.create_at)
    j_date.short_description = 'تاریخ'
class SliderManager(models.Manager):
    """Manager exposing only the slides whose status flag is True."""
    def active(self):
        active_queryset = self.filter(status=True)
        return active_queryset
class SliderContent(models.Model):
    """One slide of the homepage carousel."""
    description = models.CharField(blank=True, max_length=255, verbose_name="توضیحات")
    image = models.ImageField(upload_to='images/', verbose_name="تصویر",
                              help_text="حداقل نسبت تصویر 2:1 می باشد - رزولوشن قابل قبول 400 * 1500")
    status = models.BooleanField(default=True, verbose_name="وضعیت")
    page_url = models.URLField(max_length=200, verbose_name="آدرس")
    ordering_position = models.IntegerField(verbose_name="ترتیب نمایش اسلاید")
    # custom manager: SliderContent.objects.active() yields status=True rows
    objects = SliderManager()

    def __str__(self):
        return self.description

    def image_tag(self):
        # Thumbnail preview rendered in the admin change list.
        return mark_safe('<img style="border-radius: 5px" src="{}" height="75"/>'.format(self.image.url))
    image_tag.short_description = "تصویر"

    class Meta:
        verbose_name = 'اسلاید'
        verbose_name_plural = 'اسلایدر'
        # slides are displayed in this explicit order
        ordering = ["ordering_position"]
class ContactMessage(models.Model):
    """A message submitted through the contact form, with an admin-managed
    workflow status (new / read / closed)."""
    STATUS = (
        ('New', 'جدید'),
        ('Read', 'خوانده شده'),
        ('Closed', 'بسته'),
    )
    name = models.CharField(max_length=30, blank=True, verbose_name='نام و نام خانوادگی')
    email = models.EmailField(max_length=70, verbose_name='ایمیل')
    subject = models.CharField(max_length=50, verbose_name='موضوع')
    message = models.CharField(max_length=255, verbose_name='پیام')
    # staff reply, filled in from the admin
    note = models.CharField(blank=True, max_length=255, verbose_name='پاسخ')
    # IP address of the sender at submission time
    ip = models.CharField(blank=True, max_length=20, verbose_name='آی پی')
    status = models.CharField(max_length=30, choices=STATUS, default='New', verbose_name='وضعیت')
    create_at = models.DateTimeField(auto_now_add=True, verbose_name='تاریخ ایجاد')
    update_at = models.DateTimeField(auto_now=True, verbose_name='تاریخ بروزرسانی')

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = 'پیام'
        verbose_name_plural = 'پیام ها'

    def j_date(self):
        # Jalali (Persian) calendar rendering of the creation date.
        return jalali_converter(self.create_at)
    j_date.short_description = 'تاریخ'
class BannerManager(models.Manager):
    """Manager exposing only the banners whose status flag is True."""
    def active(self):
        active_queryset = self.filter(status=True)
        return active_queryset
class BannerContent(models.Model):
    """A promotional banner shown at the bottom of the homepage."""
    description = models.CharField(max_length=255, verbose_name="توضیحات")
    image = models.ImageField(upload_to='images/', verbose_name="تصویر اصلی",
                              help_text="این بنر در قسمت پایین صفحه اصلی سایت نمایش داده خواهد شد.")
    status = models.BooleanField(default=True, verbose_name="وضعیت")
    page_url = models.URLField(max_length=200, verbose_name="آدرس")
    ordering_position = models.IntegerField(verbose_name="ترتیب نمایش بنر")
    # custom manager: BannerContent.objects.active() yields status=True rows
    objects = BannerManager()

    def __str__(self):
        return self.description

    def image_tag(self):
        # Thumbnail preview rendered in the admin change list.
        return mark_safe('<img style="border-radius: 5px" src="{}" height="75"/>'.format(self.image.url))
    image_tag.short_description = "تصویر"

    class Meta:
        verbose_name = 'بنر'
        verbose_name_plural = 'بنرها'
        ordering = ["ordering_position"]
class BrandManager(models.Manager):
    """Manager exposing only the brands whose status flag is True."""
    def active(self):
        active_queryset = self.filter(status=True)
        return active_queryset
class BrandContent(models.Model):
    """A brand/partner logo displayed on the site."""
    description = models.CharField(max_length=255, verbose_name="توضیحات")
    logo = models.ImageField(upload_to='images/', verbose_name="تصویر")
    status = models.BooleanField(default=True, verbose_name="وضعیت")
    page_url = models.URLField(max_length=200, verbose_name="آدرس")
    ordering_position = models.IntegerField(verbose_name="ترتیب نمایش برند")
    # custom manager: BrandContent.objects.active() yields status=True rows
    objects = BrandManager()

    def __str__(self):
        return self.description

    def image_tag(self):
        # Thumbnail preview rendered in the admin change list.
        return mark_safe('<img style="border-radius: 5px" src="{}" height="75"/>'.format(self.logo.url))
    image_tag.short_description = "تصویر"

    class Meta:
        verbose_name = 'برند'
        verbose_name_plural = 'برندها'
        ordering = ["ordering_position"]
class FAQ(models.Model):
    """A frequently-asked question with a rich-text answer."""
    STATUS = (
        ('True', 'فعال'),
        ('False', 'غیرغعال'),
    )
    ordering_number = models.IntegerField()
    question = models.CharField(max_length=300, verbose_name='سوال')
    answer = RichTextUploadingField(blank=True, verbose_name='پاسخ')
    status = models.CharField(max_length=20, choices=STATUS, verbose_name='وضعیت')
    create_at = models.DateTimeField(auto_now_add=True, verbose_name='تاریخ ایجاد')
    update_at = models.DateTimeField(auto_now=True, verbose_name='تاریخ بروزرسانی')

    def __str__(self):
        # Added for consistency: every other model in this module defines
        # __str__; without it the admin shows "FAQ object (pk)".
        return self.question

    def j_date(self):
        # Jalali (Persian) calendar rendering of the creation date.
        return jalali_converter(self.create_at)
    j_date.short_description = 'تاریخ'

    class Meta:
        verbose_name = 'سوال'
        verbose_name_plural = 'سوالات پر تکرار'
        ordering = ["ordering_number"]
| amirmovafagh/ecommerce-project-django | home/models.py | models.py | py | 8,228 | python | fa | code | 0 | github-code | 36 | [
{
"api_name": "django.db.models.Model",
"line_number": 11,
"usage_type": "attribute"
},
{
"api_name": "django.db.models",
"line_number": 11,
"usage_type": "name"
},
{
"api_name": "django.db.models.CharField",
"line_number": 16,
"usage_type": "call"
},
{
"api_name"... |
15733676375 | __author__ = "evas"
__docformat__ = "reStructuredText"
import logging
import numpy as np
# http://stackoverflow.com/questions/12459811/how-to-embed-matplotib-in-pyqt-for-dummies
# see also: http://matplotlib.org/users/navigation_toolbar.html
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.colors import LogNorm
from matplotlib.figure import Figure
from PyQt5 import QtWidgets
from PyQt5.QtCore import QObject, Qt, pyqtSignal
# a useful constant
from uwsift.common import Info
from uwsift.model.layer_model import LayerModel
from uwsift.queue import TASK_DOING, TASK_PROGRESS
# Stuff for custom toolbars
try:
import matplotlib.backends.qt_editor.figureoptions as figureoptions
except ImportError:
figureoptions = None
LOG = logging.getLogger(__name__)
DEFAULT_POINT_PROBE = "default_probe_name"
class CustomNavigationToolbar(NavigationToolbar):
    """Custom matplotlib toolbar whose 'edit parameters' dialog can skip
    the colorbar axes and jump straight to the data axes."""

    def __init__(self, *args, **kwargs):
        # BUG FIX: pop (not get) the custom keyword so it is not forwarded
        # to NavigationToolbar.__init__, which would raise TypeError on an
        # unexpected keyword argument.
        self.__include_colorbar = kwargs.pop("include_colorbar", False)
        super(CustomNavigationToolbar, self).__init__(*args, **kwargs)

    def edit_parameters(self):
        allaxes = self.canvas.figure.get_axes()
        if not allaxes:
            QtWidgets.QMessageBox.warning(self.parent, "Error", "There are no axes to edit.")
            return
        elif len(allaxes) == 1:
            (axes,) = allaxes
        else:
            titles = []
            # remember the index of the axes that is NOT a colorbar
            not_colorbar_idx = -1
            for idx, axes in enumerate(allaxes):
                if any(x.colorbar for x in axes.images):
                    not_colorbar_idx = idx
                # build a human-readable label for the axes selection dialog
                name = (
                    axes.get_title()
                    or " - ".join(filter(None, [axes.get_xlabel(), axes.get_ylabel()]))
                    or "<anonymous {} (id: {:#x})>".format(type(axes).__name__, id(axes))
                )
                titles.append(name)
            if len(titles) == 2 and not_colorbar_idx != -1 and not self.__include_colorbar:
                # exactly one data axes + one colorbar: skip the dialog
                axes = allaxes[not_colorbar_idx]
            else:
                item, ok = QtWidgets.QInputDialog.getItem(self.parent, "Customize", "Select axes:", titles, 0, False)
                if ok:
                    axes = allaxes[titles.index(str(item))]
                else:
                    return
        figureoptions.figure_edit(axes, self)
class ProbeGraphManager(QObject):
    """The ProbeGraphManager manages the many tabs of the Area Probe Graphs.

    One ProbeGraphDisplay is kept per tab (self.graphs); point probes are
    shared across all tabs (self.point_probes).
    """
    # signals
    didChangeTab = pyqtSignal(
        tuple,
    )  # list of probe areas to show
    didClonePolygon = pyqtSignal(str, str)
    drawChildGraph = pyqtSignal(
        str,
    )
    pointProbeChanged = pyqtSignal(str, bool, tuple)

    def __init__(self, tab_widget, auto_update_checkbox, update_button, workspace, layer_model: LayerModel, queue):
        """Set up our tab widget with an appropriate graph object in the first
        tab.

        FUTURE, once we are saving our graph configurations, load those instead
        of setting up this default.

        :param auto_update_checkbox: the QCheckBox defined in the pov_main.ui
            file. Its logic - to switch on/off automatic update of graphs -
            is managed here
        :param update_button: the QButton defined in the pov_main.ui
            file to trigger manual graph updates in case auto_update_checkbox
            is off.
        """
        super(ProbeGraphManager, self).__init__(tab_widget)
        # hang on to the workspace
        self.workspace = workspace
        self.layer_model = layer_model
        self.queue = queue
        # hang on to the tab widget
        self.tab_widget_object = tab_widget
        self.new_tab_button = QtWidgets.QToolButton()
        self.new_tab_button.setText("+")
        self.tab_widget_object.setCornerWidget(self.new_tab_button, corner=Qt.TopLeftCorner)
        self.tab_widget_object.clear()  # Delete all tabs that may have been created in the Designer
        self.auto_update_checkbox = auto_update_checkbox
        self.update_button = update_button
        # hold on to point probe locations (point probes are shared across tabs)
        self.point_probes: dict = {}
        # set up the first tab
        self.graphs: list = []
        self.selected_graph_index = -1
        self.max_tab_letter = "A"
        # hook things up so we know when the selected tab changes
        self.tab_widget_object.currentChanged[int].connect(self._handle_tab_change)
        self.drawChildGraph.connect(self._draw_child)
        # hook up signals relating to changes in the number of tabs
        self.new_tab_button.clicked.connect(self._add_tab)
        # hook up auto update vs manual update changes
        self.update_button.clicked.connect(self.handleActiveProductDatasetsChanged)
        self.update_button.clicked.connect(self._update_default_point_probe_graph)
        self.auto_update_checkbox.stateChanged.connect(self._on_auto_update_checkbox_state_changed)
        self.auto_update_checkbox.setCheckState(Qt.Unchecked)

    def _draw_child(self, child_name):
        # Redraw the graph tab identified by its letter name.
        for child in self.graphs:
            if child.myName == child_name:
                child._draw()
                break

    def set_up_tab(self, tab_index, do_increment_tab_letter=True):
        """Create a new tab at tab_index and add it to the list of graphs"""
        # increment our tab label letter if desired
        if do_increment_tab_letter:
            self.max_tab_letter = chr(ord(self.max_tab_letter) + 1)  # this will get strange after Z!
        # create our tab
        temp_widget = QtWidgets.QWidget()
        self.tab_widget_object.insertTab(tab_index, temp_widget, self.max_tab_letter)
        # create the associated graph display object
        graph = ProbeGraphDisplay(self, temp_widget, self.workspace, self.layer_model, self.queue, self.max_tab_letter)
        self.graphs.append(graph)
        # load up the layers for this new tab
        graph.set_possible_layers()
        # clone the previous tab
        if self.selected_graph_index != tab_index:
            # if we aren't setting up the initial tab, clone the current tab
            current_graph = self.graphs[self.selected_graph_index]
            graph.set_default_layer_selections([current_graph.xSelectedUUID, current_graph.ySelectedUUID])
            # give it a copy of the current full_data_selection or polygon
            if current_graph.full_data_selection:
                graph.setRegion(select_full_data=graph.full_data_selection)
            else:
                graph.setRegion(polygon_points=current_graph.polygon[:] if current_graph.polygon is not None else None)
            graph.checked = current_graph.checked
            # carry over the shared point probe, if one is active
            point_status, point_xy = self.point_probes.get(DEFAULT_POINT_PROBE, (None, None))
            point_xy = point_xy if point_status else None
            graph.setPoint(point_xy, rebuild=False)
        # Create the initial plot
        graph.rebuildPlot()
        # go to the tab we just created
        self.tab_widget_object.setCurrentIndex(tab_index)

    def handleActiveProductDatasetsChanged(self):
        """Used when the layer model signals that something about the layers
        has changed
        """
        # reload the layer list for the existing graphs; only the currently
        # visible tab is rebuilt immediately
        for graphObj in self.graphs:
            doRebuild = graphObj is self.graphs[self.selected_graph_index]
            graphObj.set_possible_layers(do_rebuild_plot=doRebuild)  # FIXME

    def current_graph_set_region(self, polygon_points=None, select_full_data=False):
        """Update the current region in the selected graph and rebuild its plot

        :return: Name of the current probe graph ('A', 'B', ...)

        Probably outdated comment (TODO):
        FUTURE, once the polygon is a layer, this signal will be unnecessary
        """
        return self.graphs[self.selected_graph_index].setRegion(
            polygon_points=polygon_points, select_full_data=select_full_data
        )

    def current_graph_has_polygon(self) -> bool:
        # True when the currently selected tab has a polygon region set.
        return self.graphs[self.selected_graph_index].polygon is not None

    def update_point_probe(self, probe_name=DEFAULT_POINT_PROBE, xy_pos=None, state=None):
        # Create/move/toggle a named point probe; emits pointProbeChanged
        # only when something actually changed.
        if xy_pos is None and state is None:
            if probe_name not in self.point_probes:
                # nothing to do
                return
            # they didn't ask to change anything
            # but they may want to refresh stuff
            state, xy_pos = self.point_probes[probe_name]
        elif probe_name not in self.point_probes:
            # new point
            if xy_pos is None:
                raise ValueError("Point probe '{}' does not exist".format(probe_name))
            # if this is a new point probe, then it must be enabled
            state = True if state is None else state
        else:
            old_state, old_xy_pos = self.point_probes[probe_name]
            if xy_pos is None:
                # state is what is changing
                xy_pos = old_xy_pos
            elif state is None:
                # they are updating the position only
                # we have to turn the probe back on
                state = True
            if old_state == state and old_xy_pos == xy_pos:
                # nothing has changed so no need to tell anyone
                return
            if old_state != state:
                LOG.info("Changing point probe '{}' state to '{}'".format(probe_name, "on" if state else "off"))
            if old_xy_pos != xy_pos:
                LOG.info("Changing point probe '{}' position to '{}'".format(probe_name, xy_pos))
        self.point_probes[probe_name] = [state, xy_pos]
        self.pointProbeChanged.emit(probe_name, state, xy_pos)

    def _update_default_point_probe_graph(self):
        # Re-apply the default point probe to all graphs (manual update path).
        probe_name = DEFAULT_POINT_PROBE
        point_probe = self.point_probes.get(probe_name, [None, None])
        self._update_point_probe_graph(probe_name, *point_probe)

    def _update_point_probe_graph(self, probe_name, state, xy_pos):
        # need to set the point for all graphs because the point probe
        # is used across all plots
        for idx, graph in enumerate(self.graphs):
            # only the currently visible tab is rebuilt immediately
            rebuild = idx == self.selected_graph_index
            if state:
                graph.setPoint(xy_pos, rebuild=rebuild)
            elif state is not None:
                # if it is False/"off"
                graph.setPoint(None, rebuild=rebuild)

    def current_point_probe_status(self, probe_name):
        # Return (enabled, xy_pos) for the named probe; (False, None) if unknown.
        if probe_name not in self.point_probes:
            return False, None
        return self.point_probes[probe_name]

    def toggle_point_probe(self, probe_name, state=None):
        # Flip (or force) the enabled state of an existing point probe.
        if probe_name not in self.point_probes:
            LOG.info("No point probe to toggle")
            return
        old_state = self.point_probes[probe_name][0]
        state = state if state is not None else not old_state
        self.update_point_probe(probe_name, state=state)

    def set_default_layer_selections(self, layer_uuids):
        """Set the UUIDs for the current graph if it doesn't have a polygon"""
        return self.graphs[self.selected_graph_index].set_default_layer_selections(layer_uuids)

    def on_region_probe_tool_selected(self):
        # Lazily create the first graph tab the first time the region probe
        # tool is activated.
        if len(self.graphs) > 0:
            return
        # There is no graph tab yet, we must create one
        self.set_up_tab(self.tab_widget_object.count(), do_increment_tab_letter=False)
        current_name = self.graphs[self.selected_graph_index].getName()
        self.didChangeTab.emit((current_name,))

    def _add_tab(self):
        # Handler for the "+" corner button: clone the current tab.
        LOG.info("Creating new area probe graph tab.")
        old_name = self.graphs[self.selected_graph_index].getName()
        self.set_up_tab(self.tab_widget_object.count())
        # notify everyone that we cloned a polygon (if we did)
        if self.graphs[self.selected_graph_index].polygon is not None:
            new_name = self.graphs[-1].getName()
            self.didClonePolygon.emit(old_name, new_name)
        current_name = self.graphs[self.selected_graph_index].getName()
        self.didChangeTab.emit((current_name,))

    def _handle_tab_change(self):
        """Deal with the fact that the tab changed in the tab widget"""
        new_tab_index = self.tab_widget_object.currentIndex()
        self.selected_graph_index = new_tab_index
        self.graphs[self.selected_graph_index].rebuildPlot()
        current_name = self.graphs[self.selected_graph_index].getName()
        self.didChangeTab.emit((current_name,))

    def _on_auto_update_checkbox_state_changed(self, state):
        # Auto-update ON: graphs follow the layer model / probe signals and
        # the manual update button is disabled; OFF: the reverse.
        if self.auto_update_checkbox.isChecked():
            self.update_button.setEnabled(False)
            self.layer_model.didFinishActivateProductDatasets.connect(self.handleActiveProductDatasetsChanged)
            self.pointProbeChanged.connect(self._update_point_probe_graph)
        else:
            self.layer_model.didFinishActivateProductDatasets.disconnect(self.handleActiveProductDatasetsChanged)
            self.pointProbeChanged.disconnect(self._update_point_probe_graph)
            self.update_button.setEnabled(True)
class ProbeGraphDisplay(object):
"""The ProbeGraphDisplay controls one tab of the Area Probe Graphs.
The ProbeGraphDisplay handles generating a displaying a single graph.
"""
# the most data we are willing to plot in a scatter plot
# this limit was determined experimentally on Eva's laptop for glance, may need to revisit this
MAX_SCATTER_PLOT_DATA = 1e7
# the default number of bins for the histogram and density scatter plot
DEFAULT_NUM_BINS = 100
def __init__(self, manager, qt_parent, workspace, layer_model: LayerModel, queue, name_str):
    """build the graph tab controls

    :param manager: the owning ProbeGraphManager
    :param qt_parent: the QWidget this tab's controls are laid out into
    :param layer_model:
    :param name_str: this tab's display name ('A', 'B', ...)
    :return:
    """
    # hang on to our name
    self.myName = name_str
    # plotting related controls
    self.toolbar = None
    self.yCheckBox = None
    self.xDropDown = None
    self.yDropDown = None
    # internal objects to reference for info and data
    self.polygon = None
    self.point = None
    self.full_data_selection = False
    # save the workspace and queue for use later
    self.manager = manager
    self.workspace = workspace
    self.layer_model = layer_model
    self.queue = queue
    # internal values that control the behavior of plotting and controls
    self.xSelectedUUID = None
    self.ySelectedUUID = None
    self.xCurrentDatasetUUID = None
    self.yCurrentDatasetUUID = None
    self.uuidMap = None  # this is needed because the drop downs can't properly handle objects as ids
    self._stale = True  # whether or not the plot needs to be redrawn
    # a figure instance to plot on
    self.figure = Figure(figsize=(3, 3), dpi=72)
    # this is the Canvas Widget that displays the `figure`
    # it takes the `figure` instance as a parameter to __init__
    self.canvas = FigureCanvas(self.figure)
    self.canvas.setMinimumSize(100, 100)
    # make sure our figure is clear
    self.clearPlot()
    # make a matplotlib toolbar to attach to the graph
    self.toolbar = CustomNavigationToolbar(self.canvas, qt_parent)
    # create our selection controls
    # the label for the x selection
    xLabel = QtWidgets.QLabel("X layer:")
    # the check box that turns on and off comparing to a y layer
    self.yCheckBox = QtWidgets.QCheckBox("vs Y layer:")
    self.yCheckBox.setToolTip("Plot X layer data vs Y layer when this is checked.")
    self.yCheckBox.stateChanged.connect(self.vsChecked)
    # the drop down for selecting the x layer
    self.xDropDown = QtWidgets.QComboBox(qt_parent)
    self.xDropDown.setToolTip("The X layer data to use for plotting.")
    self.xDropDown.activated.connect(self.xSelected)
    # the drop down for selecting the y layer
    self.yDropDown = QtWidgets.QComboBox(qt_parent)
    self.yDropDown.setDisabled(True)
    self.yDropDown.setToolTip("The Y layer data to use for plotting.")
    self.yDropDown.activated.connect(self.ySelected)
    # set the layout
    # Note: add in a grid is (widget, row#, col#) or (widget, row#, col#, row_span, col_span)
    layout = QtWidgets.QGridLayout()
    layout.addWidget(self.toolbar, 1, 1, 1, 3)
    layout.addWidget(self.canvas, 2, 1, 1, 3)
    layout.addWidget(xLabel, 3, 1)
    layout.addWidget(self.xDropDown, 3, 2, 1, 2)
    layout.addWidget(self.yCheckBox, 4, 1)
    layout.addWidget(self.yDropDown, 4, 2, 1, 2)
    qt_parent.setLayout(layout)
def set_possible_layers(self, do_rebuild_plot=False):
    """Given a list of layer UUIDs, set the names and UUIDs in the drop downs"""
    # make a uuid map because the mapping in a combo box doesn't work with objects
    self.uuidMap = {}
    # clear out the current lists
    self.xDropDown.clear()
    self.yDropDown.clear()
    # fill up our lists of layers (both combos get identical items)
    for layer in self.layer_model.get_probeable_layers():
        uuid_string = str(layer.uuid)
        self.xDropDown.addItem(layer.descriptor, uuid_string)
        self.yDropDown.addItem(layer.descriptor, uuid_string)
        self.uuidMap[uuid_string] = layer.uuid
    # if possible, set the selections back to the way they were
    need_rebuild = False
    x_index = self.xDropDown.findData(str(self.xSelectedUUID))
    if x_index >= 0:
        # Selection didn't change
        self.xDropDown.setCurrentIndex(x_index)
    elif self.xDropDown.count() > 0:
        # Setting to a new layer
        need_rebuild = True
        self.xSelectedUUID = self.uuidMap[self.xDropDown.itemData(0)]
        self.xDropDown.setCurrentIndex(0)
    else:
        # we had something selected but now there is nothing new to select
        need_rebuild = need_rebuild or self.xSelectedUUID is not None
        self.xSelectedUUID = None
    y_index = self.yDropDown.findData(str(self.ySelectedUUID))
    if y_index >= 0:
        # Selection didn't change
        self.yDropDown.setCurrentIndex(y_index)
    elif self.yDropDown.count() > 0:
        # Setting to a new layer; only matters when "vs Y" plotting is on
        need_rebuild = need_rebuild or self.yCheckBox.isChecked()
        self.ySelectedUUID = self.uuidMap[self.yDropDown.itemData(0)]
        self.yDropDown.setCurrentIndex(0)
    else:
        # we had something selected but now there is nothing new to select
        need_rebuild = need_rebuild or self.ySelectedUUID is not None
        self.ySelectedUUID = None
    need_rebuild |= self._check_active_datasets_changed()
    # refresh the plot
    self._stale = need_rebuild
    if do_rebuild_plot:
        # Rebuild the plot (stale is used to determine if actual rebuild
        # is needed)
        self.rebuildPlot()
def _check_active_datasets_changed(self):
    # check whether active datasets have changed. If so, update stored
    # dataset uuids and indicate that the graph needs to be rebuilt.
    need_rebuild = False
    x_layer = self.layer_model.get_layer_by_uuid(self.xSelectedUUID)
    x_active_product_dataset = None if not x_layer else x_layer.get_first_active_product_dataset()
    if not x_active_product_dataset:
        # X dataset disappeared; rebuild only if we previously had one
        if self.xCurrentDatasetUUID is not None:
            need_rebuild = True
            self.xCurrentDatasetUUID = None
    elif x_active_product_dataset.uuid != self.xCurrentDatasetUUID:
        need_rebuild = True
        self.xCurrentDatasetUUID = x_active_product_dataset.uuid
    y_layer = self.layer_model.get_layer_by_uuid(self.ySelectedUUID)
    y_active_product_dataset = None if not y_layer else y_layer.get_first_active_product_dataset()
    if not y_active_product_dataset:
        # Y changes only force a rebuild while "vs Y" plotting is enabled
        if self.yCurrentDatasetUUID is not None:
            need_rebuild |= self.yCheckBox.isChecked()
            self.yCurrentDatasetUUID = None
    elif y_active_product_dataset.uuid != self.yCurrentDatasetUUID:
        need_rebuild |= self.yCheckBox.isChecked()
        self.yCurrentDatasetUUID = y_active_product_dataset.uuid
    return need_rebuild
def set_default_layer_selections(self, layer_uuids):
    """Pre-select the X (and optionally Y) layer drop downs.

    Does nothing when a polygon region is already set. The first UUID in
    `layer_uuids` selects the X layer, the second (if any) the Y layer.
    """
    # only set the defaults if we don't have a polygon yet
    if self.polygon is not None:
        return
    if len(layer_uuids) >= 1:
        xIndex = self.xDropDown.findData(str(layer_uuids[0]))
        if xIndex >= 0:
            self.xDropDown.setCurrentIndex(xIndex)
            self.xSelectedUUID = layer_uuids[0]
        else:
            LOG.error("Tried to set probe graph to non-existent layer: %s", layer_uuids[0])
    if len(layer_uuids) >= 2:
        # BUG FIX: the Y index was looked up in xDropDown (copy-paste);
        # it only worked because both combos hold identical item lists.
        yIndex = self.yDropDown.findData(str(layer_uuids[1]))
        if yIndex >= 0:
            self.yDropDown.setCurrentIndex(yIndex)
            self.ySelectedUUID = layer_uuids[1]
        else:
            LOG.error("Tried to set probe graph to non-existent layer: %s", layer_uuids[1])
@property
def checked(self):
    # True while the "vs Y layer" checkbox is ticked.
    return self.yCheckBox.isChecked()

@checked.setter
def checked(self, is_checked):
    return self.yCheckBox.setChecked(is_checked)
def xSelected(self):
    """React to a selection in the X layer drop down."""
    previous = str(self.xSelectedUUID)
    chosen = self.xDropDown.itemData(self.xDropDown.currentIndex())
    self.xSelectedUUID = self.uuidMap[chosen]
    # only redraw when the selection actually changed
    if previous != chosen:
        self._stale = True
        self.rebuildPlot()
def ySelected(self):
    """React to a selection in the Y layer drop down."""
    previous = str(self.ySelectedUUID)
    chosen = self.yDropDown.itemData(self.yDropDown.currentIndex())
    self.ySelectedUUID = self.uuidMap[chosen]
    # only redraw when the selection changed and "vs Y" plotting is on
    if (previous != chosen) and self.yCheckBox.isChecked():
        self._stale = True
        self.rebuildPlot()
def vsChecked(self):
    """React to the 'vs Y layer' checkbox being toggled."""
    # enable the Y drop down only while "vs Y" plotting is requested
    plot_versus = self.yCheckBox.isChecked()
    self.yDropDown.setDisabled(not plot_versus)
    # the plot shape changed, so force a redraw
    self._stale = True
    self.rebuildPlot()
def setRegion(self, polygon_points=None, select_full_data=False):
    """Set the region for this graph as polygon selection or full data."""
    assert polygon_points is None or not select_full_data, (  # nosec B101
        "Must not give both 'polygon_points' and True for 'select_full_data':"
        " Defining region by polygon and as full data are mutually exclusive."
    )
    # Even with assertions switched off we will get a valid state here: a
    # polygonal region wins over the full data selection: the first one will
    # have a visual echo, the second one not, so this is more likely to give
    # a consistent state.
    self.polygon = polygon_points
    self.full_data_selection = False if self.polygon is not None else select_full_data
    # regenerate the plot
    self._stale = True
    self.rebuildPlot()
    # return our name to be used for the polygon name
    return self.myName
def setPoint(self, coordinates, rebuild=True):
    """Remember the point probe location and optionally redraw now.

    Callers may pass rebuild=False to batch several updates and redraw
    later.
    """
    self.point = coordinates
    self._stale = True
    if not rebuild:
        return
    self.rebuildPlot()
def getName(self):
    """Return this graph tab's display name ('A', 'B', ...)."""
    tab_name = self.myName
    return tab_name
def rebuildPlot(self):
    """Rebuild the plot from the current selections and bands.

    Should be called only when the selections change in some way; a
    non-stale plot is left untouched.  The actual work is scheduled on
    the task queue.
    """
    if not self._stale:
        LOG.debug("Plot doesn't need to be rebuilt")
        return
    # should we be plotting vs Y?
    plot_versus = self.yCheckBox.isChecked()
    task_name = "%s_%s_region_plotting" % (self.xSelectedUUID, self.ySelectedUUID)
    if self.full_data_selection:
        task_description = "Creating plot for full data"
    else:
        task_description = "Creating plot for region probe data"
    task = self._rebuild_plot_task(
        self.xSelectedUUID,
        self.ySelectedUUID,
        self.polygon,
        self.point,
        plot_versus=plot_versus,
        plot_full_data=self.full_data_selection,
    )
    self.queue.add(task_name, task, task_description, interactive=True)
    # Assume the queued task resolves; otherwise we might draw multiple times.
    self._stale = False
def _rebuild_plot_task(  # noqa: C901
    self, x_layer_uuid, y_layer_uuid, polygon, point_xy, plot_versus=False, plot_full_data=True
):
    """Generator task that gathers probe data and redraws this graph.

    Yields TASK_DOING/TASK_PROGRESS status dicts for the task queue as it
    works.  Draws a histogram of the X layer alone, or a density scatter
    plot of X vs Y when ``plot_versus`` is set; for any selection
    combination it does not understand, the figure is cleared.

    :param x_layer_uuid: UUID of the selected X layer (may be None)
    :param y_layer_uuid: UUID of the selected Y layer (may be None)
    :param polygon: polygon region points, or None
    :param point_xy: point-probe coordinates, or None
    :param plot_versus: when True, plot X against Y instead of a histogram
    :param plot_full_data: when True, use the full dataset instead of the
        polygon region
    """
    data_source_description = "full data" if plot_full_data else "polygon data"
    # resolve each layer UUID to its layer, active dataset and dataset UUID
    x_layer = self.layer_model.get_layer_by_uuid(x_layer_uuid)
    x_active_product_dataset = None if not x_layer else x_layer.get_first_active_product_dataset()
    x_uuid = None if not x_active_product_dataset else x_active_product_dataset.uuid
    y_layer = self.layer_model.get_layer_by_uuid(y_layer_uuid)
    y_active_product_dataset = None if not y_layer else y_layer.get_first_active_product_dataset()
    y_uuid = None if not y_active_product_dataset else y_active_product_dataset.uuid
    # if we are plotting only x and we have a selected x and a polygon
    have_x_layer = x_layer_uuid is not None
    have_y_layer = y_layer_uuid is not None
    should_plot = polygon is not None or plot_full_data
    if not plot_versus and have_x_layer and should_plot:
        yield {TASK_DOING: f"Probe Plot: Collecting {data_source_description}...", TASK_PROGRESS: 0.0}
        # get the data and info we need for this plot
        if x_active_product_dataset:
            if plot_full_data:
                data_polygon = self.workspace.get_content(x_active_product_dataset.uuid)
            else:
                data_polygon = self.workspace.get_content_polygon(x_active_product_dataset.uuid, polygon)
        else:
            data_polygon = np.array([])
        # convert to display units before plotting
        x_conv_func = x_layer.info[Info.UNIT_CONVERSION][1]
        data_polygon = x_conv_func(data_polygon)
        # NOTE(review): raises AttributeError when x_active_product_dataset
        # is None (empty-data branch above) -- confirm upstream guarantees.
        time = x_active_product_dataset.info[Info.DISPLAY_TIME]
        title = f"{time}"
        x_axis_label = x_layer.descriptor
        y_axis_label = "Count of data points"
        # get point probe value
        if x_active_product_dataset and point_xy:
            x_point = self.workspace.get_content_point(x_active_product_dataset.uuid, point_xy)
            x_point = x_conv_func(x_point)
        else:
            x_point = None
        # plot a histogram
        yield {TASK_DOING: "Probe Plot: Creating histogram plot", TASK_PROGRESS: 0.25}
        self.plotHistogram(data_polygon, title, x_point, x_axis_label, y_axis_label)
    # if we are plotting x vs y and have x, y, and a polygon
    elif plot_versus and have_x_layer and have_y_layer and should_plot:
        yield {TASK_DOING: f"Probe Plot: Collecting {data_source_description} (layer 1)...", TASK_PROGRESS: 0.0}
        name1 = x_layer.descriptor
        name2 = y_layer.descriptor
        if not x_active_product_dataset or not y_active_product_dataset:
            # one side has no active dataset: fall through with placeholder
            # data so the scatter plot is still drawn (empty)
            x_point = None
            y_point = None
            time1 = None
            time2 = None
            data1 = np.array([0])
            data2 = np.array([0])
        else:
            # get the data and info we need for this plot
            x_info = x_active_product_dataset.info
            y_info = y_active_product_dataset.info
            time1 = x_info[Info.DISPLAY_TIME]
            time2 = y_info[Info.DISPLAY_TIME]
            hires_uuid = self.workspace.lowest_resolution_uuid(x_uuid, y_uuid)
            # hires_coord_mask are the lat/lon coordinates of each of the
            # pixels in hires_data. The coordinates are (lat, lon) to resemble
            # the (Y, X) indexing of numpy arrays
            if plot_full_data:
                hires_coord_mask = None
                hires_data = self.workspace.get_content(hires_uuid)
            else:
                hires_coord_mask, hires_data = self.workspace.get_coordinate_mask_polygon(hires_uuid, polygon)
            x_conv_func = x_layer.info[Info.UNIT_CONVERSION][1]
            y_conv_func = y_layer.info[Info.UNIT_CONVERSION][1]
            yield {
                TASK_DOING: f"Probe Plot: Collecting {data_source_description} (layer 2)...",
                TASK_PROGRESS: 0.15,
            }
            # sample the other layer at the hires layer's coordinates so the
            # two arrays line up point-for-point
            if hires_uuid == x_uuid:
                # the hires data was from the X UUID
                data1 = x_conv_func(hires_data)
                if plot_full_data:
                    data2 = self.workspace.get_content(y_uuid)
                else:
                    data2 = self.workspace.get_content_coordinate_mask(y_uuid, hires_coord_mask)
                data2 = y_conv_func(data2)
            else:
                # the hires data was from the Y UUID
                data2 = y_conv_func(hires_data)
                if plot_full_data:
                    data1 = self.workspace.get_content(x_uuid)
                else:
                    data1 = self.workspace.get_content_coordinate_mask(x_uuid, hires_coord_mask)
                data1 = x_conv_func(data1)
            yield {TASK_DOING: "Probe Plot: Creating scatter plot...", TASK_PROGRESS: 0.25}
            if point_xy:
                x_point = self.workspace.get_content_point(x_uuid, point_xy)
                x_point = x_conv_func(x_point)
                y_point = self.workspace.get_content_point(y_uuid, point_xy)
                y_point = y_conv_func(y_point)
            else:
                x_point = None
                y_point = None
        # plot a scatter plot
        # drop samples where either side is NaN so the 2-D histogram is valid
        good_mask = ~(np.isnan(data1) | np.isnan(data2))
        data1 = data1[good_mask]
        data2 = data2[good_mask]
        self.plotDensityScatterplot(data1, name1, time1, data2, name2, time2, x_point, y_point)
    # if we have some combination of selections we don't understand, clear the figure
    else:
        yield {TASK_DOING: "Probe Plot: Clearing plot figure...", TASK_PROGRESS: 0.0}
        self.clearPlot()
    yield {TASK_DOING: "Probe Plot: Drawing plot...", TASK_PROGRESS: 0.95}
    # hand the draw back to the GUI thread via the manager's signal
    self.manager.drawChildGraph.emit(self.myName)
    yield {TASK_DOING: "Probe Plot: Done", TASK_PROGRESS: 1.0}
def _draw(self):
    # Render the matplotlib canvas immediately.
    self.canvas.draw()
def plotHistogram(self, data, title, x_point, x_label, y_label, numBins=None):
    """Plot a histogram of ``data`` on a fresh figure, labeled with ``title``.

    :param data: array of values; NaNs are dropped before binning
    :param title: text placed above the plot
    :param x_point: optional point-probe value; the bin containing it is
        colored red (the default bar color is blue, so red stands out)
    :param x_label: X axis label
    :param y_label: Y axis label
    :param numBins: number of histogram bins; defaults to
        ``self.DEFAULT_NUM_BINS``.  (BUG FIX: this parameter used to be
        accepted but silently ignored in favor of DEFAULT_NUM_BINS.)
    """
    self.figure.clf()
    axes = self.figure.add_subplot(111)
    bins = self.DEFAULT_NUM_BINS if numBins is None else numBins
    bars = axes.hist(data[~np.isnan(data)], bins=bins)
    if x_point is not None:
        # axes.hist returns (counts, bin_edges, patches); walk the patches
        # from the right and mark the first bar whose left edge is at or
        # below x_point -- i.e. the bin containing the probe value.
        for bar in bars[2][::-1]:
            if bar.xy[0] <= x_point:
                bar.set_color("red")
                break
    axes.set_title(title)
    axes.set_xlabel(x_label)
    axes.set_ylabel(y_label)
def plotDensityScatterplot(self, dataX, nameX, timeX, dataY, nameY, timeY, pointX, pointY):
    """Draw a binned density scatter plot (2-D histogram) of dataX vs dataY.

    :param dataX: values for the X axis (already unit-converted)
    :param nameX: descriptor used as the X axis label
    :param timeX: display time used as the plot title
    :param dataY: values for the Y axis (already unit-converted)
    :param nameY: descriptor used as the Y axis label
    :param timeY: display time of the Y data (currently unused here)
    :param pointX: optional point-probe X value to mark with a circle
    :param pointY: optional point-probe Y value to mark with a circle
    """
    # clear the figure and make a new subplot
    self.figure.clf()
    axes = self.figure.add_subplot(111)
    # figure out the range of the data
    # you might not be comparing the same units
    xmin_value = np.min(dataX)
    xmax_value = np.max(dataX)
    ymin_value = np.min(dataY)
    ymax_value = np.max(dataY)
    # bounds should be defined in the form [[xmin, xmax], [ymin, ymax]]
    bounds = [[xmin_value, xmax_value], [ymin_value, ymax_value]]
    # make the binned density map for this data set
    density_map, _, _ = np.histogram2d(dataX, dataY, bins=self.DEFAULT_NUM_BINS, range=bounds)
    # mask out zero counts; flip because y goes the opposite direction in an imshow graph
    density_map = np.flipud(np.transpose(np.ma.masked_array(density_map, mask=density_map == 0)))
    # display the density map data on a logarithmic color scale
    img = axes.imshow(
        density_map,
        extent=[xmin_value, xmax_value, ymin_value, ymax_value],
        aspect="auto",
        interpolation="nearest",
        norm=LogNorm(),
    )
    if pointX is not None:
        # freeze autoscale so the probe marker cannot change the view
        axes.set_autoscale_on(False)
        axes.plot(
            pointX,
            pointY,
            marker="o",
            markerfacecolor="white",
            markeredgecolor="black",
            markersize=10,
            markeredgewidth=1.0,
        )
        axes.set_autoscale_on(True)
    colorbar = self.figure.colorbar(img)
    colorbar.set_label("log(count of data points)")
    # set the various text labels
    axes.set_xlabel(f"{nameX}")
    axes.set_ylabel(f"{nameY}")
    axes.set_title(timeX)
    # draw the x vs y line
    self._draw_xy_line(axes)
def clearPlot(self):
    """Clear our plot"""
    # drop the full-data flag so the selection state matches the
    # now-empty figure
    self.full_data_selection = False
    self.figure.clf()
def _draw_xy_line(self, axes):
    """Overlay a dashed X = Y reference line without changing the view.

    The current bounds are captured first and restored afterwards so the
    reference line cannot trigger autoscaling.
    """
    xlo, xhi = axes.get_xbound()
    ylo, yhi = axes.get_ybound()
    # the visible segment of x = y within the current view
    span = [max(xlo, ylo), min(xhi, yhi)]
    axes.plot(span, span, "--", color="k", label="X = Y")
    # reset the bounds
    axes.set_xbound(xlo, xhi)
    axes.set_ybound(ylo, yhi)
| ssec/sift | uwsift/view/probes.py | probes.py | py | 34,849 | python | en | code | 45 | github-code | 36 | [
{
"api_name": "matplotlib.backends.qt_editor.figureoptions",
"line_number": 26,
"usage_type": "name"
},
{
"api_name": "logging.getLogger",
"line_number": 28,
"usage_type": "call"
},
{
"api_name": "matplotlib.backends.backend_qt5agg.NavigationToolbar2QT",
"line_number": 32,
... |
19499646717 | # -*- coding: utf-8 -*-
import sys
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtGui import QApplication, QMainWindow, QWidget, QPushButton, QDialog, QMessageBox, QTableWidgetItem, QListWidgetItem
from PyQt4.QtCore import QString, QSettings
from config import user, password, host, db_name
import pymysql
import db
try:
    # Standard pyuic boilerplate: use the encoding-aware translate()
    # overload when this PyQt4 build provides UnicodeUTF8.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Older PyQt4 builds lack UnicodeUTF8; fall back to the 3-argument form.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
# Module-level MySQL connection shared by every window in the app.
# Credentials come from config.py.  NOTE(review): created at import time,
# so the app fails to start if the database is unreachable.
connection = pymysql.connect(
    user=user,
    password=password,
    host=host,
    port=3306,
    database=db_name,
    cursorclass=pymysql.cursors.DictCursor  # rows come back as dicts
)
class Auth(QMainWindow):
    """Login window.

    Authenticates against the shared MySQL connection and opens the
    phone book on success; links to registration and password recovery.
    """

    def __init__(self):
        super(Auth, self).__init__()
        uic.loadUi('UI/auth.ui', self)
        # button / checkbox wiring
        self.pushButton.clicked.connect(self.loginfunction)
        self.pushButton_2.clicked.connect(self.show_register_window)
        self.pushButton_3.clicked.connect(self.close)
        self.checkBox_2.stateChanged.connect(self.show_password)
        self.checkBox.stateChanged.connect(self.remember_me)
        # the "forgot password" link is a plain label, so hook its mouse event
        self.label_2.mousePressEvent = self.show_password_window
        self.lineEdit_2.setEchoMode(QtGui.QLineEdit.Password)
        self.lineEdit.clearFocus()

    def show_register_window(self):
        # Keep the window as an attribute so it is not garbage-collected.
        self.register_window = Register()
        self.register_window.show()

    def show_password_window(self, event):
        # Open the password-recovery window from the label click.
        self.password_window = ForgetPassword()
        self.password_window.show()

    def show_phonebook_window(self, username):
        # Replace the login window with the phone book for this user.
        self.phonebook_window = PhoneBook(username)
        self.phonebook_window.show()
        self.close()

    def loginfunction(self):
        """Check the entered credentials and open the phone book on success."""
        global connection
        lgn = self.lineEdit.text().toUtf8()
        psw = self.lineEdit_2.text().toUtf8()
        if not db.auth(connection,lgn,psw):
            error = QMessageBox()
            error.setWindowTitle(u'Ошибка')
            error.setText(u'Неверная пара логин-пароль')
            error.setIcon(QMessageBox.Warning)
            error.setStandardButtons(QMessageBox.Ok)
            error.exec_()
        else:
            global SETTINGS
            lgn = str(lgn)
            # Persist the login when "remember me" is on.
            # NOTE(review): QSettings.value() returns a QVariant in PyQt4, so
            # the `!= False` comparison relies on QVariant coercion -- verify.
            if SETTINGS.value('remember_me') != False:
                SETTINGS.setValue('user', lgn)
            self.show_phonebook_window(lgn)

    def show_password(self, state):
        # Toggle password visibility with the "show password" checkbox.
        if state == QtCore.Qt.Checked:
            self.lineEdit_2.setEchoMode(QtGui.QLineEdit.Normal)
        else:
            self.lineEdit_2.setEchoMode(QtGui.QLineEdit.Password)

    def remember_me(self, state):
        # Persist the "remember me" checkbox state.
        global SETTINGS
        if state == QtCore.Qt.Checked:
            SETTINGS.setValue('remember_me', True)
        else:
            SETTINGS.setValue('remember_me', False)
class Register(QMainWindow):
    """Registration window: collects login, password (entered twice) and
    birthday, then creates the account via db.register()."""

    def __init__(self):
        super(Register, self).__init__()
        uic.loadUi('UI/register.ui', self)
        # hide both password fields
        self.lineEdit_2.setEchoMode(QtGui.QLineEdit.Password)
        self.lineEdit_3.setEchoMode(QtGui.QLineEdit.Password)
        self.pushButton.clicked.connect(self.registerfunction)
        self.pushButton_2.clicked.connect(self.close)

    def _popup(self, title, text, icon):
        # Small helper: modal message box with a single OK button.
        box = QMessageBox()
        box.setWindowTitle(title)
        box.setText(text)
        box.setIcon(icon)
        box.setStandardButtons(QMessageBox.Ok)
        box.exec_()

    def registerfunction(self):
        """Validate the form and register the new account."""
        global connection
        lgn = self.lineEdit.text().toUtf8()
        psw = self.lineEdit_2.text().toUtf8()
        psw2 = self.lineEdit_3.text().toUtf8()
        birthday = str(self.dateEdit.dateTime().toString('yyyy-MM-dd'))
        if not (lgn and psw and psw2):
            self._popup(u'Ошибка', u'Все поля должны быть заполнены', QMessageBox.Warning)
            return
        if psw != psw2:
            self._popup(u'Ошибка', u'Пароли не совпадают', QMessageBox.Warning)
            return
        if not db.register(connection, lgn, psw, birthday):
            self._popup(u'Ошибка', u'Этот логин уже зарегистрирован', QMessageBox.Warning)
            return
        self._popup(u'Успех', u'Вы успешно зарегистрированы', QMessageBox.Information)
        self.close()
class ForgetPassword(QMainWindow):
    """Password-recovery window.

    NOTE(review): both buttons currently just close the window -- no
    actual reset logic is wired up yet.
    """
    def __init__(self):
        super(ForgetPassword, self).__init__()
        uic.loadUi('UI/password.ui', self)
        self.pushButton_2.clicked.connect(self.close)
        self.pushButton_3.clicked.connect(self.close)
class PhoneBook(QMainWindow):
    """Main window: the user's contacts grouped by first letter across
    fourteen per-letter-range tables, with add/edit/delete actions and a
    weekly birthday reminder shown on startup."""

    # (letters, table attribute name) pairs.  Tuples of single-character
    # utf-8 literals are used (NOT iterated strings) because under Python 2
    # each Cyrillic character is two bytes, matching the name[:2] slice in
    # show_table().
    _LETTER_TABLES = (
        (('А', 'а', 'Б', 'б'), 'tableWidget'),
        (('В', 'в', 'Г', 'г'), 'tableWidget_2'),
        (('Д', 'д', 'Е', 'е'), 'tableWidget_3'),
        (('Ж', 'ж', 'З', 'з', 'И', 'и', 'Й', 'й'), 'tableWidget_4'),
        (('К', 'к', 'Л', 'л'), 'tableWidget_5'),
        (('М', 'м', 'Н', 'н'), 'tableWidget_6'),
        (('О', 'о', 'П', 'п'), 'tableWidget_7'),
        (('Р', 'р', 'С', 'с'), 'tableWidget_8'),
        (('Т', 'т', 'У', 'у'), 'tableWidget_13'),
        (('Ф', 'ф', 'Х', 'х'), 'tableWidget_9'),
        (('Ц', 'ц', 'Ч', 'ч', 'Ш', 'ш', 'Щ', 'щ'), 'tableWidget_10'),
        (('Ъ', 'ъ', 'Ы', 'ы', 'Ь', 'ь'), 'tableWidget_11'),
        (('Э', 'э', 'Ю', 'ю', 'Я', 'я'), 'tableWidget_12'),
    )
    # everything that is not a recognised Cyrillic letter goes here
    _FALLBACK_TABLE = 'tableWidget_14'

    def __init__(self, login):
        super(PhoneBook, self).__init__()
        uic.loadUi('UI/phonebook.ui', self)
        self.username = login
        self.label_2.setText(_translate("MainWindow", self.username, None))
        self.pushButton.clicked.connect(self.add_contact)
        self.pushButton_2.clicked.connect(self.edit_contact)
        self.pushButton_3.clicked.connect(self.delete_contact)
        self.pushButton_4.clicked.connect(self.show_table)
        self.pushButton_5.clicked.connect(self.exit)
        self.show()
        self.show_table()
        self.show_birthdays()

    def show_birthdays(self):
        """Pop up a reminder listing contacts with birthdays this week."""
        global connection
        birthdays = db.week_birthday(connection, self.username)
        if birthdays:
            success = QMessageBox()
            success.setWindowTitle(u'Дни рождения')
            items = _translate("MainWindow", 'Дни рождения на ближайшую неделю: \n', None)
            for contact in birthdays:
                items += _translate("MainWindow", contact['last_name'], None)+' '+_translate("MainWindow", contact['first_name'], None)+': '+str(contact['birthday'])+'\n'
            success.setText(items)
            success.setIcon(QMessageBox.Information)
            success.setStandardButtons(QMessageBox.Ok)
            success.exec_()
        else:
            success = QMessageBox()
            success.setWindowTitle(u'Дни рождения')
            success.setText(u'Нет дней рождения на этой неделе')
            success.setIcon(QMessageBox.Information)
            success.setStandardButtons(QMessageBox.Ok)
            success.exec_()

    def exit(self):
        """Log out: clear the remembered session and return to the login window."""
        global SETTINGS
        SETTINGS.setValue('remember_me', False)
        SETTINGS.setValue('user', None)
        self.auth_window = Auth()
        self.auth_window.show()
        self.close()

    def add_contact(self):
        # Open the "add contact" dialog for the current user.
        self.add_contact_window = AddContact(self.username)
        self.add_contact_window.show()

    def delete_contact(self):
        # Open the "delete contact" dialog for the current user.
        self.delete_contact_window = DeleteContact(self.username)
        self.delete_contact_window.show()

    def edit_contact(self):
        # Open the "edit contact" dialog for the current user.
        self.edit_contact_window = EditContact(self.username)
        self.edit_contact_window.show()

    def get_contacts(self, username):
        """Return the user's contacts as a list of dict rows."""
        global connection
        return db.get_contacts(connection, username)

    def show_table(self):
        """Refill every per-letter table with the user's contacts.

        BUG FIX: the row counters (i1..i14) used to be reset to 0 inside
        the contact loop, so every contact was inserted at row 0 and the
        tables displayed contacts in reverse order while the counter
        increments were dead code.  Rows are now appended at rowCount(),
        preserving database order.
        """
        contacts = self.get_contacts(self.username)
        # map the first character (2 utf-8 bytes) to its destination table
        table_for_letter = {}
        for letters, attr in self._LETTER_TABLES:
            table = getattr(self, attr)
            for letter in letters:
                table_for_letter[letter] = table
        fallback = getattr(self, self._FALLBACK_TABLE)
        # clear all fourteen tables before refilling
        all_tables = set(table_for_letter.values())
        all_tables.add(fallback)
        for table in all_tables:
            table.setRowCount(0)
        for row_data in contacts:
            name = row_data['last_name'] + ' ' + row_data['first_name']
            phone_number = row_data['phone_number']
            birthday = row_data['birthday']
            letter = name[:2]  # first Cyrillic character = first two utf-8 bytes
            table = table_for_letter.get(letter, fallback)
            row = table.rowCount()
            table.insertRow(row)
            table.setItem(row, 0, QTableWidgetItem(_translate("MainWindow", name, None)))
            table.setItem(row, 1, QTableWidgetItem(phone_number))
            table.setItem(row, 2, QTableWidgetItem(str(birthday)))
class AddContact(QWidget):
    """Dialog for adding a single contact for the current user."""

    def __init__(self, login):
        super(AddContact, self).__init__()
        uic.loadUi('UI/add.ui', self)
        self.username = login
        self.pushButton.clicked.connect(self.addfunction)

    def _popup(self, title, text, icon):
        # Modal message box with a single OK button.
        box = QMessageBox()
        box.setWindowTitle(title)
        box.setText(text)
        box.setIcon(icon)
        box.setStandardButtons(QMessageBox.Ok)
        box.exec_()

    def addfunction(self):
        """Validate the form and store the contact via db.add_contact()."""
        global connection
        first_name = self.lineEdit.text().toUtf8()
        last_name = self.lineEdit_2.text().toUtf8()
        phone_number = self.lineEdit_3.text().toUtf8()
        birthday = str(self.dateEdit.dateTime().toString('yyyy-MM-dd'))
        if not (first_name and last_name and phone_number):
            self._popup(u'Ошибка', u'Все поля должны быть заполнены', QMessageBox.Warning)
            return
        if not db.add_contact(connection, self.username, first_name, last_name, phone_number, birthday):
            self._popup(u'Ошибка', u'Такой контакт уже зарегистрирован', QMessageBox.Warning)
            return
        self._popup(u'Успех', u'Контакт успешно добавлен', QMessageBox.Information)
        self.close()
class DeleteContact(QWidget):
    """Dialog that lists the user's contacts and deletes the selected one."""

    def __init__(self, login):
        super(DeleteContact, self).__init__()
        uic.loadUi('UI/delete.ui', self)
        self.username = login
        self.pushButton.clicked.connect(self.delete_item)
        global connection
        # fill the list with "Last First: phone" entries
        contacts = db.get_contacts(connection, self.username)
        for row_data in contacts:
            name = row_data['last_name'] +' '+row_data['first_name']
            phone_number = row_data['phone_number']
            self.listWidget.addItem(_translate("MainWindow", name, None) + ': ' + phone_number)

    def delete_item(self):
        """Delete the currently selected contact, or warn if none is chosen."""
        if not self.listWidget.currentItem():
            error = QMessageBox()
            error.setWindowTitle(u'Ошибка')
            error.setText(u'Выберите контакт для удаления')
            error.setIcon(QMessageBox.Warning)
            error.setStandardButtons(QMessageBox.Ok)
            error.exec_()
        else:
            global connection
            # list entries are "name: phone"; split back into the two parts
            data = self.listWidget.currentItem().text().toUtf8()
            data = str(data).split(': ')
            db.delete_contact(connection, self.username, data[0], data[1])
            success = QMessageBox()
            success.setWindowTitle(u'Успех')
            success.setText(u'Контакт успешно удален')
            success.setIcon(QMessageBox.Information)
            success.setStandardButtons(QMessageBox.Ok)
            success.exec_()
            self.close()
class EditContact(QWidget):
    """Dialog for editing an existing contact.

    The user picks a contact in the list, its fields are loaded into the
    form, and the save button persists the changes via db.edit_contact().
    """

    def __init__(self, login):
        super(EditContact, self).__init__()
        uic.loadUi('UI/edit.ui', self)
        self.username = login
        # BUG FIX: initialise explicitly -- previously this attribute only
        # existed after item_clicked(), so saving with manually filled
        # fields but no selection raised AttributeError.
        self.contact_id = None
        self.listWidget.itemClicked.connect(self.item_clicked)
        self.pushButton.clicked.connect(self.edit_item)
        global connection
        # fill the list with "Last First: phone" entries
        contacts = db.get_contacts(connection, self.username)
        for row_data in contacts:
            name = row_data['last_name'] + ' ' + row_data['first_name']
            phone_number = row_data['phone_number']
            self.listWidget.addItem(_translate("MainWindow", name, None) + ': ' + phone_number)

    def item_clicked(self, item):
        """Load the clicked contact's data into the edit fields."""
        data = self.listWidget.currentItem().text().toUtf8()
        data = str(data).split(': ')
        contact_info = db.find_contact(connection, self.username, data[0], data[1])
        self.lineEdit.setText(_translate("MainWindow", contact_info['first_name'], None))
        self.lineEdit_2.setText(_translate("MainWindow", contact_info['last_name'], None))
        self.lineEdit_3.setText(contact_info['phone_number'])
        self.dateEdit.setDate(contact_info['birthday'])
        self.contact_id = contact_info['id']

    def edit_item(self):
        """Validate the form and persist the edited contact."""
        global connection
        first_name = self.lineEdit.text().toUtf8()
        last_name = self.lineEdit_2.text().toUtf8()
        phone_number = str(self.lineEdit_3.text())
        birthday = str(self.dateEdit.dateTime().toString('yyyy-MM-dd'))
        # require both non-empty fields AND a previously selected contact
        if self.contact_id is None or not (first_name and last_name and phone_number):
            error = QMessageBox()
            error.setWindowTitle(u'Ошибка')
            error.setText(u'Выберите контакт для редактирования')
            error.setIcon(QMessageBox.Warning)
            error.setStandardButtons(QMessageBox.Ok)
            error.exec_()
        else:
            db.edit_contact(connection, self.username, self.contact_id, first_name, last_name, phone_number, birthday)
            success = QMessageBox()
            success.setWindowTitle(u'Успех')
            success.setText(u'Контакт успешно изменен')
            success.setIcon(QMessageBox.Information)
            success.setStandardButtons(QMessageBox.Ok)
            success.exec_()
            self.close()
# Persistent application settings: the "remember me" flag and the saved
# username.  Seed defaults on first run so later reads never miss.
SETTINGS = QSettings('app', 'dev')
if not SETTINGS.contains('remember_me'):
    SETTINGS.setValue('remember_me', False)
if not SETTINGS.contains('user'):
    SETTINGS.setValue('user', None)
def application():
    """App entry point: open PhoneBook directly when "remember me" is
    active and a user is stored, otherwise show the login window."""
    app = QtGui.QApplication(sys.argv)
    window = None
    global SETTINGS
    # NOTE(review): QSettings.value() returns a QVariant in PyQt4, so the
    # `!= False` / `!= None` comparisons rely on QVariant coercion -- verify.
    if SETTINGS.value('remember_me') != False:
        if SETTINGS.value('user') != None:
            # NOTE(review): toByteArray() yields a QByteArray while the login
            # flow stores a plain str -- confirm both work downstream.
            username = SETTINGS.value('user').toByteArray()
            window = PhoneBook(username)
        else:
            window = Auth()
    else:
        window = Auth()
    window.show()
    sys.exit(app.exec_())
if __name__ == '__main__':
application() | den4ik-kovalev/phone_book | phone_book/app.py | app.py | py | 21,771 | python | en | code | 0 | github-code | 36 | [
{
"api_name": "PyQt4.QtGui.QApplication",
"line_number": 14,
"usage_type": "attribute"
},
{
"api_name": "PyQt4.QtGui",
"line_number": 14,
"usage_type": "name"
},
{
"api_name": "PyQt4.QtGui.QApplication.translate",
"line_number": 16,
"usage_type": "call"
},
{
"api_... |
21672257496 | import os
from pathlib import Path
import secrets
import uuid
from PIL import Image
from flask import Flask, render_template, redirect, url_for, flash, request,send_file,send_from_directory
from flask_bootstrap import Bootstrap
from flask_wtf import FlaskForm
from flask_wtf.file import FileAllowed,FileField
from wtforms import StringField, PasswordField, BooleanField, IntegerField, DateField, SelectField, SubmitField, TextAreaField, FileField
from wtforms.validators import InputRequired, Email, Length, length, DataRequired, EqualTo
from flask_sqlalchemy import SQLAlchemy
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.utils import secure_filename
from flask_login import LoginManager, UserMixin, login_user, login_required, logout_user, current_user
from flask_mysqldb import MySQL
from flask_dropzone import Dropzone
import smtplib
from email.mime.text import MIMEText
app = Flask(__name__)
# NOTE(review): the secret key and the database credentials below are
# hard-coded in source; move them to environment variables / a config file
# before any public deployment.
app.config['SECRET_KEY'] = 'Authorised Personnel Only.' # set the database directory
#app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////mnt/c/Users/calvi/OneDrive/Documents/CS3305/Team9/test.db'
#app.config[
#    'SQLALCHEMY_DATABASE_URI'] = 'mysql://seintu:0mYkNrVI0avq@mysql.netsoc.co/seintu_project2' # set the database directory
Bootstrap(app)
# Flask-Login setup: unauthenticated users are redirected to 'signin'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'signin'
SQLALCHEMY_DATABASE_URI = "mysql://Johnnyos1304:netsoc101@Johnnyos1304.mysql.pythonanywhere-services.com/Johnnyos1304$project"
app.config["SQLALCHEMY_DATABASE_URI"] = SQLALCHEMY_DATABASE_URI
# recycle pooled connections below PythonAnywhere's idle timeout
app.config["SQLALCHEMY_POOL_RECYCLE"] = 299
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db = SQLAlchemy(app)
# setup for proposal call form (raw MySQL access used alongside SQLAlchemy)
app.config["MYSQL_HOST"] = "Johnnyos1304.mysql.pythonanywhere-services.com"
app.config["MYSQL_USER"] = "Johnnyos1304"
app.config["MYSQL_PASSWORD"] = "netsoc101"
app.config["MYSQL_DB"] = "Johnnyos1304$project"
mysql = MySQL(app)
mysql.init_app(app)
class User(UserMixin, db.Model):
    # this is the user login class that corresponds to the database
    __tablename__ = 'Researcher'  # the table name in the database is called Researcher
    # the following variables correspond to the columns in the Researcher table
    orcid = db.Column('orcid', db.Integer, primary_key=True, unique=True)  # ORCID doubles as the login id
    first_name = db.Column('FirstName', db.String(20))
    last_name = db.Column('LastName', db.String(20))
    email = db.Column('email', db.String(50), unique=True)
    password = db.Column('password', db.String(80))
    job = db.Column('job', db.String(255))
    prefix = db.Column('prefix', db.String(20))
    suffix = db.Column('suffix', db.String(20))
    phone = db.Column('phone', db.Integer)
    phone_extension = db.Column('PhoneExtension', db.Integer)
    type = db.Column('Type', db.String(20))
    # one-to-many links to the researcher's profile and activity tables
    education = db.relationship('Education', backref='Researcher')
    employment = db.relationship('Employment', backref='Researcher')
    societies = db.relationship('Societies', backref='Researcher')
    awards = db.relationship('Awards', backref='Researcher')
    funding = db.relationship('Funding', backref='Researcher')
    team_members = db.relationship('TeamMembers', backref='Researcher')
    impacts = db.relationship('Impacts', backref='Researcher')
    inno_and_comm = db.relationship('InnovationAndCommercialisation', backref='Researcher')
    publications = db.relationship('Publications', backref='Researcher')
    presentations = db.relationship('Presentations', backref='Researcher')
    collab = db.relationship('Collaborations', backref='Researcher')
    organised_events = db.relationship('OrganisedEvents', backref='Researcher')
    edu_and_public_engagement = db.relationship('EducationAndPublicEngagement', backref='Researcher')
    submission = db.relationship('Submissions', backref='Researcher')
    ExternalReview = db.relationship('ExternalReview',backref='Researcher')
    reports = db.relationship('Report', backref='Researcher')

    def __init__(self, orcid, first_name, last_name, email, password, job, prefix, suffix, phone, phone_extension, type):
        # this initialises the class and maps the variables to the table (done by flask automatically)
        self.orcid = orcid
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.password = password
        self.job = job
        self.prefix = prefix
        self.suffix = suffix
        self.phone = phone
        self.phone_extension = phone_extension
        self.type = type

    def get_orcid(self):
        # Explicit accessor for the researcher's ORCID.
        return self.orcid

    def get_id(self):
        # this overrides the method get_id() so that it returns the orcid instead of the default id attribute in UserMixIn
        return self.orcid
class Proposal(db.Model):
    """A funding call that researchers can submit proposals against."""

    __tablename__ = "Proposal"

    Deadline = db.Column(db.Date, nullable=False)  # closing date for submissions
    title = db.Column(db.String(100), nullable=False)
    TextOfCall = db.Column(db.String(1000), nullable=False)
    TargetAudience = db.Column(db.String(500), nullable=False)
    EligibilityCriteria = db.Column(db.String(1000), nullable=False)
    Duration = db.Column(db.Integer, nullable=False)  # funding duration
    ReportingGuidelines = db.Column(db.String(1000), nullable=False)
    TimeFrame = db.Column(db.String(200), nullable=False)
    picture = db.Column(db.String(200), nullable=True)  # optional image path
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)

    def __init__(self, Deadline, title, TextOfCall, TargetAudience, EligibilityCriteria, Duration, ReportingGuidelines, TimeFrame):
        self.Deadline = Deadline
        self.title = title
        self.TextOfCall = TextOfCall
        self.TargetAudience = TargetAudience
        self.EligibilityCriteria = EligibilityCriteria
        self.Duration = Duration
        self.ReportingGuidelines = ReportingGuidelines
        self.TimeFrame = TimeFrame

    def __repr__(self):
        # BUG FIX: previously labelled "User(...)" -- a copy/paste slip;
        # report the correct class name for debugging.
        return f"Proposal('{self.Deadline}', '{self.TargetAudience}', '{self.TimeFrame}')"
class Submissions(db.Model):
    """A researcher's submission (application) against a Proposal call."""
    __tablename__='Submission'
    propid = db.Column(db.Integer,nullable=False)  # id of the Proposal this targets
    subid = db.Column(db.Integer,nullable=False, primary_key=True)
    title = db.Column(db.Text,nullable=False)
    duration = db.Column(db.Integer,nullable=False)
    NRP = db.Column(db.String(200),nullable=False)
    legal = db.Column(db.Text,nullable=False)
    ethicalAnimal = db.Column(db.Text,nullable=False)
    ethicalHuman = db.Column(db.Text,nullable=False)
    location = db.Column(db.Text,nullable=False)
    coapplicants = db.Column(db.Text,nullable=True)
    collaborators = db.Column(db.Text,nullable=True)
    scientific = db.Column(db.Text,nullable=False)  # scientific abstract
    lay = db.Column(db.Text,nullable=False)  # lay abstract
    declaration = db.Column(db.Boolean,nullable=False)
    user = db.Column(db.Integer, db.ForeignKey('Researcher.orcid') ,nullable=False)  # submitting researcher
    draft = db.Column(db.Boolean, nullable=False, default=True)
    proposalPDF = db.Column(db.String(255),nullable=False)  # stored filename of the uploaded PDF
    status = db.Column(db.String(255), default="pending")
    reports = db.relationship('Report', backref="Submission")
    team = db.relationship('Team', backref="Submission")
    funding = db.relationship('Funding', backref="Submission")

    def __init__(self,propid,title,duration,NRP,legal,ethicalAnimal,ethicalHuman,location,coapplicants,collaborators,scientific,lay,declaration,user,proposalPDF):
        # New submissions always start life as drafts.
        self.title=title
        self.propid=propid
        self.duration=duration
        self.NRP=NRP
        self.legal=legal
        self.ethicalAnimal=ethicalAnimal
        self.ethicalHuman=ethicalHuman
        self.location=location
        self.coapplicants=coapplicants
        self.collaborators=collaborators
        self.scientific=scientific
        self.lay=lay
        self.declaration=declaration
        self.user=user
        self.proposalPDF=proposalPDF
        self.draft=True

    def setDraftFalse(self):
        # Mark the submission as finalised (no longer an editable draft).
        self.draft=False
class Funding(db.Model):
    """Funding record attached to a researcher (orcid) and a submission (subid)."""
    __tablename__ = 'Funding'
    StartDate = db.Column(db.Date, nullable=False)
    EndDate = db.Column(db.Date, nullable=False)
    AmountFunding = db.Column(db.Integer, nullable=False)
    FundingBody = db.Column(db.String(255))
    FundingProgramme = db.Column(db.String(255), nullable=False)
    Stats = db.Column(db.String(255), nullable=False)  # status string (labelled "Status" in the edit form)
    PrimaryAttribution = db.Column(db.String(255), nullable=False)
    orcid = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'), nullable=False)
    subid = db.Column(db.Integer, db.ForeignKey('Submission.subid'), nullable=False)
    ID=db.Column(db.Integer, nullable=False, primary_key=True)
    def __init__(self,subid,StartDate, EndDate, AmountFunding, FundingBody, FundingProgramme, Stats, PrimaryAttribution, orcid):
        """Populate all funding fields; `ID` is assigned by the database."""
        self.StartDate = StartDate
        self.EndDate = EndDate
        self.AmountFunding = AmountFunding
        self.FundingBody = FundingBody
        self.FundingProgramme = FundingProgramme
        self.Stats = Stats
        self.PrimaryAttribution = PrimaryAttribution
        self.orcid = orcid
        self.subid=subid
    def __repr__(self):
        # FIX: repr previously said "User(...)" (copy-paste from another model);
        # label it with this class's own name.
        return f"Funding('{self.StartDate}', '{self.FundingProgramme}', '{self.AmountFunding}')"
class ExternalReview(db.Model):
    """A completed/in-progress external review of a submission by a reviewer."""
    __tablename__="ExternalReview"
    id=db.Column(db.Integer,primary_key=True,nullable=False)
    Submission=db.Column(db.Integer,db.ForeignKey('Submission.subid'),nullable=False)
    reviewer=db.Column(db.Integer,db.ForeignKey('Researcher.orcid'),nullable=False)
    Complete=db.Column(db.Boolean,default=False,nullable=False)
    review=db.Column(db.String(255),nullable=False)  # path/filename of the review PDF
    def __init__(self,Submission,reviewer,Complete,review):
        self.Submission=Submission
        self.reviewer=reviewer
        self.Complete=Complete
        self.review=review
class ExternalPendingReviews(db.Model):
    """A review request sent to an external reviewer that has not yet produced a review."""
    __tablename__="ExternalPendingReviews"
    id=db.Column(db.Integer,primary_key=True,nullable=False)
    Submission = db.Column(db.Integer, db.ForeignKey('Submission.subid'), nullable=False)
    reviewer = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'), nullable=False)
    Complete = db.Column(db.Boolean, default=False, nullable=False)
    def __init__(self,Submission,reviewer,Complete):
        self.Submission=Submission
        self.reviewer=reviewer
        self.Complete=Complete
class Education(db.Model):
    """An education entry (degree) on a researcher's profile."""
    __tablename__ = "Education"
    id = db.Column(db.Integer, primary_key=True)
    degree = db.Column('Degree', db.String(255))
    field = db.Column('Field', db.String(255))
    institution = db.Column('Institution', db.String(255))
    location = db.Column('Location', db.String(255))
    year = db.Column('Year', db.Integer)
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Employment(db.Model):
    """An employment entry on a researcher's profile."""
    __tablename__ = "Employment"
    id = db.Column(db.Integer, primary_key=True)
    company = db.Column('Company', db.String(255))
    location = db.Column('Location', db.String(255))
    years = db.Column('Years', db.Integer)
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Societies(db.Model):
    """A professional-society membership on a researcher's profile."""
    __tablename__ = "Societies"
    id = db.Column(db.Integer, primary_key=True)
    start_date = db.Column('StartDate', db.Date)
    end_date = db.Column('EndDate', db.Date)
    society = db.Column('Society', db.String(255))
    membership = db.Column('Membership', db.String(255))
    status = db.Column('Status', db.String(20))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Awards(db.Model):
    """An award entry on a researcher's profile."""
    __tablename__ = "Awards"
    id = db.Column(db.Integer, primary_key=True)
    year = db.Column('Year', db.Integer)
    award_body = db.Column('AwardingBody', db.String(255))
    details = db.Column('Details', db.String(255))
    team_member = db.Column('TeamMember', db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Team(db.Model):
    """A research team led by one researcher and linked to one submission."""
    __tablename__ = "Team"
    team_id = db.Column("TeamID", db.Integer, primary_key=True)
    team_leader = db.Column("TeamLeader", db.Integer, db.ForeignKey('Researcher.orcid'))
    #change to sub id
    subid = db.Column("SubmissionID", db.Integer, db.ForeignKey('Submission.subid'))
class TeamMembers(db.Model):
    """Membership of a researcher in a Team, with tenure dates and role."""
    __tablename__ = "TeamMembers"
    id = db.Column(db.Integer, primary_key=True)
    start_date = db.Column("StartDate", db.Date)
    departure_date = db.Column("DepartureDate", db.Date)
    name = db.Column("Name", db.String(255))
    position = db.Column("position", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
    team_id = db.Column(db.Integer, db.ForeignKey('Team.TeamID'))
    #subid = db.Column(db.Integer, nullable="False")
class Impacts(db.Model):
    """A research-impact entry on a researcher's profile."""
    __tablename__ = "Impacts"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column("Title", db.String(255))
    category = db.Column("Category", db.String(255))
    primary_beneficiary = db.Column("PrimaryBeneficiary", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class InnovationAndCommercialisation(db.Model):
    """An innovation/commercialisation entry on a researcher's profile."""
    __tablename__ = "InnovationAndCommercialisation"
    id = db.Column(db.Integer, primary_key=True)
    year = db.Column("Year", db.Integer)
    type = db.Column("Type", db.String(255))
    title = db.Column("Title", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255), nullable=False)
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Publications(db.Model):
    """A publication entry on a researcher's profile."""
    __tablename__ = "Publications"
    id = db.Column(db.Integer, primary_key=True)
    year = db.Column("Year", db.Integer)
    type = db.Column("Type", db.String(255))
    title = db.Column("Title", db.String(255))
    name = db.Column("Name", db.String(255))
    status = db.Column("Status", db.String(255))
    doi = db.Column("DOI", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Presentations(db.Model):
    """A conference/seminar presentation entry on a researcher's profile."""
    __tablename__ = "Presentations"
    id = db.Column(db.Integer, primary_key=True)
    year = db.Column("Year", db.Integer)
    title = db.Column("Title", db.String(255))
    type = db.Column("Type", db.String(255))
    conference = db.Column("Conference", db.String(255))
    invited_seminar = db.Column("InvitedSeminar", db.String(255))
    keynote = db.Column("Keynote", db.String(255))
    organising_body = db.Column("OrganisingBody", db.String(255))
    location = db.Column("Location", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Collaborations(db.Model):
    """A collaboration entry (academic or otherwise) on a researcher's profile."""
    __tablename__ = "Collaborations"
    id = db.Column(db.Integer, primary_key=True)
    start_date = db.Column("StartDate", db.Date)
    end_date = db.Column("EndDate", db.Date)
    institution = db.Column("Institution", db.String(255))
    department = db.Column("Department", db.String(255))
    location = db.Column("Location", db.String(255))
    name_collaborator = db.Column("NameCollaborator", db.String(255))
    primary_goal = db.Column("PrimaryGoal", db.String(255))
    frequency_of_interaction = db.Column("FrequencyOfInteraction", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    academic = db.Column("Academic", db.Boolean)
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class OrganisedEvents(db.Model):
    """An event organised by the researcher."""
    __tablename__ = "OrganisedEvents"
    id = db.Column(db.Integer, primary_key=True)
    start_date = db.Column("StartDate", db.Date)
    end_date = db.Column("EndDate", db.Date)
    title = db.Column("Title", db.String(255))
    type = db.Column("Type", db.String(255))
    role = db.Column("Role", db.String(255))
    location = db.Column("Location", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class EducationAndPublicEngagement(db.Model):
    """An education/public-engagement activity on a researcher's profile."""
    __tablename__ = "EducationAndPublicEngagement"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column("Name", db.String(255))
    start_date = db.Column("StartDate", db.Date)
    end_date = db.Column("EndDate", db.Date)
    activity = db.Column("Activity", db.String(255))
    topic = db.Column("Topic", db.String(255))
    target_area = db.Column("TargetArea", db.String(255))
    primary_attribution = db.Column("PrimaryAttribution", db.String(255))
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
class Report(db.Model):
    """A report document (PDF) filed by a researcher against a submission."""
    __tablename__ = "Report"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255), nullable=False)
    pdf = db.Column(db.String(255))  # path/filename of the uploaded report PDF
    type = db.Column(db.String(255), nullable=False)
    ORCID = db.Column(db.Integer, db.ForeignKey('Researcher.orcid'))
    # BUG FIX: nullable was the *string* "False", which is truthy and therefore
    # made the column nullable. Use the boolean False so subid is NOT NULL.
    subid = db.Column(db.Integer, db.ForeignKey('Submission.subid'), nullable=False)
# -------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------
# Below are the form classes that inherit the FlaskForm class.
# You can set the requirements for each attribute here instead of doing it in the html file
class LoginForm(FlaskForm):
    """Sign-in form (sign_in.html): email + password with 'remember me'."""
    # this is the class for the login form in the sign_in.html
    email = StringField('Email', validators=[InputRequired(), Email(message="Invalid Email"), Length(max=50)])
    password = PasswordField('Password', validators=[InputRequired(), Length(min=8, max=80)])
    remember = BooleanField('Remember me')
    forgot = StringField("Forgot your password")
class ForgotForm(FlaskForm):
    """Forgot-password form: email entered twice for confirmation."""
    email = StringField("Email", validators=[InputRequired(), Email(message="Invalid Email"),Length(max=50)])
    reEmail = StringField("Re-type Email", validators=[InputRequired(), Email(message="Invalid Email"),Length(max=50)])
    submit = SubmitField('Reset Password')
class ResetForm(FlaskForm):
    """Password-reset form: new password entered twice; EqualTo enforces the match."""
    new = PasswordField("New Password", validators=[InputRequired(), Length(min=8,max=80), EqualTo('repeat', message='Passwords must match')])
    repeat = PasswordField("Re-type Password", validators=[InputRequired(), Length(min=8,max=80)])
    submit = SubmitField('Reset Password')
class UpdateInfoForm(FlaskForm):
    """Edit-profile form: same personal fields as RegisterForm minus ORCID/password."""
    #this is the class for the register form in the sign_up.html
    first_name = StringField('First Name:' , validators=[InputRequired(), Length(max=20)])
    last_name = StringField('Last Name:', validators=[InputRequired(), Length(max=20)])
    email = StringField('Email:', validators=[InputRequired(), Email(message="Invalid Email"), Length(max=50)])
    job = StringField('Job: ', validators=[InputRequired(), Length(max=255)])
    prefix = StringField('Prefix: ', validators=[InputRequired(), Length(max=20)])
    suffix = StringField('Suffix: ', validators=[InputRequired(), Length(max=20)])
    phone = IntegerField('Phone: ')
    phone_extension = IntegerField('Phone Extension: ')
    submit = SubmitField('Edit')
class RegisterForm(FlaskForm):
    """Account-registration form (sign_up.html); password confirmed via EqualTo."""
    #this is the class for the register form in the sign_up.html
    orcid = IntegerField('ORCID:', validators=[InputRequired()])
    first_name = StringField('First Name:', validators=[InputRequired(), Length(max=20)])
    last_name = StringField('Last Name:', validators=[InputRequired(), Length(max=20)])
    email = StringField('Email:', validators=[InputRequired(), Email(message="Invalid Email"), Length(max=50)])
    password = PasswordField('Password:', validators=[InputRequired(), Length(min=8, max=80), EqualTo('confirm', message='Passwords must match')])
    confirm = PasswordField('Repeat password')
    job = StringField('Job: ', validators=[InputRequired(), Length(max=255)])
    prefix = StringField('Prefix: ', validators=[InputRequired(), Length(max=20)])
    suffix = StringField('Suffix: ', validators=[InputRequired(), Length(max=20)])
    phone = IntegerField('Phone: ')
    phone_extension = IntegerField('Phone Extension: ')
class ManageForm(FlaskForm):
    """Admin form to assign a role to a user; choices filled at runtime."""
    researcher = SelectField("User")
    role = SelectField('Role: ', choices=[('Researcher','Researcher'),('Reviewer','Reviewer'),("Admin","Admin")])
    submit = SubmitField('Apply')
#form for form creations
class formCreationForm(FlaskForm):
    """Form to create a funding record (all fields required)."""
    start_date = DateField('Start Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    funding_amount = IntegerField('Funding Amount', validators=[InputRequired()], render_kw={"placeholder": "Amount"})
    funding_body = StringField('Funding Body', validators=[InputRequired()], render_kw={"placeholder": "Funding Body"})
    funding_programme = StringField('Funding Programme', validators=[InputRequired()], render_kw={"placeholder": "Funding Programme"})
    status = StringField('Status', validators=[InputRequired()], render_kw={"placeholder": "Active/Inactive"})
    primary_attribution = StringField('Primary Attribution', validators=[InputRequired()], render_kw={"placeholder": "Primary Attribution"})
    submit = SubmitField('Submit')
class UpdateEducationForm(FlaskForm):
    """Edit/remove an Education row; `id` selects the row, `idd` tags the form in templates."""
    idd = "edu"
    id = StringField('ID:', validators=[ Length(max=50)])
    degree = StringField('Degree:', validators=[ Length(max=50)])
    institution = StringField('Institution:', validators=[ Length(max=50)])
    location = StringField('Locations:', validators=[Length(max=50)])
    year = IntegerField('Year ' )
    field = StringField('Field:', validators=[ Length(max=50)])
    submit_edu = SubmitField('Edit Education')
    remove_edu = SubmitField('Remove')
class AddFundingForm(FlaskForm):
    """Add a Funding row; only the dates are required."""
    start_date = DateField('Start Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    amount_funding = IntegerField('Amount Funding', )
    funding_body = StringField('Funding Body', validators=[ Length(max=50)] )
    funding_programme = StringField('Funding Programme ', validators=[ Length(max=50)])
    stats = StringField('Stats', validators=[ Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[ Length(max=50)])
    submit = SubmitField('Add')
class AddTeamForm(FlaskForm):
    """Add a Team row (team id, leader, and the proposal it belongs to)."""
    team_id = StringField('Team ID' , validators=[ Length(max=50)] )
    team_leader = StringField('Team Leader' , validators=[ Length(max=50)] )
    proposal_id = StringField('Proposal ID' , validators=[ Length(max=50)] )
    submit = SubmitField('Add')
class AddEducationForm(FlaskForm):
    """Add an Education row to the researcher's profile."""
    degree = StringField('Degree:', validators=[ Length(max=50)])
    institution = StringField('Institution:', validators=[ Length(max=50)])
    location = StringField('Locations:', validators=[Length(max=50)])
    year = IntegerField('Year ' )
    field = StringField('Field:', validators=[ Length(max=50)])
    submit = SubmitField('Add Education')
class AddPublications(FlaskForm):
    """Add a Publications row to the researcher's profile."""
    year = IntegerField("Year")
    type = StringField("Type", validators=[Length(max=50)])
    title = StringField("Title", validators=[Length(max=50)])
    name = StringField("Name", validators=[Length(max=50)])
    status = StringField("Status", validators=[Length(max=50)])
    doi = StringField("DOI",validators=[Length(max=50)])
    primary_attribution = StringField("PrimaryAttribution", validators=[Length(max=50)])
    submit = SubmitField('Add Publications')
class AddEmploymentForm(FlaskForm):
    """Add an Employment row to the researcher's profile."""
    company = StringField('Company:', validators=[ Length(max=50)])
    location = StringField('Location:', validators=[ Length(max=50)])
    years = IntegerField('Years:')
    submit = SubmitField('Add')
class UpdatePublications(FlaskForm):
    """Edit/remove a Publications row identified by `id`."""
    id = StringField("ID:" ,validators=[ Length(max=50)])
    year = IntegerField("Year")
    type = StringField("Type", validators=[Length(max=50)])
    title = StringField("Title", validators=[Length(max=50)])
    name = StringField("Name", validators=[Length(max=50)])
    status = StringField("Status", validators=[Length(max=50)])
    doi = StringField("DOI",validators=[Length(max=50)])
    primary_attribution = StringField("PrimaryAttribution", validators=[Length(max=50)])
    submit_pub = SubmitField('Edit Publications')
    remove_pub = SubmitField('Remove')
class UpdateEmploymentForm(FlaskForm):
    """Edit/remove an Employment row identified by `id`."""
    id = StringField('ID:', validators=[ Length(max=50)])
    company = StringField('Company:', validators=[ Length(max=50)])
    location = StringField('Location:', validators=[ Length(max=50)])
    years = IntegerField('Years:')
    submit_emp = SubmitField('Edit Employment')
    remove_emp = SubmitField('Remove')
class UpdateEducationAndPublicEngagement(FlaskForm):
    """Edit/remove an EducationAndPublicEngagement row identified by `id`."""
    id = StringField('ID' ,validators=[ Length(max=50)])
    name = StringField('Name', validators=[Length(max=50)])
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    activity = StringField('Activity', validators=[Length(max=50)])
    topic = StringField('Topic', validators=[Length(max=50)])
    target_area = StringField('Target Area', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit_edup= SubmitField('Edit')
    remove_edup = SubmitField('Remove')
class UpdateFundingForm(FlaskForm):
    """Edit/remove a Funding row identified by `id`; dates are required."""
    id = StringField('ID:', validators=[ Length(max=50)])
    start_date = DateField('Start Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    amount_funding = IntegerField('Amount Funding', )
    funding_body = StringField('Funding Body', validators=[ Length(max=50)] )
    funding_programme = StringField('Funding Programme ', validators=[ Length(max=50)])
    stats = StringField('Status', validators=[ Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[ Length(max=50)])
    submit_fund = SubmitField('Edit Funding')
    remove_fund = SubmitField('Remove')
class UpdateOrganisedEvents(FlaskForm):
    """Edit/remove an OrganisedEvents row identified by `id`."""
    id = StringField('ID:', validators=[ Length(max=50)])
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    title = StringField('Title', validators=[Length(max=50)])
    type = StringField('Type', validators=[Length(max=50)])
    role = StringField('Role', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit_org = SubmitField('Edit')
    remove_org = SubmitField('Remove')
class UpdateImpactsForm(FlaskForm):
    """Edit/remove an Impacts row identified by `id`."""
    id = StringField('ID:', validators=[Length(max=50)])
    title = StringField('Title: ', validators=[Length(max=50)])
    category = StringField('Category: ', validators=[Length(max=50)])
    primary_beneficiary = StringField('Primary Beneficiary: ', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:', validators=[Length(max=50)])
    submit_imp = SubmitField('Edit')
    remove_imp = SubmitField('Remove')
class UpdatePresentations(FlaskForm):
    """Edit/remove a Presentations row identified by `id`."""
    id = StringField('ID:', validators=[Length(max=50)])
    year = IntegerField('Year', )
    title = StringField('Title', validators=[Length(max=50)])
    type = StringField('Type', validators=[Length(max=50)])
    conference = StringField('Conference', validators=[Length(max=50)])
    invited_seminar = StringField('Invited Seminar', validators=[Length(max=50)])
    keynote = StringField('Keynote', validators=[Length(max=50)])
    organising_body = StringField('Organising Body', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:' , validators=[Length(max=50)])
    submit_pres = SubmitField('Edit')
    remove_pres = SubmitField('Remove')
class UpdateCollaborations(FlaskForm):
    """Edit/remove a Collaborations row identified by `id`."""
    id = StringField('ID:', validators=[Length(max=50)])
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date =DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    institution = StringField('Institution', validators=[Length(max=50)])
    department = StringField('Department', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    name_collaborator = StringField('Name Colloaborator', validators=[Length(max=50)])
    primary_goal = StringField('Primary Goal',validators=[Length(max=50)] )
    frequency_of_interaction = StringField('Frequency Of Interaction', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:' , validators=[Length(max=50)])
    academic = BooleanField('Academic')
    submit_collab = SubmitField('Edit')
    # BUG FIX: the remove button was labelled 'Edit' (copy-paste error); every
    # other Update* form labels it 'Remove'.
    remove_collab = SubmitField('Remove')
class UpdateSocietiesForm(FlaskForm):
    """Edit/remove a Societies row; `id` selects the row, `idd` tags the form in templates."""
    idd = "socc"
    id = StringField('ID:', validators=[ Length(max=50)])
    start_date = DateField('Start Date',render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date',render_kw={"placeholder": "YYYY-MM-DD"})
    society = StringField('Society:', validators=[ Length(max=50)])
    membership = StringField('Membership:',validators=[ Length(max=50)])
    status = StringField('Status:',validators=[ Length(max=20)])
    submit_soc = SubmitField('Edit Societies')
    remove_soc = SubmitField('Remove')
class UpdateInnovation(FlaskForm):
    """Edit/remove an InnovationAndCommercialisation row identified by `id`."""
    id = StringField('ID', validators=[ Length(max=50)])
    year = IntegerField('Year:' )
    type = StringField('Type', validators=[Length(max=50)])
    title = StringField('Title', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit_inn = SubmitField('Edit')
    remove_inn = SubmitField('Remove')
class UpdateAwardsForm(FlaskForm):
    """Edit/remove an Awards row identified by `id`."""
    id = StringField('ID:', validators=[ Length(max=50)])
    year = IntegerField('Year:')
    award_body = StringField('Awarding Body:', validators=[ Length(max=50)])
    details = StringField('Detail:', validators=[Length(max=50)])
    team_member = StringField('Team Member ', validators=[Length(max=50)])
    submit_awrd = SubmitField('Edit Awards')
    remove_awrd = SubmitField('Remove')
class AddSocietiesForm(FlaskForm):
    """Add a Societies row to the researcher's profile."""
    start_date = DateField('Start Date',render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date',render_kw={"placeholder": "YYYY-MM-DD"})
    society = StringField('Society:', validators=[ Length(max=50)])
    membership = StringField('Membership:',validators=[ Length(max=50)])
    status = StringField('Status:',validators=[ Length(max=20)])
    submit = SubmitField('Add Society')
class AddPresentations(FlaskForm):
    """Add a Presentations row to the researcher's profile."""
    year = IntegerField('Year', )
    title = StringField('Title', validators=[Length(max=50)])
    type = StringField('Type', validators=[Length(max=50)])
    conference = StringField('Conference', validators=[Length(max=50)])
    invited_seminar = StringField('Invited Seminar', validators=[Length(max=50)])
    keynote = StringField('Keynote', validators=[Length(max=50)])
    organising_body = StringField('Organising Body', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:' , validators=[Length(max=50)])
    submit = SubmitField('Add Presentation')
class AddCollaborations(FlaskForm):
    """Add a Collaborations row to the researcher's profile."""
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date =DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    institution = StringField('Institution', validators=[Length(max=50)])
    department = StringField('Department', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    name_collaborator = StringField('Name Colloaborator', validators=[Length(max=50)])
    primary_goal = StringField('Primary Goal',validators=[Length(max=50)] )
    frequency_of_interaction = StringField('Frequency Of Interaction', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:' , validators=[Length(max=50)])
    academic = BooleanField('Academic')
    submit = SubmitField('Add Collaborations')
class AddOrganisedEvents(FlaskForm):
    """Add an OrganisedEvents row to the researcher's profile."""
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    title = StringField('Title', validators=[Length(max=50)])
    type = StringField('Type', validators=[Length(max=50)])
    role = StringField('Role', validators=[Length(max=50)])
    location = StringField('Location', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit = SubmitField('Add Organised Event')
class AddEducationAndPublicEngagement(FlaskForm):
    """Add an EducationAndPublicEngagement row to the researcher's profile."""
    name = StringField('Name', validators=[Length(max=50)])
    start_date = DateField('Start Date', render_kw={"placeholder": "YYYY-MM-DD"})
    end_date = DateField('End Date', render_kw={"placeholder": "YYYY-MM-DD"})
    activity = StringField('Activity', validators=[Length(max=50)])
    topic = StringField('Topic', validators=[Length(max=50)])
    target_area = StringField('Target Area', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit = SubmitField('Add Education and Public Engagement')
class AddAwardsForm(FlaskForm):
    """Add an Awards row to the researcher's profile."""
    year = IntegerField('Year:')
    award_body = StringField('Awarding Body:', validators=[ Length(max=50)])
    details = StringField('Detail:', validators=[Length(max=50)])
    team_member = StringField('Team Member ', validators=[Length(max=50)])
    submit = SubmitField('Add Awards')
class AddInnovation(FlaskForm):
    """Add an InnovationAndCommercialisation row to the researcher's profile."""
    year = IntegerField('Year:' )
    type = StringField('Type', validators=[Length(max=50)])
    title = StringField('Title', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution', validators=[Length(max=50)])
    submit = SubmitField('Add Innovation')
class AddTeamMembersForm(FlaskForm):
    """Add a TeamMembers row (researcher identified by ORCID) to a team."""
    start_date = DateField('Start Date',render_kw={"placeholder": "YYYY-MM-DD"})
    departure_date = DateField('Departure Date',render_kw={"placeholder": "YYYY-MM-DD"})
    name = StringField('Name:', validators=[ Length(max=50)])
    position = StringField('Position:',validators=[ Length(max=50)])
    primary_attribution = StringField('Primary Attribution:',validators=[ Length(max=20)])
    team_id = IntegerField('TeamID')
    orcid = IntegerField('ORCID:' )
    submit = SubmitField('Add Team Members')
class AddImpactsForm(FlaskForm):
    """Add an Impacts row to the researcher's profile."""
    title = StringField('Title: ', validators=[Length(max=50)])
    category = StringField('Category: ', validators=[Length(max=50)])
    primary_beneficiary = StringField('Primary Beneficiary: ', validators=[Length(max=50)])
    primary_attribution = StringField('Primary Attribution:', validators=[Length(max=50)])
    submit = SubmitField('Add Impacts')
class ExternalReviewForm(FlaskForm):
    """Upload form for an external reviewer's review PDF.
    NOTE(review): this class is redefined verbatim further down the file;
    the later definition shadows this one — the duplicate should be removed.
    """
    pdfReview=FileField('PDF of Review',validators=[InputRequired()])
    submit = SubmitField('submit')
class ReportForm(FlaskForm):
    """Upload form for a Report (title + PDF, both required)."""
    title = StringField('Title: ', validators=[Length(max=50), InputRequired()])
    pdf = FileField('PDF: ', validators=[InputRequired()])
    submit = SubmitField('Add')
#form for submission
class Submission_Form(FlaskForm):
    """Grant-application form backing the Submissions model.

    Offers Submit / Validate / Save Draft actions; `propid` links the
    submission to the call it answers (set via setPropId).
    """
    propid = StringField('propid')
    title = StringField('Title', validators=[InputRequired()],render_kw={"placeholder": "Title"})
    duration = IntegerField('Duration', validators=[InputRequired()],render_kw={"placeholder": "Duration in months"})
    # Choice *values* (areaA..areaN, Software, Others) are stored in the DB and
    # must not change; only display labels may be edited.
    NRP = SelectField(u'NRP', choices=[('areaA','Priority Area A - Future Networks & Communications'),
    # FIX: label typo 'Securitu' -> 'Security'
    ('areaB', 'Priority Area B - Data Analytics, Management, Security & Privacy'),
    ('areaC', 'Priority Area C - Digital Platforms, Content & Applications'),
    ('areaD', 'Priority Area D - Connected Health and Independent Living'),
    ('areaE', 'Priority Area E - Medical Devices'),
    ('areaF', 'Priority Area F - Diagnostics'),
    ('areaG', 'Priority Area G - Therapeutics : Synthesis, Formulation, Processing and Drug Delivery'),
    ('areaH', 'Priority Area H - Food for Health'),
    ('areaI', 'Priority Area I - Sustainable Food Production'),
    ('areaJ', 'Priority Area J - Marine Renewable Energy'),
    ('areaK', 'Priority Area K - Smart Grids & Smart Cities'),
    ('areaL', 'Priority Area L - Manufacturing Competitiveness'),
    ('areaM', 'Priority Area M - Processing Technologies and Novel Materials'),
    # FIX: label typos 'Buisness Processses' -> 'Business Processes'
    ('areaN', 'Priority Area N - Innovation in Services and Business Processes'),
    ('Software', 'Software'),
    ('Others', 'Others')
    ])
    # 250 words @ ~5 chars/word => 1250-char cap (same reasoning for abstracts below)
    legal_remit = TextAreaField("Please describe how your proposal is aligned with SFI's legal remit (max 250 words)"
    ,validators=[InputRequired(), length(max=1250) ],render_kw={"placeholder": "Legal remit"}
    )
    ethical_animal = TextAreaField("A statement indicating whether the research involves the use of animals"
    ,validators=[InputRequired()],render_kw={"placeholder": "Animal ethics statement"}
    )
    ethical_human = TextAreaField("A statement indicating whether the research involves human participants, human biological material, or identifiable data"
    , validators=[InputRequired()], render_kw={"placeholder": "Human ethics statement"}
    )
    location = TextAreaField("A statement of the applicant’s location (country) at the time of submission"
    , validators=[InputRequired()], render_kw={"placeholder": "Location statement"})
    co_applicants = TextAreaField("A list of co-applicants if applicable",render_kw={"placeholder": "List of co-applicants eg: '- name' "})
    # FIX: label typo 'Alist' -> 'A list'
    collaborators = TextAreaField("A list of collaborators, if applicable. Information about collaborators should include:( -Name -Organization -Email )"
    ,render_kw={"placeholder":"-name\n-organisation\n-Email;"})
    scientific_abstract = TextAreaField("Scientific Abstract( max 200 words )",
    validators=[InputRequired(), length(max=1000)], render_kw={"placeholder":"Scientific Abstract"} )
    lay_abstract = TextAreaField("Lay Abstract( max 100 words )",
    validators=[InputRequired(), length(max=500)], render_kw={"placeholder":"Lay Abstract"})
    proposalPDF = FileField("PDF of proposal" ,validators=[InputRequired()])
    declaration = BooleanField('Agree?', validators=[DataRequired(), ])
    submit = SubmitField('Submit')
    validate = SubmitField('Validate form')
    draft = SubmitField('Save Draft')
    def setPropId(self, propid):
        """Attach the id of the call this submission answers."""
        self.propid=propid
class proposalForm(FlaskForm):
    """Admin form to create a funding call (the model at the top of this file)."""
    title = StringField('Title', validators=[InputRequired()],render_kw={"placeholder": "Title"})
    deadline = DateField('Deadline', validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    text_of_call = TextAreaField('Text of Call', validators=[InputRequired()], render_kw={"placeholder": "Text of call"})
    target_audience = StringField('Target Audience', validators=[InputRequired()], render_kw={"placeholder": "Target Audience"})
    eligibility_criteria = TextAreaField('Eligibility Criteria', validators=[InputRequired()], render_kw={"placeholder": "Eligibility Criteria"})
    duration = IntegerField('Duration', validators=[InputRequired()], render_kw={"placeholder": "Duration in Months"})
    # FIX: label typo 'Guidlines' -> 'Guidelines'
    reporting_guidelines = TextAreaField('Reporting Guidelines', validators=[InputRequired()], render_kw={"placeholder": "Reporting Guidelines"})
    time_frame = StringField('Time frame', validators=[InputRequired()], render_kw={"placeholder": "Time Frame"})
    picture = FileField('Upload Proposal Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Submit')
class sendExternalReview(FlaskForm):
    """Admin form to route a submission: send to an external reviewer (by ORCID),
    decline it, or mark it as under review once reviews have been sent."""
    ORCID=IntegerField('ORCID',validators=[InputRequired()],render_kw={"placeholder": "ORCID"})
    Decline=SubmitField('Decline application')
    submit=SubmitField('Send for review')
    complete=SubmitField('External Reviews Sent: Mark as under Review')
class ConfirmationForm(FlaskForm):
    """Approve/decline decision form for a submission."""
    Sub=StringField("Submission id")
    Approve=SubmitField("Approve Application")
    Decline=SubmitField("Decline Application")
    def setSub(self,sub):
        # NOTE(review): this replaces the StringField *object* on the instance
        # with a plain value rather than setting Sub.data — confirm callers
        # expect that.
        self.Sub=sub
# -------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------------------------
class ExternalReviewForm(FlaskForm):
    """Upload form for an external reviewer's review PDF.
    NOTE(review): duplicate definition — an identical ExternalReviewForm is
    declared earlier in this file; this one shadows it and should be removed.
    """
    pdfReview=FileField('PDF of Review',validators=[InputRequired()])
    submit = SubmitField('submit')
def admin_setup(orcid):
    """Promote the user with the given ORCID to the Admin role and persist it."""
    account = User.query.filter_by(orcid=orcid).first()
    account.type = "Admin"
    db.session.commit()
class AddTeamMemberForm(FlaskForm):
    """Form for adding a researcher (by ORCID) to the current user's team."""
    start_date = DateField("Start Date : ", validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    departure_date = DateField("Departure Date : ", validators=[InputRequired()], render_kw={"placeholder": "YYYY-MM-DD"})
    position = StringField("Position : ", validators=[InputRequired(), length(max=255)], render_kw={"placeholder": "Position of the team member"})
    ORCID = IntegerField("ORCID : ", validators=[InputRequired()], render_kw={"placeholder": "ORCID of the researcher to add to your team"})
    submit = SubmitField("Add")
class CreateTeamForm(FlaskForm):
    """Single-button form for creating a new team."""
    create = SubmitField("Click here to create a team!")
class DeleteTeamMemberForm(FlaskForm):
    """Single-button form for removing a member from a team."""
    delete = SubmitField("Remove")
class EditTeamMemberForm(FlaskForm):
    """Form for editing an existing team member's details (no validators:
    blank fields are allowed so partial edits can be submitted)."""
    start_date = DateField("Start Date : ")
    departure_date = DateField("Departure Date : ")
    position = StringField("Position : ")
    primary_attribution = StringField("Primary Attribution : ")
    submit = SubmitField("Edit")
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: fetch a User by primary key for the session."""
    uid = int(user_id)
    return User.query.get(uid)
def mail(receiver, content="", email="", password="", subject=""):
    """Send a plain-text email via Gmail SMTP.

    Args:
        receiver: destination address.
        content/subject: body and subject; defaults are the account
            confirmation message.
        email/password: sender credentials; default to the team account.

    SECURITY: the fallback credential below is hard-coded in source —
    move it to app config / environment before deploying.

    BUG FIX: the original assigned the throwaway value "default password"
    and immediately overwrote it (dead assignment), and leaked the SMTP
    socket if login/sendmail raised; the connection is now always closed.
    """
    print(content)
    if not content:
        content = "Account made confirmation message"
    if not email:
        email = "team9sendermail@gmail.com"
    if not password:
        # hard-coded fallback credential (see SECURITY note above)
        password = "team9admin"
    if not subject:
        subject = "Account confirmation email"
    msg = MIMEText(content)
    msg['Subject'] = subject
    msg['To'] = receiver
    msg['From'] = email
    smtp = smtplib.SMTP('smtp.gmail.com', 587)
    try:
        smtp.ehlo()
        smtp.starttls()
        smtp.login(email, password)
        smtp.sendmail(email, receiver, msg.as_string())
    finally:
        smtp.close()  # release the socket even when SMTP calls fail
@app.route('/')
@app.route('/home')
def index():
    """Public landing page (no login required)."""
    return render_template("/home.html")
@app.route('/sign_in', methods=['GET', 'POST'])
def signin():
    """Log a user in by email + password and redirect to the dashboard."""
    form = LoginForm()  # create login form here
    if form.validate_on_submit():  # if login form is submitted then
        user = User.query.filter_by(email=form.email.data).first()  # get user from the database
        if not user or not check_password_hash(user.password, form.password.data):
            # if user doesn't exist or the password is incorrect
            flash('Please check your login details and try again!')  # show an error message
            return redirect(url_for('signin'))
        # else logs in the user
        login_user(user, remember=form.remember.data)
        if user.type == "Admin":
            # NOTE(review): both branches redirect to 'dashboard', so this
            # admin check is currently redundant — confirm intent.
            return redirect(url_for('dashboard'))  # returns the admin page
        # and redirect to the index page which will be the profile page once its done
        return redirect(url_for('dashboard'))
    return render_template('sign_in.html', form=form)
@app.route('/forgot', methods=["Get", 'Post'])
def forgot():
    """Start a password reset by emailing a reset link.

    NOTE(review): the reset URL embeds only the raw email address — there
    is no signed, expiring token — so anyone who knows an address can
    trigger (and follow) a reset. Should be replaced with a token scheme.
    NOTE(review): this checks form.submit.data rather than
    validate_on_submit(), so field validation never runs.
    """
    form = ForgotForm()
    if form.submit.data:
        email = form.email.data
        user = User.query.filter_by(email=email).first()
        if user:
            send = "Follow this url to reset your password: https://johnnyos1304.pythonanywhere.com/reset?l=%s" % (email)
            subject = "Reset Password"
            mail(receiver=form.email.data, content=send, subject=subject)
            return redirect(url_for('link'))
        else:
            # NOTE(review): 'message' is never passed to the template, so
            # the user gets no feedback on an unknown email.
            message = "Please enter valid form data"
            return render_template('forgot.html', form=form)
    return render_template('forgot.html', form=form)
@app.route('/link', methods=["Get", "Post"])
def link():
    """Confirmation page shown after a password-reset email has been sent."""
    note = "Please check your email and follow the instructions."
    return render_template("link.html", messages=note)
@app.route("/reset", methods=["Get","Post"])
def reset():
form = ResetForm()
if request.method == "POST":
if form.submit.data:
print("here")
hashed_password = generate_password_hash(form.new.data, method='sha256')
email = request.args.get("l")
user = User.query.filter_by(email=email).first()
if user!=None:
print("here2")
user.password=hashed_password
db.session.commit()
return redirect(url_for("signin"))
else:
email = request.args.get("l")
return render_template("reset.html",l=email,form=form)
@app.route('/sign_up', methods=['GET', 'POST'])
def signup():
    """Register a new researcher account, rejecting duplicate email/ORCID."""
    form = RegisterForm()  # create register form here
    if request.method == 'POST':
        if form.is_submitted():
            print("submitted")
        if form.validate():
            print("valid")
        if form.validate_on_submit():
            print("here")  # if register form is submitted then
            # hash the password
            hashed_password = generate_password_hash(form.password.data, method='sha256')
            # create a new user for the database
            user = User.query.filter_by(email=form.email.data).first()
            exist_orcid = User.query.filter_by(orcid=form.orcid.data).first()
            if not exist_orcid and not user:
                new_user = User(orcid=form.orcid.data, first_name=form.first_name.data, last_name=form.last_name.data,
                                email=form.email.data, job=form.job.data, prefix=form.prefix.data, suffix=form.suffix.data,
                                phone=form.phone.data, phone_extension=form.phone_extension.data, password=hashed_password, type="Researcher")
                # add the new user to the database
                db.session.add(new_user)
                # commit the changes to the database
                db.session.commit()
                # send confirmation email
                mail(form.email.data)
                return redirect(url_for('signin'))  # a page that acknowledges the user has been created
            if user:
                flash('This email has already been used', category="email")
            if exist_orcid:
                flash('This orcid has already been registered', category="orcid")
            return redirect(url_for('signup'))
    return render_template('sign_up.html', form=form)  # return the signup html page
@app.route('/dashboard')
@login_required
def dashboard():
    """Researcher dashboard: profile info, applications, and report lists."""
    profile = getProfileInfo()
    applications = Submissions.query.filter_by(user=current_user.orcid).all()
    reports = current_user.reports
    # Partition the user's reports by type for the two dashboard panels.
    scientific_reports = [r for r in reports if r.type == "Scientific"]
    financial_reports = [r for r in reports if r.type == "Financial"]
    return render_template('dashboard.html', user=current_user, applications=applications,
                           s_reports=scientific_reports, f_reports=financial_reports, info=profile)
@app.route('/scientific_reports', methods=["GET", "POST"])
@login_required
def scientific_reports():
    """List the user's scientific reports and accept a new PDF upload.

    The uploaded PDF is stored under a random hex name, which is what the
    Report row records (and what /download later serves).
    """
    id = request.args.get("id")  # submission this report belongs to
    print(id)
    form = ReportForm()
    reports = current_user.reports
    s_reports = [r for r in reports if r.type == "Scientific"]
    if request.method == "POST":
        print(form.title.data)
        print(form.pdf.data)
        if form.is_submitted():
            print("submitted")
        if form.validate():
            print("validated")
        if form.validate_on_submit():
            file = request.files['pdf']
            if file.filename == "":
                flash('No selected file')
                # BUG FIX: url_for() takes the endpoint *name* string, not
                # the view function object.
                return redirect(url_for('scientific_reports'))
            if file:
                # BUG FIX: the file used to be saved under its sanitized
                # original name while the Report row stored a fresh random
                # hex name, so the recorded path never existed on disk.
                # Save under the random name the DB row references.
                filenamesecret = uuid.uuid4().hex
                file.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
                print("file saved")
                newReport = Report(title=form.title.data, type="Scientific", pdf=filenamesecret, ORCID=current_user.orcid, subid=id)
                db.session.add(newReport)
                db.session.commit()
            return redirect(url_for('scientific_reports'))
    return render_template("scientific_reports.html", reports=s_reports, form=form, id=id)
# @app.route('/edit')
# @login_required
# NOTE(review): the triple-quoted string below is dead reference code (a
# no-op string-expression statement) kept from Flask's file-upload example;
# it is never executed and can likely be deleted.
'''if 'file' not in request.files:
    flash('No file part')
    return redirect(request.url)
file = request.files['file']
# if user does not select file, browser also
# submit an empty part without filename
if file.filename == '':
    flash('No selected file')
    return redirect(request.url)
if file and allowed_file(file.filename):
    filename = secure_filename(file.filename)
    file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
    return redirect(url_for('uploaded_file',
                            filename=filename))'''
@app.route('/financial_reports', methods=["GET", "POST"])
@login_required
def financial_reports():
    """List the user's financial reports and accept a new PDF upload.

    Mirrors scientific_reports(); the PDF is stored under a random hex
    name matching what the Report row records.
    """
    id = request.args.get("id")  # submission this report belongs to
    form = ReportForm()
    reports = current_user.reports
    f_reports = [r for r in reports if r.type == "Financial"]
    if request.method == "POST":
        print(form.title.data)
        print(form.pdf.data)
        if form.is_submitted():
            print("submitted")
        if form.validate():
            print("validated")
        if form.validate_on_submit():
            file = request.files['pdf']
            if file.filename == "":
                flash('No selected file')
                # BUG FIX: the original called url_for(finanical_reports) —
                # a misspelled, undefined name (NameError at runtime) and a
                # function object where an endpoint name string is required.
                return redirect(url_for('financial_reports'))
            if file:
                # BUG FIX: save under the random name the Report row stores
                # (the original saved under the sanitized original name,
                # leaving the recorded path dangling).
                filenamesecret = uuid.uuid4().hex
                file.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
                print("file saved")
                newReport = Report(title=form.title.data, type="Financial", pdf=filenamesecret, ORCID=current_user.orcid, subid=id)
                db.session.add(newReport)
                db.session.commit()
            return redirect(url_for('financial_reports'))
    return render_template("financial_reports.html", reports=f_reports, form=form)
@app.route('/current_applications')
@login_required
def current_applications():
    """Show status and title of each of the current user's submissions."""
    entries = Submissions.query.filter_by(user=current_user.orcid).all()
    posts = [{"status": entry.status, "title": entry.title} for entry in entries]
    return render_template("current_applications.html", posts=posts)
@app.route('/completed reviews_list')
@login_required
def completed_reviews_list():
    """List submissions whose external review is done (status 'Approval Pending').

    NOTE: the route string genuinely contains a space — kept byte-for-byte
    because templates link to this URL.
    """
    completed = Submissions.query.filter_by(status="Approval Pending").all()
    subs = [
        {"title": entry.title, "id": entry.subid, "status": entry.status}
        for entry in completed
    ]
    return render_template("completed_reviews_list.html", sub=subs)
@app.route('/completed_reviews', methods=['GET', 'POST'])
@login_required
def completed_review():
    """Show one reviewed submission plus its external review, and let an
    admin approve or decline it."""
    # complete display of submission, review of submission and approval button
    id = request.args.get("id")
    rev = {}
    sub = {}
    prop = {}
    # display submission data
    if id != None:
        i = Submissions.query.filter_by(subid=id).first()
        props = Proposal.query.filter_by(id=i.propid).first()
        prop["subid"] = props.id
        prop["deadline"] = props.Deadline
        prop["text"] = props.TextOfCall
        prop["audience"] = props.TargetAudience
        prop["eligibility"] = props.EligibilityCriteria
        prop["duration"] = props.Duration
        prop["guidelines"] = props.ReportingGuidelines
        prop["timeframe"] = props.TimeFrame
        prop["title"] = props.title
        sub["title"] = i.title
        sub["duration"] = i.duration
        sub["NRP"] = i.NRP
        sub["legal"] = i.legal
        sub["ethicalAnimal"] = i.ethicalAnimal
        sub["ethicalHuman"] = i.ethicalHuman
        sub["location"] = i.location
        sub["coapplicants"] = i.coapplicants
        sub["collaborators"] = i.collaborators
        sub["scientific"] = i.scientific
        sub["lay"] = i.lay
        sub["file"] = i.proposalPDF
        review = ExternalReview.query.filter_by(Submission=i.subid).first()
        rev["file"] = review.review
        rev["reviewer"] = review.reviewer
        form = ConfirmationForm()
        # setSub replaces the form's Sub field with the submission object,
        # so form.Sub.status below mutates the ORM row directly.
        form.setSub(i)
        if form.Decline.data:
            print("declined")
            form.Sub.status = "declined"
            db.session.commit()
            return redirect(url_for("dashboard"))
        if form.Approve.data:
            form.Sub.status = "Approved"
            # create a new funding thingy
            # create a new team data thingy
            #
            db.session.commit()
            return redirect(url_for("funding", id=id))
    # NOTE(review): when id is None, 'form' is unbound here and this line
    # raises — confirm whether a redirect should guard that case.
    return render_template("completed_reviews.html", form=form, sub=sub, rev=rev, prop=prop)
class FundingForm(FlaskForm):
    """Form for recording funding details of an approved submission."""
    start_date = DateField("Start Date : ")
    end_date = DateField("End Date : ")
    amount_funding = IntegerField("Amount Funding : ")
    funding_body = TextAreaField("Funding Body : ")
    funding_programme = TextAreaField("Funding Programme : ")
    stats = StringField("Stats : ")
    primary_attribution = StringField("Primary Attribution : ")
    submit = SubmitField("Submit")
@app.route('/funding', methods=["GET", "POST"])
@login_required
def funding():
    """Admin page to record a Funding row for an approved submission."""
    id = request.args.get("id")
    submission = Submissions.query.filter_by(subid=id).first()
    # NOTE(review): AttributeError here if the id doesn't match a
    # submission — confirm callers always pass a valid id.
    orcid = submission.user
    print(submission)
    fundingform = FundingForm()
    funding = Funding.query.filter_by(subid=id).first()
    if fundingform.submit.data and fundingform.validate():
        new_funding = Funding(StartDate=fundingform.start_date.data, EndDate=fundingform.end_date.data,
                              AmountFunding=fundingform.amount_funding.data, FundingBody=fundingform.funding_body.data,
                              FundingProgramme=fundingform.funding_programme.data, Stats=fundingform.stats.data,
                              PrimaryAttribution=fundingform.primary_attribution.data, orcid=orcid, subid=id)
        db.session.add(new_funding)
        db.session.commit()
        return redirect(url_for('dashboard'))
    return render_template("funding.html", id=id, fundingform=fundingform, submission=submission, funding=funding)
@app.route('/admin_external_review')
@login_required
def admin_external_review():
    """Admin list of submissions awaiting external review (status 'pending')."""
    entries = Submissions.query.filter_by(status="pending").all()
    posts = [
        {"status": entry.status, "title": entry.title, "id": entry.subid}
        for entry in entries
    ]
    return render_template("admin_external_review.html", posts=posts)
@app.route('/admin_send_review', methods=['GET', 'POST'])
@login_required
def admin_send_review():
    """Admin page for one submission: decline it, invite an external
    reviewer by ORCID, or mark it as under review."""
    form = sendExternalReview()
    post = request.args.get("id")
    sub = {}
    prop = {}
    i = Submissions.query.filter_by(subid=f"{post}").first()
    props = Proposal.query.filter_by(id=i.propid).first()
    prop["subid"] = props.id
    prop["deadline"] = props.Deadline
    prop["text"] = props.TextOfCall
    prop["audience"] = props.TargetAudience
    prop["eligibility"] = props.EligibilityCriteria
    prop["duration"] = props.Duration
    prop["guidelines"] = props.ReportingGuidelines
    prop["timeframe"] = props.TimeFrame
    prop["title"] = props.title
    sub["title"] = i.title
    sub["duration"] = i.duration
    sub["NRP"] = i.NRP
    sub["legal"] = i.legal
    sub["ethicalAnimal"] = i.ethicalAnimal
    sub["ethicalHuman"] = i.ethicalHuman
    sub["location"] = i.location
    sub["coapplicants"] = i.coapplicants
    sub["collaborators"] = i.collaborators
    sub["scientific"] = i.scientific
    sub["lay"] = i.lay
    sub["file"] = i.proposalPDF
    if form.Decline.data:
        i.status = "declined"
        db.session.add(i)
        db.session.commit()
        return redirect(url_for("admin_external_review"))
    elif form.complete.data:
        # change submission to external review when done button is pressed
        i.status = "review"
        db.session.add(i)
        db.session.commit()
        return redirect(url_for("dashboard"))
        # NOTE(review): the three lines below are unreachable (they follow
        # a return); the reviewer-notification email was probably meant for
        # the form.ORCID branch below — confirm and relocate.
        reviewer = User.query.filter_by(orcid=form.ORCID.data).first()
        email = reviewer.email
        mail(email, "Review request made, check your profile")
    elif form.ORCID.data != None:
        print("here")
        # database push external review link to user
        new_external_review = ExternalPendingReviews(post, form.ORCID.data, False)
        db.session.add(new_external_review)
        db.session.commit()
        flash("sent for external review")
    return render_template("admin_send_review.html", sub=sub, prop=prop, form=form)
@app.route('/reviewer_pending_list')
@login_required
def reviewer_pending_list():
    """List the submissions assigned to the current user for external review."""
    pending = ExternalPendingReviews.query.filter_by(reviewer=current_user.orcid).all()
    posts = []
    for entry in pending:
        # Each pending-review row points at a submission; show its details.
        submission = Submissions.query.filter_by(subid=entry.Submission).first()
        posts.append({
            "status": submission.status,
            "title": submission.title,
            "id": submission.subid,
            "file": submission.proposalPDF,
        })
    return render_template("reviewer_pending_list.html", posts=posts)
@app.route('/create_submission_form')
@login_required
def create_submission_page():
    """List every proposal call so a researcher can choose one to apply to."""
    posts = [
        {
            "id": call.id,
            "deadline": call.Deadline,
            "text": call.TextOfCall,
            "audience": call.TargetAudience,
            "eligibility": call.EligibilityCriteria,
            "duration": call.Duration,
            "guidelines": call.ReportingGuidelines,
            "timeframe": call.TimeFrame,
        }
        for call in Proposal.query.all()
    ]
    return render_template('create_submission_form.html', user=current_user, posts=posts)
# @app.route('/resetpassword')
@app.route('/proposals', methods=['GET', 'POST'])
@login_required
def proposals():
    """Render every proposal call, passing the ORM objects straight to the
    template (the template reads the model attributes directly)."""
    calls = Proposal.query.all()
    return render_template('proposals.html', user=current_user, posts=calls)
@app.route('/submissions', methods=['GET', 'POST'])
@login_required
def submissions():
    """Create or update a grant application for the proposal named by the
    ?id= query parameter.

    Three buttons on Submission_Form drive the flow: 'validate' just
    confirms the inputs, 'draft' saves without finalizing, 'submit'
    finalizes (setDraftFalse). Uploaded PDFs are stored under random hex
    names; the old file is deleted when a new one replaces it.
    """
    # fix request shit
    sub = {}
    form = Submission_Form()
    post = request.args.get("id")
    form.setPropId(post)
    submissions = Submissions.query.filter_by(propid=post, user=current_user.orcid).first()
    # conn = mysql.connect
    # cur = conn.cursor()
    # previousFile tracks the PDF already on disk for this draft (None for
    # a brand-new application); the commented legacy code used to load it.
    previousFile = None
    """cur.execute(f""
    SELECT *
    FROM Submission
    WHERE propid = {post} AND user='{current_user.orcid}';
    ")"""
    # for i in cur.fetchall():
    #     if i[15]==0:
    #         return render_template("submitted.html")
    #     form.propid=i[0]
    #     form.title.data=i[2]
    #     form.duration.data=i[3]
    #     form.NRP.data=i[4]
    #     form.legal_remit.data=i[5]
    #     form.ethical_animal.data=i[6]
    #     form.ethical_human.data=i[7]
    #     form.location.data=i[8]
    #     form.co_applicants.data=i[9]
    #     form.collaborators.data=i[10]
    #     form.scientific_abstract.data=i[11]
    #     form.lay_abstract.data=i[12]
    #     form.declaration.data=i[13]
    #     previousFile=i[16]
    # cur.close()
    # conn.close()
    if form.validate_on_submit():
        # NOTE(review): form.validate here is a *button field* on
        # Submission_Form (it shadows the usual validate() method name) —
        # confirm against the form definition.
        if form.validate.data:
            flash("Input Successfully Validated")
        elif form.draft.data:
            # --- save-as-draft branch ---
            print(previousFile)
            filenamesecret = previousFile
            if form.proposalPDF.data != None:
                filenamesecret = uuid.uuid4().hex
                if filenamesecret != previousFile:
                    form.proposalPDF.data.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
                else:
                    # regenerate until the name doesn't collide on disk
                    while True:
                        filecheck = Path(f"/home/Johnnyos1304/Team9/uploads/{filenamesecret}")
                        if filecheck.is_file():
                            filenamesecret = uuid.uuid4().hex
                        else:
                            break
                    form.proposalPDF.data.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
                print(filenamesecret + "1")
                # drop the superseded upload
                if previousFile != None:
                    os.remove(f"/home/Johnnyos1304/Team9/uploads/{previousFile}")
            existing_submission = Submissions.query.filter_by(propid=form.propid, user=current_user.orcid).first()
            print(existing_submission)
            if existing_submission:
                existing_submission.propid = form.propid
                existing_submission.title = form.title.data
                existing_submission.duration = form.duration.data
                existing_submission.NRP = form.NRP.data
                existing_submission.legal = form.legal_remit.data
                existing_submission.ethicalAnimal = form.ethical_animal.data
                existing_submission.ethicalHuman = form.ethical_human.data
                existing_submission.location = form.location.data
                existing_submission.coapplicants = form.co_applicants.data
                existing_submission.collaborators = form.collaborators.data
                existing_submission.scientific = form.scientific_abstract.data
                existing_submission.lay = form.lay_abstract.data
                existing_submission.declaration = form.declaration.data
                existing_submission.proposalPDF = filenamesecret
                # NOTE(review): draft = 0 in the *draft* branch looks
                # inverted (the submit branch sets the same value) — verify
                # the draft-flag convention against the model.
                existing_submission.draft = 0
                print(existing_submission.legal, " ", form.legal_remit.data)
                db.session.commit()
                return redirect(url_for("submissions", id=form.propid, sub=sub, submissions=submissions))
            new_submission = Submissions(propid=form.propid, title=form.title.data, duration=form.duration.data,
                                         NRP=form.NRP.data, legal=form.legal_remit.data,
                                         ethicalAnimal=form.ethical_animal.data,
                                         ethicalHuman=form.ethical_human.data,
                                         location=form.location.data,
                                         coapplicants=form.co_applicants.data,
                                         collaborators=form.collaborators.data,
                                         scientific=form.scientific_abstract.data,
                                         lay=form.lay_abstract.data,
                                         declaration=form.declaration.data,
                                         user=f"{current_user.orcid}",
                                         proposalPDF=f"{filenamesecret}"
                                         )
            db.session.add(new_submission)
            db.session.commit()
            flash("successfully Saved Draft")
            return redirect(url_for("submissions", id=form.propid, sub=sub, submissions=submissions))
        elif form.submit.data:
            # --- final-submit branch (same shape as draft, plus
            # setDraftFalse on a brand-new submission) ---
            filenamesecret = previousFile
            if form.proposalPDF.data != None:
                filenamesecret = uuid.uuid4().hex
                if filenamesecret != previousFile:
                    while True:
                        filecheck = Path(f"/home/Johnnyos1304/Team9/uploads/{filenamesecret}")
                        if filecheck.is_file():
                            filenamesecret = uuid.uuid4().hex
                        else:
                            break
                    form.proposalPDF.data.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
                if previousFile != None:
                    os.remove(f"/home/Johnnyos1304/Team9/uploads/{previousFile}")
            existing_submission = Submissions.query.filter_by(propid=form.propid, user=current_user.orcid).first()
            if existing_submission:
                existing_submission.propid = form.propid
                existing_submission.title = form.title.data
                existing_submission.duration = form.duration.data
                existing_submission.NRP = form.NRP.data
                existing_submission.legal = form.legal_remit.data
                existing_submission.ethicalAnimal = form.ethical_animal.data
                existing_submission.ethicalHuman = form.ethical_human.data
                existing_submission.location = form.location.data
                existing_submission.coapplicants = form.co_applicants.data
                existing_submission.collaborators = form.collaborators.data
                existing_submission.scientific = form.scientific_abstract.data
                existing_submission.lay = form.lay_abstract.data
                existing_submission.declaration = form.declaration.data
                existing_submission.proposalPDF = filenamesecret
                existing_submission.draft = 0
                db.session.commit()
                return redirect(url_for("submitted"))
            new_submission = Submissions(propid=form.propid, title=form.title.data, duration=form.duration.data,
                                         NRP=form.NRP.data, legal=form.legal_remit.data,
                                         ethicalAnimal=form.ethical_animal.data,
                                         ethicalHuman=form.ethical_human.data,
                                         location=form.location.data,
                                         coapplicants=form.co_applicants.data,
                                         collaborators=form.collaborators.data,
                                         scientific=form.scientific_abstract.data,
                                         lay=form.lay_abstract.data,
                                         declaration=form.declaration.data,
                                         user=f"{current_user.orcid}",
                                         proposalPDF=f"{filenamesecret}"
                                         )
            new_submission.setDraftFalse()
            db.session.add(new_submission)
            db.session.commit()
            flash("successfully submitted")
            return redirect(url_for("submissions", id=form.propid, sub=sub, submissions=submissions))
    # GET (or failed validation): show the proposal being applied to.
    i = Proposal.query.filter_by(id=f"{post}").first()
    sub["id"] = i.id
    sub["deadline"] = i.Deadline
    sub["text"] = i.TextOfCall
    sub["audience"] = i.TargetAudience
    sub["eligibility"] = i.EligibilityCriteria
    sub["duration"] = i.Duration
    sub["guidelines"] = i.ReportingGuidelines
    sub["timeframe"] = i.TimeFrame
    sub["title"] = i.title
    return render_template('submissions.html', user=current_user, sub=sub, form=form, submissions=submissions)
def save_picture(form_picture):
    """Resize an uploaded proposal picture to a 125x125 thumbnail and save
    it under a random name in static/propoosal_pics.

    Args:
        form_picture: the uploaded FileStorage from proposalForm.picture.

    Returns:
        The generated file name (random hex + original extension).
    """
    random_hex = secrets.token_hex(8)
    # BUG FIX: splitext needs the *filename string*; passing the
    # FileStorage object raised a TypeError (the "cant save image" bug
    # noted in the original comment).
    _, f_ext = os.path.splitext(form_picture.filename)
    picture_fn = random_hex + f_ext
    # NOTE(review): 'propoosal_pics' looks misspelled, but renaming it
    # requires renaming the directory on disk too — kept as-is.
    picture_path = os.path.join(app.root_path, 'static/propoosal_pics', picture_fn)
    output_size = (125, 125)
    img = Image.open(form_picture)  # PIL accepts the file-like object
    img.thumbnail(output_size)
    img.save(picture_path)
    return picture_fn
@app.route('/download')
@login_required
def download():
    """Serve an uploaded file (named by the ?file= parameter) as an
    attachment from the uploads directory."""
    requested = request.args.get("file")
    upload_dir = "uploads"
    return send_from_directory(upload_dir, requested, as_attachment=True)
@app.route('/external_review', methods=['GET', 'POST'])
@login_required
def external_review():
    """External reviewer's page: view a submission PDF (?file=) and upload
    a review, which flips the submission to 'Approval Pending'."""
    form = ExternalReviewForm()
    file = request.args.get("file")
    review = request.args.get("pdfReview")
    if form.pdfReview.data != None:
        print("here")
        filenamesecret = uuid.uuid4().hex
        form.pdfReview.data.save('/home/Johnnyos1304/Team9/uploads/' + filenamesecret)
        # form.pdfReview.data.save('uploads/' + filenamesecret)
        # the submission is located via its stored PDF name from the URL
        sub = Submissions.query.filter_by(proposalPDF=file).first()
        # positional args presumably (Submission, reviewer, completed,
        # review) — verify against the ExternalReview model definition.
        new_review = ExternalReview(sub.subid, current_user.orcid, True, filenamesecret)
        sub.status = "Approval Pending"
        db.session.add(new_review)
        db.session.commit()
        return redirect(url_for("dashboard"))
    if file == None and review == None:
        return redirect(url_for("index"))
    return render_template('external_review.html', file=file, form=form)
@app.route('/proposal_call', methods=['GET', 'POST'])
@login_required
def proposal_call():
    """Admin page for creating a new proposal (funding call)."""
    # Creates proposal form
    form = proposalForm(request.form)
    print(form.errors)
    # checks if form is submitted by post
    if request.method == 'POST':
        if form.is_submitted():
            print("submitted")
        if form.validate():
            print("valid")
        print(form.errors)
        # if input validates pushes to db
        if form.validate_on_submit():
            flash("Successfully logged")
            # if form.picture.data:  # image processing
            #     print("here ttt")
            #     picture_file = save_picture(form.picture.data)
            #     Image.open(picture_file)
            deadline = form.deadline.data
            textofcall = form.text_of_call.data
            targetaudience = form.target_audience.data
            eligibilitycriteria = form.eligibility_criteria.data
            duration = form.duration.data
            reportingguidelines = form.reporting_guidelines.data
            timeframe = form.time_frame.data
            title = form.title.data
            new_proposal = Proposal(Deadline=deadline, title=title, TextOfCall=textofcall, TargetAudience=targetaudience,
                                    EligibilityCriteria=eligibilitycriteria, Duration=duration, ReportingGuidelines=reportingguidelines, TimeFrame=timeframe)
            # conn = mysql.connect
            # cur = conn.cursor()
            # (legacy raw-SQL insert replaced by the ORM object above)
            # conn.commit()
            # cur.close()
            # conn.close()
            db.session.add(new_proposal)
            db.session.commit()
            # links to form creation
            print("here")
            return redirect(url_for('dashboard'))
        return render_template('proposal_call.html', form=form)
    else:
        return render_template('proposal_call.html', form=form)
@app.route('/edit_info', methods=['GET', 'POST'])
@login_required
def edit_info():
update_general = UpdateInfoForm(request.form)
update_education = UpdateEducationForm(request.form)
update_societies = UpdateSocietiesForm(request.form)
update_employment = UpdateEmploymentForm(request.form)
update_awards = UpdateAwardsForm(request.form)
update_funding = UpdateFundingForm(request.form)
update_org = UpdateOrganisedEvents(request.form)
update_pub = UpdatePublications(request.form)
update_imp = UpdateImpactsForm(request.form)
update_edup = UpdateEducationAndPublicEngagement(request.form)
update_pres = UpdatePresentations(request.form)
update_collab = UpdateCollaborations(request.form)
update_inn = UpdateInnovation(request.form)
user = current_user
print(user.societies)
if request.method == 'POST':
#print(update_general.errors)
#if input validates pushes to db
#
if update_general.validate_on_submit() :
update_user = User.query.filter_by(orcid=current_user.orcid).first()
update_user.first_name = update_general.first_name.data
update_user.last_name = update_general.last_name.data
update_user.email = update_general.email.data
update_user.job = update_general.job.data
update_user.prefix = update_general.prefix.data
update_user.suffix = update_general.suffix.data
update_user.phone = update_general.phone.data
update_user.phone_extension = update_general.phone_extension.data
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""UPDATE Researcher SET FirstName='{first_name}', LastName='{last_name}', Job='{job}', Prefix='{prefix}', Suffix='{suffix}',
# Phone={phone}, PhoneExtension={phone_extension}, Email='{email}' WHERE ORCID ={current_user.orcid}; """)
##conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
# Edit societies
elif update_societies.validate_on_submit() and "submit_soc" in request.form:
updates = Societies.query.filter_by(ORCID=current_user.orcid).all()
id1 = update_societies.id.data
for each in updates:
if each.id == id1:
each.start_date = update_societies.start_date.data
each.end_date = update_societies.end_date.data
each.society = update_societies.society.data
each.membership = update_societies.membership.data
each.status = update_societies.status.data
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""UPDATE Societies SET StartDate= '{start_date}', EndDate='{end_date}', Society = '{society}', Membership = '{membership}',
#Status = '{status}' WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('societiesInfo'))
# Remove societies
elif update_societies.validate_on_submit() and "remove_soc" in request.form:
print("here")
id1 = update_societies.id.data
society = Societies.query.filter_by(id=id1).first()
db.session.delete(society)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
## execute a query
#cur.execute(f"""DELETE FROM Societies WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('edit_info'))
# Edit Education
elif update_education.validate_on_submit() and "submit_edu" in request.form:
degree = update_education.degree.data
institution = update_education.institution.data
location = update_education.location.data
year = update_education.year.data
field = update_education.field.data
id = update_education.id.data
conn = mysql.connect
cur= conn.cursor()
# execute a query
cur.execute(f"""UPDATE Education SET Degree = '{degree}', Institution = '{institution}', Location= '{location}',
Year= {year}, Field = '{field}' WHERE ID ={id}; """)
conn.commit()
cur.close()
conn.close()
return redirect(url_for('educationInfo'))
#Remove Edu
elif update_education.validate_on_submit() and "remove_edu" in request.form:
print("here")
id1 = update_education.id.data
conn = mysql.connect
cur= conn.cursor()
# execute a query
cur.execute(f"""DELETE FROM Education WHERE ID ={id1}; """)
conn.commit()
cur.close()
conn.close()
return redirect(url_for('edit_info'))
#Edit Employment
elif update_employment.validate_on_submit() and "submit_emp" in request.form:
employment = Employment.query.filter_by(ORCID=current_user.orcid).all()
id2 = update_employment.id.data
for each in employment:
if each.id == id2:
each.company = update_employment.company.data
each.location = update_employment.location.data
each.years = update_employment.years.data
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""UPDATE Employment SET Company = '{company}', Location= '{location}',
# Years= {years} WHERE ID ={id2}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('employmentInfo'))
#Remove Employment
elif update_employment.validate_on_submit() and "remove_emp" in request.form:
print("here")
id1 = update_employment.id.data
employment = Employment.query.filter_by(id=id1).first()
db.session.delete(employment)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Employment WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('edit_info'))
#Edit Awards
elif update_awards.validate_on_submit() and "submit_awrd" in request.form:
id3 = update_awards.id.data
awards = Awards.query.filter_by(id=id3).first()
awards.year = update_awards.year.data
awards.award_body = update_awards.award_body.data
awards.details = update_awards.details.data
awards.team_member = update_awards.team_member.data
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""UPDATE Awards SET Year = {year}, AwardingBody = '{award_body}', Details = '{details}',
#TeamMember = '{team_member}' WHERE ID ={id3}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('awardsInfo'))
#Remove Awards
elif update_awards.validate_on_submit() and "remove_awrd" in request.form:
print("here")
id1 = update_awards.id.data
award = Awards.query.filter_by(id=id1).first()
db.session.delete(award)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Awards WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('edit_info'))
elif update_org.validate_on_submit() and "submit_org" in request.form:
id1 = update_org.id.data
org = OrganisedEvents.query.filter_by(id=id1).first()
org.start_date = update_org.start_date.data
org.end_date = update_org.end_date.data
org.title = update_org.title.data
org.type = update_org.type.data
org.role = update_org.type.data
org.location = update_org.location.data
org.primary_attribution = update_org.primary_attribution.data
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""UPDATE OrganisedEvents SET StartDate = '{start_date}', EndDate = '{end_date}', Title='{title}', Type = '{type}',
#Role = '{role}', Location = '{location}', PrimaryAttribution = {primary_attribution} WHERE ID = {id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('organised_events_info'))
elif update_org.validate_on_submit() and "remove_org" in request.form:
print("here")
id1 = update_org.id.data
org = OrganisedEvents.query.filter_by(id=id1).first()
db.session.delete(org)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM OrganisedEvents WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('edit_info'))
elif update_funding.validate_on_submit() and "submit_fund" in request.form:
id1 = update_funding.id.data
funding = Funding.query.filter_by(id=id1).first()
funding.start_date = update_funding.start_date.data
funding.end_date = update_funding.end_date.data
funding.amount_funding = update_funding.amount_funding.data
funding.funding_body= update_funding.funding_body.data
funding.funding_programme = update_funding.funding_programme.data
funding.stats = update_funding.stats.data
funding.primary_attribution = update_funding.primary_attribution.data
#conn = mysql.connect
#funds = Funding.query.filter_by(ID = id1).first
#funds.start_date = start_date
#funds.end_date = end_date
#funds.amount_funding = amount_funding
#funds.funding_body = funding_body
#funds.funding_programme = funding_body
#funds.stats = stats
#funds.primary_attribution = primary_attribution
db.session.commit()
return redirect(url_for('profile'))
#Remove Awards
elif update_funding.validate_on_submit() and "remove_fund" in request.form:
print("here")
id1 = update_funding.id.data
funding = Funding.query.filter_by(id=id1).first()
db.session.delete(funding)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Funding WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_pub.validate_on_submit() and "submit_pub" in request.form:
id2 = update_pub.id.data
pub = Publications.query.filter_by(id=id2).first()
pub.year = update_pub.year.data
pub.type = update_pub.type.data
pub.title = update_pub.title.data
pub.name = update_pub.name.data
pub.status = update_pub.status.data
pub.doi = update_pub.doi.data
pub.primary_attribution = update_pub.primary_attribution.data
db.session.commit()
#conn = mysql.connect()
#cur = conn.cursor()
#cur.execute(f"""UPDATE Publications SET Year = {year}, Type = '{type}', Title= '{title}',
# Name = '{name}', Status = '{status}', DOI = '{doi}' WHERE ID = {id2} """)
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_pub.validate_on_submit() and "remove_pub" in request.form:
print("here")
id1 = update_pub.id.data
pub = Publications.query.filter_by(id=id1).first()
db.session.delete(pub)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Publications WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_imp.validate_on_submit() and "submit_imp" in request.form:
id2 = update_imp.id.data
imp = Impacts.query.filter_by(id=id2).first()
imp.title = update_imp.title.data
imp.category = update_imp.category.data
imp.primary_beneficiary = update_imp.primary_beneficiary.data
imp.primary_attribution = update_imp.primary_attribution.data
db.session.commit()
#conn = mysql.connect()
#cur = conn.cursor()
#cur.execute(f"""UPDATE Impacts SET Title = '{title}', Category = '{category}' , PrimaryBeneficiary = '{primary_beneficiary}',
#PrimaryAttribution = '{primary_attribution}' WHERE ID = {id2} """)
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_imp.validate_on_submit() and "remove_imp" in request.form:
print("here")
id1 = update_imp.id.data
imp = Impacts.query.filter_by(id=id1).first()
db.session.delete(imp)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Impact WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_edup.validate_on_submit() and "submit_edup" in request.form:
id1 = update_edup.id.data
edup = EducationAndPublicEngagement.query.filter_by(id=id1).first()
edup.name = update_edup.name.data
edupstart_date = update_edup.start_date.data
edup.end_date = update_edup.end_date.data
edup.activity = update_edup,activity.data
edup.topic = update_edup.topic.data
edup.target_area = update_edup.target_area.data
edup.primary_attribution = update_edup.primary_attribution.data
db.session.commit()
#conn = mysql.connect()
#cur = conn.cursor()
#cur.execute(f"""UPDATE EducationAndPublicEngagement SET Name = '{name}', StartDate = '{start_date}', EndDate = '{end_date}',
#Activity = '{activity}', Topic = '{topic}', TargetArea = '{target_area}', PrimaryAttribution='{primary_attribution}' WHERE ID = {id1} """)
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_edup.validate_on_submit() and "remove_edup" in request.form:
print("here")
id1 = update_edup.id.data
edup = EducationAndPublicEngagement.query.filter_by(id=id1).first()
db.session.delete(edup)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM EducationAndPublicEngagemen WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_pres.validate_on_submit() and "submit_pres" in request.form:
id1 = update_pres.id.data
pres = Presentations.query.filter_by(id=id1).first()
pres.year = update_pres.year.data
pres.title = update_pres.title.data
pres.type = update_pres.type.data
pres.conference = update_pres.conference.data
pres.invited_seminar = update_pres.invited_seminar.data
pres.keynote = update_pres.keynote.data
pres.organising_body = update_pres.organising_body.data
pres.location = update_pres.location.data
db.session.commit()
#conn = mysql.connect()
#cur = conn.cursor()
#cur.execute(f"""UPDATE Presentations SET Year = {year}, Title = '{title}', Type = '{type}', Conference='{conference}',
# InvitedSeminar='{invited_seminar}', Keynote = '{keynote}', OrganisedBody = '{organising_body}', Location = '{location}' WHERE ID = {id1} """)
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_pres.validate_on_submit() and "remove_pres" in request.form:
print("here")
id1 = update_pres.id.data
pres = Presentations.query.filter_by(id=id1).first()
db.session.delete(pres)
db.session.commit()
#conn = mysql.connect
#cur= conn.cursor()
# execute a query
#cur.execute(f"""DELETE FROM Presentations WHERE ID ={id1}; """)
#conn.commit()
#cur.close()
#conn.close()
return redirect(url_for('profile'))
elif update_collab.validate_on_submit and "submit_collab" in request.form:
id1 = update_collab.id.data
start_date = update_collab.start_date.data
end_date = update_collab.end_date.data
department = update_collab.end_date.data
location = update_collab.end_date.data
name_collaborator = update_collab.name_collaborator.data
primary_goal = update_collab.primary_goal.data
frequency_of_interaction = update_collab.frequency_of_interaction.data
primary_attribution = update_collab.primary_attribution.data
academic = update_collab.academic.data
conn = mysql.connect()
cur = conn.cursor()
cur.execute(f"""UPDATE Collaboratiions Set StartDate = '{start_date}', EndDate = '{end_date}', Department = '{department}', Location='{location}',
NameCollaborator = '{name_collaborator}', PrimaryGoal = '{primary_goal}', FrquencyOfInteraction = '{frequency_of_interaction}',
PrimaryAttribution='{primary_attribution}', Academic = {academic} WHERE ID = {id1} """)
cur.close()
conn.close()
return redirect(url_for('profile'))
elif update_pres.validate_on_submit and "remove_collab" in request.form:
print("here")
id1 = update_collab.id.data
conn = mysql.connect
cur= conn.cursor()
# execute a query
cur.execute(f"""DELETE FROM Collaborations WHERE ID ={id1}; """)
conn.commit()
cur.close()
conn.close()
return redirect(url_for('profile'))
elif update_inn.validate_on_submit and "submit_inn" in request.form:
id1 = update_inn.id.form
year = update_inn.year.form
type = update_inn.type.form
title = update_inn.title.form
primary_attribution = update_inn.primary_attribution._form
conn = mysql.connect()
cur = conn.cursor()
cur.execute(f"""UPDATE Innovations Set Year = {year}, Type = '{type}', Title = '{title}', PrimaryAttribution = '{primary_attribution}'
WHERE ID = {id1} """)
cur.close()
conn.close()
return redirect(url_for('profile'))
elif update_inn.validate_on_submit and "remove_inn" in request.form:
print("here")
id1 = update_inn.id.data
conn = mysql.connect
cur= conn.cursor()
# execute a query
cur.execute(f"""DELETE FROM Innovations WHERE ID ={id1}; """)
conn.commit()
cur.close()
conn.close()
return redirect(url_for('profile'))
return render_template('edit_info.html', form1=update_general, form2=update_education , form3=update_societies, form4 = update_employment,
form5 = update_awards,form6 = update_funding ,form7= update_org, form8=update_pub, form9=update_imp ,form10 = update_edup,
form11 = update_pres, form12 = update_collab , form13 = update_inn ,user=user)
@app.route('/generalInfo', methods=['GET', 'POST'])
@login_required
def generalInfo():
    """Let the logged-in user edit their general profile details.

    GET: render the edit form.
    POST: on a valid form, copy the fields onto the current user record,
    commit, and redirect to the profile page.
    """
    form = UpdateInfoForm(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            current_user.first_name = form.first_name.data
            current_user.last_name = form.last_name.data
            current_user.email = form.email.data
            current_user.job = form.job.data
            current_user.prefix = form.prefix.data
            current_user.suffix = form.suffix.data
            current_user.phone = form.phone.data
            current_user.phone_extension = form.phone_extension.data
            db.session.commit()
            return redirect(url_for('profile'))
    # Invalid POST or plain GET: show the form again.
    return render_template('generalInfo.html', form=form)
@app.route('/innovation_info', methods=['GET', 'POST'])
@login_required
def innovation_info():
    """Show the current user's innovations and handle adding a new one.

    GET: render the form plus the user's existing innovation records.
    POST: on a valid form, persist a new InnovationAndCommercialisation row
    via the ORM and redirect back to this page.
    """
    form = AddInnovation(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            inno = InnovationAndCommercialisation(
                year=form.year.data,
                type=form.type.data,
                title=form.title.data,
                primary_attribution=form.primary_attribution.data,
                ORCID=current_user.orcid,
            )
            db.session.add(inno)
            db.session.commit()
            return redirect(url_for('innovation_info'))
        # Validation failed: re-render the form without the list.
        return render_template('innovation_info.html', form=form)
    innovation_list = current_user.inno_and_comm
    return render_template('innovation_info.html', form=form, list=innovation_list)
@app.route('/presentations_info', methods=['GET', 'POST'])
@login_required
def presentations_info():
    """Show the current user's presentations and handle adding a new one.

    POST with a valid form inserts a Presentations row and redirects to the
    profile page; otherwise the page is re-rendered.
    """
    form = AddPresentations(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Presentations (Year, Title, Type, Conference,
                   InvitedSeminar, Keynote, OrganisingBody, Location,
                   PrimaryAttribution, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.year.data, form.title.data, form.type.data,
                 form.conference.data, form.invited_seminar.data,
                 form.keynote.data, form.organising_body.data,
                 form.location.data, form.primary_attribution.data,
                 current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('presentations_info.html', form=form)
    presentations_list = current_user.presentations
    return render_template('presentations_info.html', form=form, list=presentations_list)
@app.route('/collaborations_info', methods=['GET', 'POST'])
@login_required
def collaborations_info():
    """Show the current user's collaborations and handle adding a new one.

    POST with a valid form inserts a Collaborations row and redirects to the
    profile page; otherwise the page is re-rendered.
    """
    form = AddCollaborations(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Collaborations (StartDate, EndDate, Institution,
                   Department, Location, NameCollaborator, PrimaryGoal,
                   FrequencyOfInteraction, PrimaryAttribution, Academic, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.start_date.data, form.end_date.data,
                 form.institution.data, form.department.data,
                 form.location.data, form.name_collaborator.data,
                 form.primary_goal.data, form.frequency_of_interaction.data,
                 form.primary_attribution.data, form.academic.data,
                 current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('collaborations_info.html', form=form)
    collaborations_list = current_user.collab
    return render_template('collaborations_info.html', form=form, list=collaborations_list)
@app.route('/funding_info', methods=['GET', 'POST'])
@login_required
def funding_info():
    """Show the current user's funding records and handle adding a new one.

    POST with a valid form inserts a Funding row and redirects to the profile
    page; otherwise the page is re-rendered.
    """
    form = AddFundingForm(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Funding (StartDate, EndDate, AmountFunding,
                   FundingBody, FundingProgramme, Stats, PrimaryAttribution,
                   ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.start_date.data, form.end_date.data,
                 form.amount_funding.data, form.funding_body.data,
                 form.funding_programme.data, form.stats.data,
                 form.primary_attribution.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('funding_info.html', form=form)
    funding_list = current_user.funding
    return render_template('funding_info.html', form=form, list=funding_list)
@app.route('/publications_info', methods=['GET', 'POST'])
@login_required
def publications_info():
    """Show the current user's publications and handle adding a new one.

    POST with a valid form inserts a Publications row and redirects to the
    profile page; otherwise the page is re-rendered.
    """
    form = AddPublications(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Publications (Year, Type, Title, Name, Status,
                   DOI, PrimaryAttribution, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.year.data, form.type.data, form.title.data,
                 form.name.data, form.status.data, form.doi.data,
                 form.primary_attribution.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('publications_info.html', form=form)
    else:
        publications_list = current_user.publications
        return render_template('publications_info.html', form=form, list=publications_list)
@app.route('/educationInfo', methods=['GET', 'POST'])
@login_required
def educationInfo():
    """Show the current user's education history and handle adding an entry.

    POST with a valid form inserts an Education row and redirects back here;
    otherwise the page (form plus existing entries) is rendered.
    """
    form = AddEducationForm(request.form)
    education_list = current_user.education
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Education (Degree, Institution, Location, Year,
                   Field, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s);""",
                (form.degree.data, form.institution.data, form.location.data,
                 form.year.data, form.field.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('educationInfo'))
    return render_template('educationInfo.html', form=form, list=education_list)
@app.route('/employmentInfo', methods=['GET', 'POST'])
@login_required
def employmentInfo():
    """Show the current user's employment history and handle adding an entry.

    POST with a valid form inserts an Employment row and redirects back here;
    otherwise the page (form plus existing entries) is rendered.
    """
    form = AddEmploymentForm(request.form)
    employment_list = current_user.employment
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Employment (Company, Location, Years, ORCID)
                   VALUES (%s, %s, %s, %s);""",
                (form.company.data, form.location.data, form.years.data,
                 current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('employmentInfo'))
    return render_template('employmentInfo.html', form=form, list=employment_list)
@app.route('/societiesInfo', methods=['GET', 'POST'])
@login_required
def societiesInfo():
    """Show the current user's society memberships and handle adding one.

    POST with a valid form inserts a Societies row and redirects back here;
    otherwise the page (form plus existing entries) is rendered.
    """
    form = AddSocietiesForm(request.form)
    societies_list = current_user.societies
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Societies (StartDate, EndDate, Society,
                   Membership, Status, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s);""",
                (form.start_date.data, form.end_date.data, form.society.data,
                 form.membership.data, form.status.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('societiesInfo'))
    return render_template('societiesInfo.html', form=form, list=societies_list)
@app.route('/organised_events', methods=['GET', 'POST'])
@login_required
def organised_events():
    """Show the current user's organised events and handle adding a new one.

    POST with a valid form inserts an OrganisedEvents row and redirects to
    the profile page; otherwise the page is re-rendered.
    """
    form = AddOrganisedEvents(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO OrganisedEvents (StartDate, EndDate, Title,
                   Type, Role, Location, PrimaryAttribution, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.start_date.data, form.end_date.data, form.title.data,
                 form.type.data, form.role.data, form.location.data,
                 form.primary_attribution.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('organised_events.html', form=form)
    organised_events_list = current_user.organised_events
    return render_template('organised_events.html', form=form, list=organised_events_list)
@app.route('/education_and_public_engagement', methods=['GET', 'POST'])
@login_required
def education_and_public_engagement():
    """Show the user's education/public-engagement activities; handle adds.

    POST with a valid form inserts an EducationAndPublicEngagement row and
    redirects to the profile page; otherwise the page is re-rendered.
    """
    form = AddEducationAndPublicEngagement(request.form)
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO EducationAndPublicEngagement (Name, StartDate,
                   EndDate, Activity, Topic, TargetArea, PrimaryAttribution,
                   ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s);""",
                (form.name.data, form.start_date.data, form.end_date.data,
                 form.activity.data, form.topic.data, form.target_area.data,
                 form.primary_attribution.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('education_and_public_engagement.html', form=form)
    education_and_public_engagement_list = current_user.edu_and_public_engagement
    return render_template('education_and_public_engagement.html', form=form,
                           list=education_and_public_engagement_list)
@app.route('/awardsInfo', methods=['GET', 'POST'])
@login_required
def awardsInfo():
    """Show the current user's awards and handle adding a new one.

    POST with a valid form inserts an Awards row and redirects back here;
    otherwise the page (form plus existing awards) is rendered.
    """
    form = AddAwardsForm(request.form)
    awards_list = current_user.awards
    if request.method == 'POST':
        if form.validate_on_submit():
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO Awards (Year, AwardingBody, Details, TeamMember,
                   ORCID)
                   VALUES (%s, %s, %s, %s, %s);""",
                (form.year.data, form.award_body.data, form.details.data,
                 form.team_member.data, current_user.orcid),
            )
            conn.commit()
            cur.close()
            conn.close()
            return redirect(url_for('awardsInfo'))
    return render_template('awardsInfo.html', form=form, list=awards_list)
@app.route('/team_members_info', methods=['GET', 'POST'])
@login_required
def team_members_info():
    """Add a member to a team.

    Uses the team led by the current user when one exists; otherwise falls
    back to the team id supplied on the form.
    """
    form = AddTeamMembersForm(request.form)
    # Bug fix: the original compared the list returned by .all() to 0 (never
    # true) and then read .team_id off that list (AttributeError). Fetch a
    # single team instead, and branch on whether one exists.
    team = Team.query.filter_by(team_leader=current_user.orcid).first()
    if request.method == 'POST':
        if form.validate_on_submit():
            team_id = team.team_id if team else form.team_id.data
            conn = mysql.connect()  # bug fix: was `mysql.connect` (never called)
            cur = conn.cursor()
            # Parameterized query: form fields are untrusted input — never
            # interpolate them into the SQL string (SQL injection).
            cur.execute(
                """INSERT INTO TeamMembers (StartDate, DepartureDate, Name,
                   position, PrimaryAttribution, TeamID, ORCID)
                   VALUES (%s, %s, %s, %s, %s, %s, %s);""",
                (form.start_date.data, form.departure_date.data,
                 form.name.data, form.position.data,
                 form.primary_attribution.data, team_id, form.orcid.data),
            )
            conn.commit()
            cur.close()
            conn.close()
            # Bug fix: original redirected to 'team_member_info', an endpoint
            # that does not exist (url_for would raise BuildError).
            return redirect(url_for('profile'))
    return render_template('team_members_info.html', form=form)
@app.route('/impacts_info', methods=['GET', 'POST'])
@login_required
def impacts_info():
    """Show the current user's impacts and handle adding a new one.

    POST with a valid form inserts an Impacts row via the ORM and redirects
    to the profile page; otherwise the page is re-rendered.

    Bug fix: the original contained a second, duplicated POST block that
    re-inserted via raw SQL after the ORM commit. That block was broken in
    three ways — `mysql.connect` was never called, the query string was not
    an f-string (so literal '{title}' etc. would be stored), and it
    misspelled `primary_benificiary` — so every valid POST crashed. It has
    been removed; the ORM insert is the single write path.
    """
    form = AddImpactsForm()
    if request.method == 'POST':
        if form.validate_on_submit():
            impact = Impacts(
                title=form.title.data,
                category=form.category.data,
                primary_attribution=form.primary_attribution.data,
                primary_beneficiary=form.primary_beneficiary.data,
                ORCID=current_user.orcid,
            )
            db.session.add(impact)
            db.session.commit()
            return redirect(url_for('profile'))
        # Validation failed: re-render the form without the list.
        return render_template('impacts_info.html', form=form)
    else:
        impacts_list = current_user.impacts
        return render_template('impacts_info.html', form=form, list=impacts_list)
@app.route('/projects')
@login_required
def projects():
    """Render the projects page: the current user's approved submissions
    together with their funding records, scientific and financial reports,
    and the teams they lead."""
    orcid = current_user.orcid
    approved = Submissions.query.filter_by(user=orcid, status="Approved").all()
    fundings = Funding.query.filter_by(orcid=orcid).all()
    sci_reports = Report.query.filter_by(ORCID=orcid, type="Scientific").all()
    fin_reports = Report.query.filter_by(ORCID=orcid, type="Financial").all()
    led_teams = Team.query.filter_by(team_leader=orcid).all()
    return render_template(
        "projects.html",
        projects=approved,
        fundings=fundings,
        scientific_reports=sci_reports,
        financial_reports=fin_reports,
        teams=led_teams,
    )
@app.route('/manage_team', methods=["GET", "POST"])
@login_required
def manage_team():
    """Manage the team attached to one of the current user's submissions.

    The submission id comes from the ``id`` query-string argument.  If the
    current user already leads a team for that submission, the page supports
    adding, editing and removing team members; otherwise it offers a form to
    create the team.
    """
    # Submission id from the query string (?id=...).
    id = request.args.get("id")
    team = Team.query.filter_by(team_leader=current_user.orcid, subid=id).first()
    # Distinct prefixes keep the four forms' field names from colliding.
    addform = AddTeamMemberForm(prefix="addform")
    createform = CreateTeamForm(prefix="createform")
    editform = EditTeamMemberForm(prefix="editform")
    deleteform = DeleteTeamMemberForm(prefix="deleteform")
    if team:
        # Add a member: display name is built from the researcher record,
        # primary attribution copied from the submission's location.
        if addform.submit.data and addform.validate():
            project = Submissions.query.filter_by(subid=id).first()
            researcher = User.query.filter_by(orcid=addform.ORCID.data).first()
            full_name = researcher.first_name + " " + researcher.last_name
            new_team_member = TeamMembers(start_date=addform.start_date.data, departure_date=addform.departure_date.data, name=full_name, position=addform.position.data, primary_attribution=project.location, ORCID=researcher.orcid, team_id=team.team_id)
            db.session.add(new_team_member)
        # Remove the member identified by the ORCID query-string argument.
        if deleteform.delete.data and deleteform.validate():
            orcid = request.args.get("ORCID")
            team_member = TeamMembers.query.filter_by(ORCID=orcid).first()
            db.session.delete(team_member)
        # Edit a member: only fields the user actually filled in are updated.
        if editform.submit.data and editform.validate():
            orcid = request.args.get("ORCID")
            team_member = TeamMembers.query.filter_by(ORCID=orcid).first()
            if editform.start_date.data:
                team_member.start_date = editform.start_date.data
            if editform.departure_date.data:
                team_member.departure_date = editform.departure_date.data
            if editform.position.data:
                team_member.position = editform.position.data
            if editform.primary_attribution.data:
                team_member.primary_attribution = editform.primary_attribution.data
        # One commit covers whichever of the add/delete/edit branches ran.
        db.session.commit()
        team_members = TeamMembers.query.filter_by(team_id=team.team_id).all()
        return render_template("manage_team.html", team=team, team_members=team_members, id=id, addform=addform, createform=createform, deleteform=deleteform, editform=editform)
    # No team yet for this submission: create one on request.
    if createform.create.data and createform.validate():
        team = Team(team_leader=current_user.orcid, subid=id)
        print("team created")
        db.session.add(team)
        db.session.commit()
        # NOTE(review): the form objects passed here end up serialized into
        # the redirect's query string — probably only `id` is intended.
        return redirect(url_for("manage_team", team=team, id=id, addform=addform, createform=createform))
    return render_template("manage_team.html", team=team, createform=createform, id=id, addform=addform)
@login_manager.unauthorized_handler
def unauthorized_callback():
    """Send unauthenticated visitors to sign-in, preserving the target path."""
    return redirect(f"/sign_in?next={request.path}")
@app.route('/profile')
@login_required
def profile():
    """Render the logged-in user's profile page."""
    page = 'profile.html'
    return render_template(page)
@app.route('/logout')
@login_required
def logout():
    """End the current session and return the user to the landing page."""
    logout_user()
    destination = url_for('index')
    return redirect(destination)
@app.route('/submitted')
@login_required
def submitted():
    """Confirmation page shown after a successful submission."""
    page = 'submitted.html'
    return render_template(page)
@app.route('/manage', methods=['GET', 'POST'])
@login_required
def manage():
    """Admin-only page for changing other users' roles.

    GET: list every other user with their current role.
    POST: update the selected user's role, with guards against editing
    yourself or another admin.
    """
    form = ManageForm()
    if current_user.type != "Admin":
        flash("You need to be an admin to manage others.", category="unauthorised")
        # Bug fix: the original redirected back to /manage itself, which
        # re-ran this branch and produced an infinite redirect loop for
        # non-admin users. Send them to their profile instead.
        return redirect(url_for('profile'))
    # Everyone except the current user is a candidate for a role change.
    researchers = [u for u in User.query.all() if u.orcid != current_user.orcid]
    form.researcher.choices = [
        (user.orcid, "%s - %s %s. Role = %s" % (user.orcid, user.first_name, user.last_name, user.type))
        for user in researchers
    ]
    if request.method == "POST" and form.submit.data:
        researcher = User.query.filter_by(orcid=form.researcher.data).first()
        # Defensive guard; the current user is excluded from the choices above.
        if researcher.orcid == current_user.orcid:
            flash("You can't change your own role unfortunately", category="unauthorised")
            return redirect(url_for('manage'))
        if researcher.type == "Admin":
            flash("You can't change another admin's role", category="unauthorised")
            return redirect(url_for('manage'))
        researcher.type = form.role.data
        db.session.commit()
        flash("Role has been updated", category="success")
        return redirect(url_for('manage'))
    return render_template('manage.html', form=form, researchers=researchers)
@app.route("/grants")
@login_required
def grants():
#Show the calls that have been approved.For that user
#For that application they need to add Team members[a link]
#when grant is approved by admin we need to insert stuff into team table
#the page will look like profile page
#with application info [new page]
#with the team info as well
#Reports
#fin and sci
return render_template("grants.html")
def getProfileInfo():
    """Count how many of the current user's CV sections are still empty.

    Returns an int in 0..12: one point per section whose related collection
    has no entries.

    BUG FIX: the original read ``current_user.education`` for the
    *employment* section as well, so an empty employment history was never
    counted (and a non-empty one was mis-reported).
    """
    sections = (
        "education",
        "employment",
        "societies",
        "awards",
        "funding",
        "impacts",
        "inno_and_comm",
        "publications",
        "presentations",
        "collab",
        "organised_events",
        "edu_and_public_engagement",
    )
    # One point per empty section, mirroring the original's ``len(x) < 1`` test.
    return sum(1 for name in sections if len(getattr(current_user, name)) < 1)
if __name__ == "__main__":
app.run(debug=True)
| johnny1304/Team9 | app.py | app.py | py | 123,592 | python | en | code | 2 | github-code | 36 | [
{
"api_name": "flask.Flask",
"line_number": 22,
"usage_type": "call"
},
{
"api_name": "flask_bootstrap.Bootstrap",
"line_number": 27,
"usage_type": "call"
},
{
"api_name": "flask_login.LoginManager",
"line_number": 28,
"usage_type": "call"
},
{
"api_name": "flask_... |
34647062894 | import os
import socket
import threading
import time
import tkinter as tk
import tkinter.messagebox
from io import BytesIO
import customtkinter
import pafy
import pyperclip
import vlc
from PIL import ImageTk, Image
from pyngrok import ngrok
from pytube import Playlist
from pytube import YouTube
from requests import get
# Shared state served to listen-along clients:
# [current video URL, "True"/"False" paused flag, playback position in seconds].
# (The module-level ``global`` statements below are no-ops kept from the original.)
global Video_info
Video_info = ["None", "False", "Video_time_sek"]
global sock
global Hoster
global vra
Hoster = 0  # NOTE(review): assigned once and never read in this file
customtkinter.set_appearance_mode("dark")
customtkinter.set_default_color_theme("sallify.json")
print(os.path.dirname(os.path.abspath(__file__)))
global False_Call
def play_song(url, volume):
    """Start audio-only playback of the YouTube URL through VLC.

    Records the URL in the shared ``Video_info`` state and rebinds the
    module-level ``player`` and ``video`` objects used by the rest of the UI.
    """
    global Video_info
    global player
    global video
    Video_info[0] = url
    video = pafy.new(url)
    audio_stream = video.getbestaudio()
    vlc_instance = vlc.Instance()
    player = vlc_instance.media_player_new()
    media = vlc_instance.media_new(audio_stream.url)
    media.get_mrl()
    player.set_media(media)
    player.play()
    player.audio_set_volume(int(volume))
def start_server():
    """Host side of "listen along": expose a local TCP socket through an
    ngrok tunnel and serve the shared ``Video_info`` state to each client
    that connects, until interrupted."""
    global sock
    # NOTE(review): hard-coded auth token committed to source -- should be
    # moved to an environment variable or config file.
    ngrok.set_auth_token("2AYWrd8D7u9WAhpit1cRzCclzt4_47YboKYHbaAFqx2VvbQbU")
    host = socket.gethostname()
    port = 1509
    # Create a TCP socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Bind a local socket to the port
    server_address = ("", port)
    sock.bind(server_address)
    sock.listen(1)
    # Open a ngrok tunnel to the socket
    public_url = ngrok.connect(port, "tcp", options={"remote_addr": "{}:{}".format(host, port)})
    print("ngrok tunnel \"{}\" -> \"tcp://127.0.0.1:{}/\"".format(public_url, port))
    global code
    # Join code shown to clients: characters sliced out of the ngrok URL by
    # fixed offsets.  NOTE(review): assumes a fixed URL layout -- fragile.
    code = str(public_url)[20] + str(public_url)[34:40]
    global Video_info
    while True:
        connection = None
        try:
            # Wait for a connection
            print("\nWaiting for a connection ...")
            connection, client_address = sock.accept()
            print("... connection established from {}".format(client_address))
            # Receive the message, send a response
            while True:
                data = connection.recv(1024)
                if data:
                    # Decoded request text; currently never inspected.
                    Request = data.decode("utf-8")
                    print("Sending: {}".format(str(Video_info)))
                    connection.sendall(str(Video_info).encode("utf-8"))
                else:
                    break
        except KeyboardInterrupt:
            print(" Shutting down server.")
            if connection:
                connection.close()
            break
    sock.close()
"""with open('Playlists.txt', 'rb') as f:
Songs = load(f)"""
Songs_Full = []
Songs = []
url = "https://youtu.be/1VD17MgCMhM"
video = YouTube(url)
Songs_Half = video.description
for i in Songs_Half.split(","):
Songs.append(i.split("¦"))
root = customtkinter.CTk()
root.title('Sallify')
root.geometry("1000x500+50+50")
root.iconbitmap("Assets/Sqaure.ico")
Icon = ImageTk.PhotoImage(Image.open("Assets/Green.png"))
panel = tk.Label(root, image=Icon, bg="black")
Ican = ImageTk.PhotoImage(Image.open("Assets/Reload_white.png"))
img = []
def load_songs():
    """Background task: fetch the first-video thumbnail of every playlist in
    ``Songs`` and cache it (as a Tk PhotoImage) in the module-level ``img``."""
    try:
        for i in Songs:
            p = Playlist(i[1])            # i = [display name, playlist URL]
            y = YouTube(p.video_urls[0])
            url = y.thumbnail_url
            response = get(url)
            img_data = response.content
            img.append(ImageTk.PhotoImage(Image.open(BytesIO(img_data))))
    except:
        # Any network/parse failure aborts thumbnail loading entirely.
        # NOTE(review): ``showerror(root, ...)`` passes the window object as
        # the positional *title* argument -- likely unintended; confirm.
        tkinter.messagebox.showerror(root, title="Conection error",message="Please make wifi good and restart")

# Load thumbnails off the UI thread so the window appears immediately.
load_s = threading.Thread(target=load_songs)
load_s.start()
def showimg(e):
    """Listbox callback: show the thumbnail of the selected playlist, falling
    back to the reload icon if its image has not been fetched yet."""
    selection = lst.curselection()
    selected_name = lst.get(selection)
    for index, song in enumerate(Songs):
        if song[0] != selected_name:
            continue
        try:
            panel.configure(image=img[index])
            panel.pack(side="bottom", fill="both", expand="yes")
            panel.update()
        except:
            panel.configure(image=Ican)
            panel.pack(side="bottom", fill="both", expand="yes")
            panel.update()
def Start_playlist():
    """Open the player window for the playlist currently selected in ``lst``.

    Builds a second CTk window with the playlist's songs, play and
    pause/resume buttons, a volume entry + slider and a seek bar, then
    enters that window's own mainloop.
    """
    subroot = customtkinter.CTk()
    subroot.geometry("490x340")
    subroot.grid_columnconfigure(0, weight=1)
    subroot.grid_rowconfigure(0, weight=1)
    playlist = tk.Listbox(subroot, selectmode=tk.SINGLE, bg="black", fg="white", width=40)
    playlist.config(font=('Roboto', 15))
    playlist.grid(columnspan=10, sticky="news")
    # Display name of the playlist picked in the main window.
    Z = lst.curselection()
    Aname = lst.get(Z)
    def threaded_Loading():
        # Fill the listbox with "<title><1000 spaces>,<url>" entries: the
        # padding pushes the URL out of view while keeping it retrievable by
        # splitting the row text on ','.
        for e in range(len(Songs)):
            if Songs[e][0] == Aname:
                p = Playlist(Songs[e][1])
                print(p.title)
                for Url in p.video_urls:
                    Name = YouTube(Url).title
                    playlist.insert(tk.END, Name+" "*1000+","+Url)
    a = threading.Thread(target=threaded_Loading)
    a.start()
    def test(e):
        # Debug trace for listbox selections.
        print("select")
    playlist.bind("<<ListboxSelect>>", test)
    def Load_Song(Link):
        # Start playing ``Link`` and reset the seek bar; used by auto-advance.
        global Video_info
        Video_info[0] = Link
        try:
            player.stop()
        except:
            pass  # no song has been played yet
        play_song(Link, Volumeslider.get())
        Timeslider.config(from_=0, to=video.length)
        Timeslider.set(0)
        z = threading.Thread(target=Has_ended)
        z.start()
    def Has_ended():
        # Watchdog thread: every 5s, sync the seek bar and the shared
        # position, and auto-advance to the next entry when a song ends.
        global Video_info
        global False_Call
        while True:
            False_Call = True  # marks the next slider move as programmatic
            global Video_info
            Timeslider.set(video.length*(player.get_position()))
            Video_info[2] = round(video.length*(player.get_position()))
            time.sleep(5)
            try:
                if str(player.get_state()) == "State.Ended":
                    # Scan the listbox for the finished title, then play the
                    # following entry (wrapping to the first on failure).
                    Next_int = -1
                    print(player.get_state())
                    while True:
                        Next_int += 1
                        if video.title in str(playlist.get(Next_int)):
                            try:
                                print((playlist.get(Next_int+1)).split(",")[0])
                                Load_Song((playlist.get(Next_int+1)).split(",")[1])
                                break
                            except:
                                Load_Song((playlist.get(0)).split(",")[1])
                                break
                            # NOTE(review): unreachable -- both branches above break.
                            print("First song")
                            break
                else:
                    print(str(player.get_state()))
            except NameError as e:
                print(e)
    def playSong():
        # "play" button: start the entry currently selected in the listbox.
        try:
            player.stop()
        except:
            pass  # nothing playing yet
        Puase_Resumebtn.config(text=" ⏸ ")
        pay = playlist.curselection()
        gayname = playlist.get(pay)
        play_song(gayname.split(",")[1], Volumeslider.get())
        Timeslider.config(from_=0, to=video.length)
        Timeslider.set(0)
        z = threading.Thread(target=Has_ended)
        z.start()
        Volumeslider.set(player.audio_get_volume())
    def Puase_ResumeSong():
        # Toggle pause/resume and mirror the state into Video_info[1].
        print(player.is_playing())
        if player.is_playing() == 1:
            player.pause()
            Video_info[1] = "True"
            Puase_Resumebtn.config(text=" ▶ ")
        elif player.is_playing() == 0:
            Video_info[1] = "False"
            Puase_Resumebtn.config(text=" ⏸ ")
            player.pause()
    def Change_volume(e):
        # Volume slider callback: the 1-100 slider value is scaled x5 for VLC.
        try:
            text = ("Volume : "+str(round(e)))
            player.audio_set_volume(int(e)*5)
            VolumeEntry.delete(0,tk.END)
            VolumeEntry.insert(0,text)
        except:
            pass  # player not created yet
    def Change_Loacation(e):
        # Seek-bar callback.  ``False_Call`` distinguishes the watchdog's own
        # slider updates from a user-initiated seek.
        global False_Call
        try:
            if False_Call:
                Video_info[2] = e
                False_Call = False
            elif not False_Call:
                Video_info[2] = e
                player.set_time(int(e)*1000)  # seconds -> milliseconds
                False_Call = False
        except:
            pass
    def Enterred(e):
        # <Return> in the volume entry: accept 1-199, otherwise restore the
        # display from the player's actual volume.
        try:
            Text = int(VolumeEntry.get())
            if Text > 0 and Text < 200:
                Volumeslider.set(Text)
        except:
            text = ("Volume : " + str(round(player.audio_get_volume()/5)))
            VolumeEntry.delete(0, tk.END)
            VolumeEntry.insert(0, text)
    def CLicked(e):
        # Clicking the volume entry clears it, ready for typing.
        VolumeEntry.configure(state=tk.NORMAL)
        VolumeEntry.delete(0, tk.END)
    subroot.title('Sallify')
    subroot.iconbitmap("Assets/Sqaure.ico")
    songstatus = tk.StringVar()
    songstatus.set("choosing")
    # playlist---------------
    playbtn = customtkinter.CTkButton(subroot, text="play", command=playSong, text_font=('Roboto', 20))
    # playbtn.config(font=('arial', 20), bg="black", fg="white", padx=70, pady=6)
    playbtn.grid(row=1, rowspan=2, column=0, sticky="news")
    Puase_Resumebtn = customtkinter.CTkButton(subroot, text=" ⏸ ", command=Puase_ResumeSong)
    # Puase_Resumebtn.config(font=('arial', 20), bg="black", fg="white", padx=7, pady=6)
    Puase_Resumebtn.grid(row=1, rowspan=2, column=1, sticky="NSEW")
    # Canvy = customtkinter.CTkFrame(subroot)
    # Canvy.grid(row=1, column=2, sticky="ew", )
    VolumeEntry = customtkinter.CTkEntry(subroot,placeholder_text="Volume")
    VolumeEntry.config()
    # Volumeslider.config(font=('arial', 20), bg="black", fg="white",length=150)
    VolumeEntry.grid(row=1, column=2)
    Volumeslider = customtkinter.CTkSlider(subroot, from_=1,to=100, orient='horizontal',command=Change_volume)
    Volumeslider.set(20)
    # Volumeslider.config(font=('arial', 20), bg="black", fg="white",length=150)
    Volumeslider.grid(row=2, column=2)
    Timeslider = customtkinter.CTkSlider(subroot, from_=0,to=100, orient='horizontal',command=Change_Loacation)
    # Timeslider.config(font=('arial', 20), bg="gray", fg="white",length=450)
    Timeslider.grid(row=3, column=0,columnspan=4, sticky="NSEW")
    VolumeEntry.bind('<Button-1>', CLicked)
    VolumeEntry.bind('<Return>', Enterred)
    subroot.mainloop()
def start_threaded_playlist():
    """Open the playlist player window.

    A threaded launch was present in an earlier revision (left commented out
    in the original); the window is now opened directly.
    """
    Start_playlist()
def client_listen_along(Code):
    """Poll the host's ngrok tunnel once a second and print the shared state.

    ``Code`` is "<tunnel-id>:<port>" as produced by the host.  The loop never
    terminates on its own; run it on a daemon thread.
    """
    while True:
        time.sleep(1)
        tunnel_id, _, port_text = Code.partition(":")
        host = f"{tunnel_id}.tcp.ngrok.io"
        # BUG FIX: socket.connect() requires an int port; the original passed
        # str(...) and raised TypeError on every iteration.
        port = int(port_text)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect((host, port))
            print("Connected to {}:{}".format(host, port))
            message = "Video"
            print("Sending: {}".format(message))
            sock.sendall(message.encode("utf-8"))
            data_received = 0
            data_expected = len(message)
            while data_received < data_expected:
                data = sock.recv(1024)
                data_received += len(data)
                print(data.decode("utf-8"))
        finally:
            # Always release the socket before reconnecting (the original's
            # final close after the loop was unreachable).
            sock.close()
def Client_setup():
    """Swap the listen-along controls from host/join choice to client mode."""
    for hidden in (Host_b, Join_b):
        hidden.pack_forget()
    for shown in (Ask_code, start, Filan_audio_bar):
        shown.pack(side="right", fill=tk.X)
def start_threaded_listen_along():
    """Callback for the "Listen along" switch: show the host/join buttons
    when it is turned on; hide every listen-along widget and kill the ngrok
    tunnel when it is turned off."""
    if stream_Buton.get():
        tkinter.messagebox.showinfo(message="Please full screen window to see full gui")
        Host_b.pack(side="right", fill=tk.X)
        Join_b.pack(side="right", fill=tk.X)
    else:
        # NOTE(review): this assigns a *local* ``vra`` (no ``global``), so it
        # does not stop a running client polling loop -- confirm intent.
        vra = False
        Filan_audio_bar.pack_forget()
        Host_b.pack_forget()
        Join_b.pack_forget()
        stop.pack_forget()
        Ask_code.pack_forget()
        copyCode.pack_forget()
        start.pack_forget()
        ngrok.kill()
def start_server_setup():
    """Launch the listen-along host server on a worker thread and swap the
    controls to hosting mode (Stop + Copy code buttons)."""
    threading.Thread(target=start_server).start()
    for hidden in (Host_b, Join_b):
        hidden.pack_forget()
    stop.pack(side="right", fill=tk.X)
    copyCode.pack(side="right", fill=tk.X)
def stop_server():
    """Stop hosting: kill the ngrok tunnel, flip the switch back, and stop
    any client polling loop."""
    global vra
    global old_hoho
    ngrok.kill()
    vra = False  # ends vra_serber_vir_informasie's while-loop, if running
    stream_Buton.toggle()
    old_hoho = Video_info
    # NOTE(review): the original had ``old_hoho[1] == "False"`` here -- a
    # comparison whose result was discarded (a no-op).  If the intent was to
    # clear the paused flag it should be an assignment; the dead statement
    # (and a duplicated ``global vra``/``vra = False`` pair) was removed
    # rather than guessed at.  TODO confirm intent.
def vra_serber_vir_informasie():
    """Client polling loop ("ask server for information", Afrikaans).

    While ``vra`` is true: reconnect to the host's ngrok tunnel every second,
    fetch the host's Video_info triple, and mirror it locally -- start the
    same song, match the paused flag, and re-seek when playback drifts more
    than 5 seconds from the host's reported position.
    """
    global old_hoho
    while vra:
        print(vra)
        time.sleep(1)
        # Join code typed by the user: "<tunnel-id>:<port>".
        gagagagaag = Ask_code.get().split(":")
        host = f"{gagagagaag[0]}.tcp.ngrok.io"
        port = int(gagagagaag[1])
        pock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = (host, port)
        pock.connect(server_address)
        message = "Video"
        pock.sendall(message.encode("utf-8"))
        data_received = 0
        data_expected = len(message)
        while data_received < data_expected:
            data = pock.recv(1024)
            data_received += len(data)
        time.sleep(2)
        # Parse the server's str(list) payload back into a 3-element list.
        hohoho = data.decode("utf-8")
        hohoho = hohoho.replace(" ", "").replace("'", "").replace("[", "").replace("]", "")
        hohoho = hohoho.split(",")
        if hohoho != old_hoho:
            if hohoho[0] != old_hoho[0]:
                # New song on the host: restart playback with the same URL.
                try:
                    player.stop()
                except:
                    pass  # nothing playing yet
                play_song(hohoho[0], Filan_audio_bar.get())
                old_hoho = hohoho
            if hohoho[1] != old_hoho[1]:
                # Paused flag changed on the host: mirror it locally.
                print(str(hohoho) + " Recieved")
                print(str(old_hoho) + " had")
                old_hoho = hohoho
                if hohoho[1] == "True":
                    print("pausing")
                    if player.is_playing() == 1:
                        player.pause()
                elif hohoho[1] == "False":
                    if player.is_playing() == 0:
                        print("playing")
                        player.pause()
            if hohoho[2] != old_hoho[2]:
                # Re-seek only when local playback drifts more than 5 seconds
                # in either direction from the host's position.
                if int(hohoho[2]) - 5 > round(video.length*(player.get_position())):
                    try:
                        player.set_time(int(hohoho[2])*1000)
                    except:
                        pass
                elif int(hohoho[2]) + 5 < round(video.length*(player.get_position())):
                    try:
                        player.set_time(int(hohoho[2]) * 1000)
                    except:
                        pass
                old_hoho = hohoho
        pock.close()
    # NOTE(review): if ``vra`` is already false on entry, ``pock`` is unbound
    # here and this raises NameError -- confirm.
    pock.close()
def join_server():
    """Start mirroring the host's playback state on a background thread."""
    global vra
    print("Joining")
    vra = True
    threading.Thread(target=vra_serber_vir_informasie).start()
def Copy_code():
    """Copy the host's join code (set by start_server) to the clipboard."""
    global code
    pyperclip.copy(code)
def Final_audio_slider(e):
    """Client-mode volume slider callback; ignored until a song is playing."""
    try:
        player.audio_set_volume(int(e))
    except:
        pass  # ``player`` does not exist before the first play_song() call
def on_closing():
    """Window-close handler: kill the ngrok tunnel, stop playback, tear down
    the Tk root and exit the process."""
    print("closing")
    ngrok.kill()
    try:
        player.stop()
    except:
        pass  # player only exists once a song has been started
    root.destroy()
    # The original called quit() twice; the second call was unreachable and
    # has been removed.
    quit()
root.protocol("WM_DELETE_WINDOW", on_closing)
Filan_audio_bar = customtkinter.CTkSlider(from_=1,to=100, orient='horizontal', command=Final_audio_slider)
subbie = customtkinter.CTk()
copyCode = customtkinter.CTkButton(text="Copy code", command=Copy_code)
stop = customtkinter.CTkButton(text="Stop", command=stop_server)
start = customtkinter.CTkButton(text="Join", command=join_server)
Ask_code = customtkinter.CTkEntry(placeholder_text="Paste Code here")
Host_b = customtkinter.CTkButton(text="Host listen along", command=start_server_setup)
Join_b = customtkinter.CTkButton(text="Join listen along", command=Client_setup)
stream_Buton = customtkinter.CTkSwitch(text="Listen along",command=start_threaded_listen_along )
stream_Buton.pack(side="top",fill=tk.X)
b1 = customtkinter.CTkButton(text="Play",command=start_threaded_playlist)
b1.pack(side="bottom",fill=tk.X)
lst = tk.Listbox(root, bg="black", fg="white")
lst.pack(side="left",fill="both", ipadx=20)
for fname in Songs:
lst.insert(tk.END, fname[0])
lst.config(font=('Roboto', 12))
lst.bind("<<ListboxSelect>>", showimg)
panel.pack(side="bottom", fill="both", expand="yes", ipadx=10)
# panel.pack(side="bottom", fill="both", expand="yes")
print("done")
root.mainloop() | Salodo/Sallify.py | Sallify.py | Sallify.py | py | 15,965 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "customtkinter.set_appearance_mode",
"line_number": 27,
"usage_type": "call"
},
{
"api_name": "customtkinter.set_default_color_theme",
"line_number": 28,
"usage_type": "call"
},
{
"api_name": "os.path.dirname",
"line_number": 29,
"usage_type": "call"
},
... |
8030433043 | import base64
import io
import os
from PIL import Image, ImageDraw, ImageFont
import requests
class WelcomeCard:
    """Renders a 400x200 welcome image for a newly joined guild member."""

    # 16pt body font used for the member's name.
    BASE_FONT = ImageFont.truetype(
        os.path.abspath("ether/assets/fonts/Inter-Medium.ttf"), 16
    )
    # 24pt bold font used for the "Welcome!" headline.
    WELCOME_FONT = ImageFont.truetype(
        os.path.abspath("ether/assets/fonts/Inter-Bold.ttf"), 24
    )
    # Greyscale mask used to round the 100x100 avatar.
    MASK = Image.open("ether/assets/mask.png", "r").convert("L").resize((100, 100))

    @classmethod
    def create_card(cls, user, guild):
        """Build the welcome card for *user* and return it as a PNG wrapped
        in a fresh ``io.BytesIO``.

        ``guild`` is accepted for interface compatibility but is not used.
        """
        img = Image.new("RGBA", (400, 200), (231, 228, 220))

        # Fetch the member's avatar and paste it, rounded by MASK.
        r = requests.get(user.display_avatar.with_size(256).url)
        pp = Image.open(io.BytesIO(r.content))
        pp = pp.resize((100, 100))
        img.paste(pp, (150, 25), cls.MASK)

        draw = ImageDraw.Draw(img)
        # Welcome Text
        draw.text(
            xy=(200, 140),
            text="Welcome!",
            fill=(64, 64, 64),
            font=cls.WELCOME_FONT,
            anchor="ma",
        )
        # Name (truncated to 20 characters)
        draw.text(
            xy=(200, 170),
            text=f"{user.display_name[:20]}",
            fill=(64, 64, 64),
            font=cls.BASE_FONT,
            anchor="ma",
        )

        # FIX: the original base64-encoded the PNG and immediately decoded it
        # back before wrapping it in BytesIO -- a pointless round-trip.  The
        # returned stream contains the identical PNG bytes.
        with io.BytesIO() as buffer:
            img.save(buffer, format="PNG", quality=75)
            return io.BytesIO(buffer.getvalue())
| Ether-DiscordBot/Ether-Bot | ether/cogs/event/welcomecard.py | welcomecard.py | py | 1,467 | python | en | code | 4 | github-code | 1 | [
{
"api_name": "PIL.ImageFont.truetype",
"line_number": 10,
"usage_type": "call"
},
{
"api_name": "PIL.ImageFont",
"line_number": 10,
"usage_type": "name"
},
{
"api_name": "os.path.abspath",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "os.path",
"l... |
7293405146 | import requests
import urllib.parse as urlparse
import json,sys
from pprint import pprint
# Characters appended to query parameters to probe for error-based SQL injection.
isErrorbased = ['"', "'", '--']
# Accumulates Postman-collection items whose request method is GET.
isJson = []
# ANSI colour escape codes for console output.
CYELL = '\033[1;93m'
CENDYELL = '\033[0m'
CGRE = '\033[1;92m'
CYAN = '\033[1;36m'
RED = '\033[1;31m'
class SimpleSqlCheck():
    """Rudimentary SQL-injection reconnaissance helper.

    Two modes, chosen by CheckSqlInjection():
      * Postman-collection mode (``isLocation`` is a file path): collect every
        GET request from the collection and print their raw URLs;
      * single-URL mode (``isLocation`` is False): fetch the target once and
        start an (unfinished) error-based check of its query string.
    """
    def __init__(self, isUrl, isLocation):
        # isUrl: target URL to probe.
        # isLocation: path to a Postman JSON export, or False for URL mode.
        self.isUrl = isUrl
        self.isLocation = isLocation
    def ParsingJson(self):
        """Load the Postman collection and append every GET item to the
        module-level ``isJson`` list; exits the process on missing input."""
        if (self.isLocation == False):
            print (RED,"[WARNING] Please add specifict file json location example -j /home/user/api.json",CENDYELL)
            sys.exit(0)
        else :
            print ("[INFO] Parsing Json where method GET ..")
            try:
                with open(self.isLocation) as f:
                    data = json.load(f)
                isItems = data['item']
                for items in isItems:
                    try:
                        # Each top-level item may be a folder of requests.
                        item = items['item']
                    except (TypeError, KeyError):
                        pass
                    for i in item:
                        try:
                            isMethod = i['request']['method']
                            if (isMethod == 'GET'):
                                isJson.append(i)
                        except (TypeError, KeyError):
                            pass
            except (FileNotFoundError) :
                print (RED,"[WARNING] Please add specifict file json location example -j /home/user/api.json ", CENDYELL)
                sys.exit(0)
    def isCheckNormalQuery(self):
        """Fetch the target once (with userToken=null) and return its
        Content-Length header; exits the process on a request error.

        NOTE(review): implicitly returns None when the status is not 200."""
        try:
            # NOTE(review): despite the name, this is sent as *query params*.
            isHeaders = { 'userToken' : 'null'}
            isStatus = (requests.get(self.isUrl, params=isHeaders , timeout=10))
            if (isStatus.status_code == 200):
                print ("[INFO] status OK")
                return isStatus.headers['Content-Length']
        except Exception as e:
            print (RED,"[WARNING] Oops Request timeout ",e, CENDYELL)
            sys.exit(0)
    def isParsingUrl(self):
        """Print the raw URL of every GET item gathered by ParsingJson()."""
        print (CYAN,"[INFO] is Parsing url ..", CENDYELL)
        # pprint (isJson)
        print (CYAN,"[INFO] Is url detected ..", CENDYELL)
        for isurl in isJson:
            print (isurl['request']['url']['raw'])
    def IsQueryErrors(self, isContentLength):
        """Begin the error-based check.  Currently only parses and prints the
        query string; the payload-mutation logic below is commented out and
        ``isContentLength`` is not yet used."""
        print ("[INFO] Checking Error Based ..")
        # isQueryParsed = urlparse.urlparse(self.isUrl).query
        # isQuery = isQueryParsed.split('&')
        # print(isQuery)
        parsed = urlparse.urlparse(self.isUrl).query
        print (urlparse.parse_qs(parsed))
        # querys = parsed.query.split("&")
        # result = []
        # for query in querys:
        # for pairs in isErrorbased :
        # print (query)
        # new_query = "&".join([ "{}{}".format(query, pairs)])
        # print (new_query)
        # parsed = parsed._replace(query=new_query)
        # result.append(urlparse.urlunparse(parsed))
        # print (result)
    def CheckSqlInjection(self):
        """Entry point: dispatch to collection mode or single-URL mode."""
        if (self.isLocation != False):
            self.ParsingJson()
            self.isParsingUrl()
        else:
            lenght = self.isCheckNormalQuery()
            self.IsQueryErrors(lenght)
{
"api_name": "sys.exit",
"line_number": 26,
"usage_type": "call"
},
{
"api_name": "json.load",
"line_number": 31,
"usage_type": "call"
},
{
"api_name": "sys.exit",
"line_number": 48,
"usage_type": "call"
},
{
"api_name": "requests.get",
"line_number": 53,
... |
73574411873 | from django.shortcuts import render,get_object_or_404
from django.http import HttpResponse
# Create your views here.
from .serializer import CustomerSerializer, ProductSerializer, SubscriptionSerializer
from .models import Customer, Product, Subscription
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET', 'POST'])
def subscription_list(request):
    """List every subscription, or create a new one from the posted payload."""
    if request.method == 'POST':
        incoming = SubscriptionSerializer(data=request.data)
        if not incoming.is_valid():
            return Response(incoming.errors, status=status.HTTP_400_BAD_REQUEST)
        incoming.save()
        return Response(incoming.data, status=status.HTTP_201_CREATED)
    if request.method == 'GET':
        queryset = Subscription.objects.all()
        return Response(SubscriptionSerializer(queryset, many=True).data)
@api_view(['GET', 'PATCH'])
def subscription_detail(request, pk):
    """Retrieve or update a single subscription by primary key.

    Returns 404 when no subscription with that pk exists, and 400 when a
    PATCH payload fails validation.
    """
    try:
        snippet = Subscription.objects.get(pk=pk)
    except Subscription.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)

    if request.method == 'GET':
        # BUG FIX: the original instantiated the ``Subscription`` model here
        # instead of ``SubscriptionSerializer``, so GET could not serialize
        # the object.
        serializer = SubscriptionSerializer(snippet)
        return Response(serializer.data)

    elif request.method == 'PATCH':
        # NOTE(review): a PATCH usually passes ``partial=True``; the original
        # performed a full update, which is preserved here.  TODO confirm.
        serializer = SubscriptionSerializer(snippet, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'POST'])
def customer_list(request):
    """List all customers, or register a new customer from the request body."""
    if request.method == 'GET':
        serializer = CustomerSerializer(Customer.objects.all(), many=True)
        return Response(serializer.data)
    elif request.method == 'POST':
        incoming = CustomerSerializer(data=request.data)
        if not incoming.is_valid():
            return Response(incoming.errors, status=status.HTTP_400_BAD_REQUEST)
        incoming.save()
        return Response(incoming.data, status=status.HTTP_201_CREATED)
@api_view(['GET', 'POST'])
def product_list(request):
    """List all products, or create a new product from the request body."""
    if request.method == 'GET':
        return Response(ProductSerializer(Product.objects.all(), many=True).data)
    elif request.method == 'POST':
        incoming = ProductSerializer(data=request.data)
        if not incoming.is_valid():
            return Response(incoming.errors, status=status.HTTP_400_BAD_REQUEST)
        incoming.save()
        return Response(incoming.data, status=status.HTTP_201_CREATED)
@api_view(['GET'])
def subscription_check(request, customer_id, product_name):
    """Return the subscription matching (customer_id, product_name); 404 if absent."""
    if request.method == 'GET':
        match = get_object_or_404(
            Subscription, customer_id=customer_id, product_name=product_name
        )
        return Response(SubscriptionSerializer(match).data)
{
"api_name": "models.Subscription.objects.all",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "models.Subscription.objects",
"line_number": 17,
"usage_type": "attribute"
},
{
"api_name": "models.Subscription",
"line_number": 17,
"usage_type": "name"
},
{
... |
28154078176 | import os, os.path
import string
import cherrypy
import datetime
import requests
import lxml
import cssselect
import lxml.html
import json
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
#from email.MIMEBase import MIMEBase
from pprint import pprint
class Data(object):
    """Value object for one scraped tender: its title, description and link."""

    def __init__(self, title, description, link):
        # Plain record: store the three fields as-is.
        self.title, self.description, self.link = title, description, link
class SendMail(object):
    """Stub mail sender; currently only prints trace messages (in French)."""
    def __init__(self):
        # "envoi de mail" = "sending mail"
        print('envoi de mail')
    def methodetest(self):
        # "je suis dans la méthode test" = "I am in the test method"
        print('je suis dans la méthode test')
class MonSiteWeb(object):
global htmlsite
global variable
global tabtitle
global tabdesc
global tablink
global tabind
#global tabdata
global listededonnee
def methodetest(self):
print('je suis dans la méthode test')
@cherrypy.expose
def index(self):
now = datetime.datetime.now()
entetesite = ''' <!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="">
<meta name="author" content="">
<title>Appel d'offre</title>
<!-- Bootstrap core CSS -->
<link href="/static/bootstrap-3.3.7-dist/vendor/bootstrap/css/bootstrap.min.css" rel="stylesheet">
<!-- Custom styles for this template -->
<link href="/static/bootstrap-3.3.7-dist/css/scrolling-nav.css" rel="stylesheet">
<link rel="stylesheet" type="text/css" href="/static/bootstrap-3.3.7-dist/DataTables/datatables.min.css"/>
</head>
<body id="page-top">
<!-- Navigation -->
<nav class="navbar navbar-expand-lg navbar-dark bg-dark fixed-top" id="mainNav">
<div class="container">
<a class="navbar-brand js-scroll-trigger" href="#page-top"><h4>Socitech</h4></a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarResponsive" aria-controls="navbarResponsive" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarResponsive">
<ul class="navbar-nav ml-auto">
<li class="nav-item">
<a class="nav-link js-scroll-trigger" href="#about">About</a>
</li>
<li class="nav-item">
<a class="nav-link js-scroll-trigger" href="#services">Ajouter un filtre</a>
</li>
<li class="nav-item">
<a class="nav-link js-scroll-trigger" href="methodetest">Action</a>
</li>
</ul>
</div>
</div>
</nav>
<header class="bg-primary text-white">
<div class="container text-center">
<h1> Appel d'offre venant du site global tender</h1>
<p class="lead">resultat du "data scraping" du site global tender</p>
</div>
</header>
<section id="about">
<div class="container-fluid">
<div class="row">
<div class="col-lg-12 mx-auto">
<h2>Resultat de la recherche</h2>
<button type="button" class="btn btn-primary" data-toggle="modal" data-target="#exampleModalLong">
Envoi de la selection
</button>
<table id="example" class="display" style="width:100%">
<thead><tr><th>Date</th><th>Title</th><th>Description</th><th>Link</th></tr></thead>
<tbody>'''
table = "<tr><td>{}</td><td>{}</td><td>{}</td><td><a href=\"{}\">{} </a></td></tr>"
footer = '''
</tbody></table>
</div>
</div>
</div>
</section>
<div class="modal fade" id="exampleModalLong" tabindex="-1" role="dialog" aria-labelledby="exampleModalLongTitle" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Contacts</h5></br>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<p>Ajoujer un Contact</p></br>
<form>
<div class="form-row">
<div class="col-lg-6">
<label for="name">Name :</label>
<input type="text" class="form-control" id="name" placeholder="Name" required>
</div>
<div class="col-lg-6">
<label for="email">Email : </label>
<input type="email" class="form-control" id="email" placeholder="EMAIL" required>
</div>
<div class="col-lg-6">
<label for="telephone">Telephone :</label>
<input type="tel" class="form-control" id="telephone" placeholder="Telephone" required>
</div></br>
<div class="col-lg-10">
</br>
<button type="button" class="btn btn-primary" id="addContact">Add Contacts</button>
</div>
</div>
</form>
<hr/>
<table id="adresse" class="display" style="width:100%">
<thead>
<tr><th>NOMS</th><th>EMAIL</th><th>NUMERO</th></tr>
</thead>
<tbody>
<tr><td>Rudy</td><td>rudy@yahoo.fr</td><td>671402318</td></tr>
</tbody>
</table>
</div>
<div class="modal-footer">
<button id="moveContact" type="button" class="btn btn-primary">move selected row</button>
<button id="sendEmail" type="button" class="btn btn-primary">send to selected row</button>
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<!-- Footer -->
<footer class="py-5 bg-dark">
<div class="container">
<p class="m-0 text-center text-white">Copyright © Your Website 2017</p>
</div>
<!-- /.container -->
</footer>
<!-- Bootstrap core JavaScript -->
<script src="/static/bootstrap-3.3.7-dist/vendor/jquery/jquery.min.js"></script>
<script src="/static/bootstrap-3.3.7-dist/vendor/bootstrap/js/bootstrap.bundle.min.js"></script>
<!-- Plugin JavaScript -->
<script src="/static/bootstrap-3.3.7-dist/vendor/jquery-easing/jquery.easing.min.js"></script>
<!-- Custom JavaScript for this theme -->
<script src="/static/bootstrap-3.3.7-dist/js/scrolling-nav.js"></script>
<script type="text/javascript" src="/static/bootstrap-3.3.7-dist/DataTables/datatables.min.js"></script>
<script type="text/javascript" charset="utf-8">
$(document).ready(function() {
var titre = '' ;
var description = '' ;
var lien = '' ;
var olien = "" ;
function Object(titre, description, lien) {
this.titre = titre;
this.description = description;
this.lien = lien;
}
var init = '' ;
var end = '' ;
function Add(init,end){
this.init = init;
this.end = end;
}
var table = $('#example').DataTable();
$('#example tbody').on( 'click', 'tr', function () {
$(this).toggleClass('selected');
} );
$('#adresse tbody').on( 'click', 'tr', function () {
$(this).toggleClass('selected');
} );
$('#addContact').click( function () {
var nom = $('#name').val();
var email = $('#email').val();
var telephone = $('#telephone').val();
var usertable = $('#adresse').DataTable();
if(nom!='' && email!='' && telephone!=''){
usertable.row.add( [
nom,
email,
telephone,
] ).draw( false );
}
} );
$('#moveContact').click( function () {
//alert(nom + ' ' + email + ' ' + telephone );
var usertable = $('#adresse').DataTable();
var rows = usertable.rows( '.selected' ).data();
var indice ;
var ttt = '' ;
for(indice = 0 ; indice < rows.length ; indice++ )
usertable.rows( '.selected' ).remove().draw();
});
$('#sendEmail').click(function (){
var jsontable = $('#example').DataTable();
var jsontableaddresse = $('#adresse').DataTable();
var rows = jsontable.rows( '.selected' ).data();
var rowsaddresse = jsontableaddresse.rows( '.selected' ).data();
var indice ;
var indiceaddresse ;
var object ={};
var add = {} ;
var tabObject ={};
var tabaddresse = {};
//liste des addresses où iront les donnees
var test = rowsaddresse[0][1] ;
for(indiceaddresse = 0 ; indiceaddresse < rowsaddresse.length ; indiceaddresse++){
var chn = rowsaddresse[indiceaddresse][1] ;
var aropos = chn.indexOf('@') ;
var init = chn.slice(0,aropos) ;
var end = chn.slice(aropos+1) ;
add = new Add(init,end);
tabaddresse[indiceaddresse] = add;
}
var jsoAd = JSON.stringify(tabaddresse);
var eltselectAd = rowsaddresse.length ;
//liste des donnees a envoyer
for(indice = 0 ; indice < rows.length ; indice++){
var str = String(rows[indice][3]) ;
var nstr = str.substr(156) ;
var chn = nstr.slice(0,9) ;
object = new Object(rows[indice][1],rows[indice][2],chn);
tabObject[indice] = object ;
}
var resultat = {};
resultat['valeur'] = tabObject ;
var eltselect = rows.length ;
var jsO = JSON.stringify(tabObject); //{"action":"valeur"};//tabObject
$.ajax({
url : 'sendMail',
type : 'POST',
dataType: 'application/json',
data : 'donnee=' + jsO + '&qtdonnee=' + eltselect + '&addresse=' + jsoAd + '&qtAddonnee=' + eltselectAd,
success : function(code, statut){
$('#exampleModalLong').modal('hide');
},
error : function(resultat, statut, erreur){
},
complete : function(resultat, statut){
$('#exampleModalLong').modal('hide');
}
});
});
} );
</script>
<script type="text/javascript">
// For demo to fit into DataTables site builder...
$('#example')
.removeClass( 'display' )
.addClass('table table-striped table-bordered');
$('#adresse')
.removeClass( 'display' )
.addClass('table table-striped table-bordered');
</script>
</body>
</html>
'''
tabtitle = []
tabdesc = []
tablink = []
tabind = []
global tabdata
txt = 'ma variable'
indice = 0
url = 'https://www.globaltenders.com/tenders-cameroon.php'
while(url !='#'):
reponse = requests.get(url)#'http://www.globaltenders.com/'
tree = lxml.html.fromstring(reponse.text)
tdElems = tree.xpath('//table/tr/td[@style="width:70%; vertical-align:top; text-align:left;"]')#|//table/tr/td/a/@href
tdElink = tree.xpath('//table/tr/td/a/@href')
taille = len(tdElems)
indice = 0
intitule =''
adresse = ''
mylist = list()
mydescription = list()
mytitle = list()
listData = list()
while (indice < taille):
if(tdElems[indice].text_content() == ' Technology Hardware and Equipment'):
ltail = indice//2
intitule = ' Technology Hardware and Equipment'
adresse = tdElink[ltail]
mylist.append(adresse)
val = indice+1
mydescription.append(tdElems[val].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[val].text_content(),adresse)
listData.append(data)
print(tdElems[indice].text_content())
#print(adresse)
elif(tdElems[indice].text_content() == ' Smart Cards and other Access Control system , Printing and publishing'):
ltailt = indice//2
intitulet = tdElems[indice].text_content()
adresset = tdElink[ltailt]
mylist.append(adresset)
val = indice+1
mydescription.append(tdElems[val].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[val].text_content(),adresset)
listData.append(data)
print(tdElems[indice].text_content())
#print(adresset)
elif(tdElems[indice].text_content() == ' Software Services'):
ltailf = indice//2
intitulef = tdElems[indice].text_content()
adressef = tdElink[ltailf]
mylist.append(adressef)
val = indice+1
mydescription.append(tdElems[val].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[val].text_content(),adressef)
listData.append(data)
print(tdElems[indice].text_content())
#print(adressef)
elif(tdElems[indice].text_content() == ' Bridges and Tunnels'):
ltails = indice//2
intitules = tdElems[indice].text_content()
adresses = tdElink[ltails]
mylist.append(adresses)
val = indice+1
mydescription.append(tdElems[val].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[val].text_content(),adresses)
listData.append(data)
print(tdElems[indice].text_content())
#print(adresses)
elif(tdElems[indice].text_content() == ' Services'):
ltailn = indice//2
intitulen = tdElems[indice].text_content()
adressen = tdElink[ltailn]
mylist.append(adressen)
val = indice+1
mydescription.append(tdElems[val].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[val].text_content(),adressen)
listData.append(data)
print(tdElems[indice].text_content())
#print(adressen)
elif(tdElems[indice].text_content() == ' Printing and publishing'):
ltailk = indice//2
intitulek = tdElems[indice].text_content()
adressek = tdElink[ltailk]
mylist.append(adressek)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adressek)
listData.append(data)
print(tdElems[indice].text_content())
#print(adressek)
elif(tdElems[indice].text_content() == ' Industry , Technology Hardware and Equipment , Furniture'):
ltaily = indice//2
intituley = tdElems[indice].text_content()
adressey = tdElink[ltaily]
mylist.append(adressey)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adressey)
listData.append(data)
print(tdElems[indice].text_content())
#print(adressey)
elif(tdElems[indice].text_content() == ' Telecommunications , Information Technology (IT) , Consultancy , Services , Infrastructure and construction'):
ltaill = indice//2
intitulel = tdElems[indice].text_content()
adressel = tdElink[ltaill]
mylist.append(adressel)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adressel)
listData.append(data)
print(tdElems[indice].text_content())
#print('*****************')
elif(tdElems[indice].text_content() == ' Industry , Technology Hardware and Equipment'):
ltailycp = indice//2
intituleycp = tdElems[indice].text_content()
adresseycp = tdElink[ltailycp]
mylist.append(adresseycp)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseycp)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications'):
ltailytele = indice//2
intituleytele = tdElems[indice].text_content()
adresseytele = tdElink[ltailytele]
mylist.append(adresseytele)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseytele)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Technology Hardware and Equipment , Energy, Power and Electrical'):
ltailyHard = indice//2
intituleyHard = tdElems[indice].text_content()
adresseyHard = tdElink[ltailyHard]
mylist.append(adresseyHard)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseyHard)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications , Banking, Finance, Insurance and Securities (BFIS) , Information Technology (IT) , Consultancy'):
ltailyBank = indice//2
intituleyBank = tdElems[indice].text_content()
adresseyBank = tdElink[ltailyBank]
mylist.append(adresseyBank)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseyBank)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications , Information Technology (IT) , Consultancy , Infrastructure and construction , Building'):
ltailyInfor = indice//2
intituleyInfor = tdElems[indice].text_content()
adresseyInfor = tdElink[ltailyInfor]
mylist.append(adresseyInfor)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseyInfor)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications , Information Technology (IT) , Software Services , Consultancy'):
ltailymunic = indice//2
intituleymunic = tdElems[indice].text_content()
adresseymunic = tdElink[ltailymunic]
mylist.append(adresseymunic)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseymunic)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications , Infrastructure and construction'):
ltailyInfras = indice//2
intituleyInfras = tdElems[indice].text_content()
adresseyInfras = tdElink[ltailyInfras]
mylist.append(adresseyInfras)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseyInfras)
listData.append(data)
print(tdElems[indice].text_content())
elif(tdElems[indice].text_content() == ' Telecommunications , Information Technology (IT) , Consultancy'):
ltailyConsul = indice//2
intituleyConsul = tdElems[indice].text_content()
adresseyConsul = tdElink[ltailyConsul]
mylist.append(adresseyConsul)
mydescription.append(tdElems[indice+1].text_content())
mytitle.append(tdElems[indice].text_content())
data = Data(tdElems[indice].text_content(),tdElems[indice+1].text_content(),adresseyConsul)
listData.append(data)
print(tdElems[indice].text_content())
indice = indice + 1
tabtitle = tabtitle + mytitle
#tabtitle = mytitle
tabdesc = tabdesc + mydescription
#tabdesc = mydescription
tablink = tablink + mylist
#tablink = mylist
tabdata = listData
#tabdata = listData
#tabind =
#print(listData,'*********************')
tdElemslm = tree.xpath('//select[@style="float:left"]')
mlistlm = tdElemslm[0].getnext().getchildren()
if len(mlistlm) == 2 :
#print(mlistlm[1].xpath('@href')[0])
adressederedirection = mlistlm[1].xpath('@href')[0]
else :
#print(mlistlm[0].xpath('@href')[0])
if(mlistlm[0].text_content() == 'Next Page'):
adressederedirection = mlistlm[0].xpath('@href')[0]
else:
adressederedirection = '#'
url= adressederedirection
#global listData
#listData = list()
listededonnee = list()
nlisted = list()
inlist=0
while inlist < len(tabtitle):
donnee = Data(tabtitle[inlist],tabdesc[inlist],tablink[inlist])
listededonnee.append(donnee)
inlist = inlist+1
print(listededonnee)
print(len(listededonnee),'/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-/*-')
for donnee in listededonnee :
c = 0
nindic = 1
lesindices = []
listedesdons = listededonnee
while nindic < len(listededonnee):
if donnee.title == listededonnee[nindic].title and donnee.description == listededonnee[nindic].description and donnee.link == listededonnee[nindic].link:
c = c+1
if(c>0):
listededonnee.remove(listededonnee[nindic])
nindic = nindic + 1
print(c)
print(listededonnee)
print(len(listededonnee),'/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+/*-+')
#listData = listededonnee
for el in listededonnee :
inc = 0
indel = 0
while indel < len(listededonnee):
if el.title == listededonnee[indel].title and el.description == listededonnee[indel].description and el.link == listededonnee[indel].link:
inc = inc + 1
if inc >1 :
listededonnee.remove(listededonnee[indel])
indel = indel + 1
for iii in listededonnee :
print(iii.title)
pagehtml = ''
for td in listededonnee :
pagehtml = pagehtml + table.format(str(now),td.title,td.description,td.link,td.link)
return entetesite + pagehtml + footer
@cherrypy.expose
def sendMail(self,donnee,qtdonnee,addresse,qtAddonnee):
cherrypy.response.headers['Content-Type'] = 'application/json'
variable = r"""{}"""
data = json.loads(variable.format(donnee))
print(data,'+/++/+/++/++/++/++/++/*+/+*/+*/+*/+*/+*/+*/+*/')
print('---------------------------------------------------')
indice = 0
message = """
<!DOCTYPE html>
<html lang="fr">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="description" content="">
<meta name="author" content="">
<title>Appel d'offre</title>
<!-- Bootstrap core CSS -->
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css" integrity="sha384-9gVQ4dYFwwWSjIDZnLEWnxCjeSWFphJiwGPXr1jddIhOegiu1FwO5qRGvFXOdJZ4" crossorigin="anonymous">
</head>
<body>
<table id="example" style="width:100%; border-collapse: collapse;" class="table table-bordered">
<thead >
<tr><th>Title</th><th>Description</th><th>Link</th></tr>
</thead>
<tbody>"""
tdata = """<tr><td>{}</td><td>{}</td><td><a href=\"{}\">lien du site</a></td></tr>"""
fin = """
</tbody>
</table>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.1.0/js/bootstrap.min.js" integrity="sha384-uefMccjFJAIv6A+rW+L4AHf99KvxDjWSu1z9VI8SKNVmz4sk7buKt/6v9KI65qnm" crossorigin="anonymous"></script>
</body>
</html>
"""
valdonnee = ''
while indice < int(qtdonnee) :
chn = str(indice)
valdonnee = valdonnee + tdata.format(data[chn]['titre'],data[chn]['description'],'http://www.globaltenders.com/auth-tenders.php?action=details&id='+data[chn]['lien'])
indice = indice + 1
indicead = 0
variablead = r"""{}"""
address = json.loads(variable.format(addresse))
lAd = list()
while indicead < int(qtAddonnee) :
chn = str(indicead)
valAd = address[chn]['init']+'@'+ address[chn]['end']
lAd.append(valAd)
print(valAd)
indicead = indicead + 1
MessageSending = message + valdonnee +fin
indiceMail = 0
me = 'tekamfossi@gmail.com'
text = "Appel d'offre !"
part1 = MIMEText(text, 'plain')
part2 = MIMEText(MessageSending, 'html')
msg = MIMEMultipart('alternative')
msg['Subject'] = "Appel d'offre"
msg['From'] = me
msg.attach(part1)
msg.attach(part2)
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(me, "degrace1")
while indiceMail < len(lAd):
msg['To'] = lAd[indiceMail]
server.sendmail(me, lAd[indiceMail], msg.as_string())
indiceMail = indiceMail + 1
server.quit()
lstre = {}
lstre['resultat'] = 'ok'
return ''
#index.exposed = True
#methodetest.exposed = True
#sendMail.exposed = True
if __name__ == '__main__':
    # Startup diagnostic: log the parent directory of the working directory.
    print(os.path.dirname(os.getcwd()))
    # CherryPy app configuration: sessions enabled; static files served
    # from ./public (relative to the working directory) under /static.
    conf = {
        '/': {
            'tools.sessions.on': True,
            'tools.staticdir.root': os.path.abspath(os.getcwd())
        },
        '/static': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir':'./public'
        }
    }
    # Mount the application at the site root and start the server.
    cherrypy.quickstart(MonSiteWeb(), '/', conf)
    #cherrypy.quickstart(MonSiteWeb(), config ="server.conf")
| rudy-stephane/callfortender | webpython.py | webpython.py | py | 31,309 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "datetime.datetime.now",
"line_number": 48,
"usage_type": "call"
},
{
"api_name": "datetime.datetime",
"line_number": 48,
"usage_type": "attribute"
},
{
"api_name": "requests.get",
"line_number": 356,
"usage_type": "call"
},
{
"api_name": "lxml.html.... |
1287821530 | from matplotlib import pyplot as plt
def _plot_metric(gamma_values, values, ylabel, plot_title):
    """Draw one metric-vs-discount curve and display it immediately.

    Helper for plot_forest_management: every panel shares the same x axis
    (the discount factor) and the same layout (grid + blocking show).
    """
    plt.plot(gamma_values, values)
    plt.xlabel('Discount')
    plt.ylabel(ylabel)
    plt.title(plot_title)
    plt.grid()
    plt.show()


def plot_forest_management(gamma_values, iterations, time_array, rewards, title):
    """Plot solver diagnostics for the forest-management experiment.

    Displays three figures in sequence: reward vs discount, iterations to
    converge vs discount, and execution time vs discount.  ``title`` is the
    algorithm name used to label each figure.  (Refactor: the original
    repeated the same six plotting calls three times.)
    """
    _plot_metric(gamma_values, rewards, 'Rewards',
                 '{} - Reward vs Discount'.format(title))
    _plot_metric(gamma_values, iterations, 'Iterations to Converge',
                 '{} - Convergence vs Discount'.format(title))
    _plot_metric(gamma_values, time_array, 'Time Taken (in seconds)',
                 '{} - Execution Time vs Discount'.format(title))
{
"api_name": "matplotlib.pyplot.plot",
"line_number": 5,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot",
"line_number": 5,
"usage_type": "name"
},
{
"api_name": "matplotlib.pyplot.ylabel",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "matplo... |
36938950956 | import os
import zipfile
from conftest import RESOURCES_DIR
def test_zip_file():
    """Extract a known member from the fixture archive and verify its contents."""
    archive_path = os.path.join(RESOURCES_DIR, 'file_hello.zip')
    extracted_path = os.path.join(RESOURCES_DIR, 'file_hello.txt')
    with zipfile.ZipFile(archive_path) as archive:
        archive.extract('file_hello.txt', path=RESOURCES_DIR)
        members = archive.namelist()
        print(members)
        payload = archive.read('file_hello.txt')
        print(payload)
        assert 'file_hello.txt' in members
        assert payload == b'Hello'
        assert os.path.isfile(extracted_path)
    # Clean up the file extracted into the resources directory.
    os.remove(extracted_path)
| BaykovAleksandr/qa_guru_7_files | tests/test_zip.py | test_zip.py | py | 583 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "zipfile.ZipFile",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "os.path.join",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "conftest.RESOURCES_DIR",
"line_number": 7,
"usage_type": "argument"
},
{
"api_name": "os.path",
"l... |
4422298304 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Common fixtures and utils for io tests."""
import copy
import os
import pytest
from orion.core.evc import conflicts
@pytest.fixture()
def config_file():
    """Open config file with new config"""
    here = os.path.dirname(os.path.abspath(__file__))
    return open(os.path.join(here, "orion_config.yaml"))
@pytest.fixture()
def old_config_file():
    """Open config file with original config from an experiment in db"""
    here = os.path.dirname(os.path.abspath(__file__))
    return open(os.path.join(here, "orion_old_config.yaml"))
@pytest.fixture()
def incomplete_config_file():
    """Open config file with partial database configuration"""
    here = os.path.dirname(os.path.abspath(__file__))
    return open(os.path.join(here, "orion_incomplete_config.yaml"))
@pytest.fixture
def parent_config():
    """Generate a new experiment configuration"""
    return {
        "_id": "test",
        "name": "test",
        "metadata": {"user": "corneauf"},
        "version": 1,
    }
@pytest.fixture
def child_config(parent_config):
    """Generate a child experiment configuration derived from the parent"""
    # Deep copy so mutating the child never leaks into the parent fixture
    # (metadata is a nested dict).
    config = copy.deepcopy(parent_config)
    config.update(_id="test2", refers={"parent_id": "test"}, version=2)
    return config
@pytest.fixture
def experiment_name_conflict(storage, parent_config, child_config):
    """Generate an experiment name conflict"""
    # Purge any leftover experiments named "test"/"test2" so the two
    # create_experiment calls below start from a clean database state.
    exps = storage.fetch_experiments({"name": "test"}) + storage.fetch_experiments(
        {"name": "test2"}
    )
    for exp in exps:
        storage.delete_experiment(uid=exp["_id"])
    storage.create_experiment(parent_config)
    storage.create_experiment(child_config)
    # NOTE(review): parent_config is passed as both old and new config --
    # confirm this (rather than (parent_config, child_config)) is the
    # intended way to construct the name conflict.
    return conflicts.ExperimentNameConflict(parent_config, parent_config)
| lebrice/orion | tests/unittests/core/io/conftest.py | conftest.py | py | 1,819 | python | en | code | null | github-code | 1 | [
{
"api_name": "os.path.join",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 17,
"usage_type": "attribute"
},
{
"api_name": "os.path.dirname",
"line_number": 18,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number"... |
29180364279 | # DSC 510
# Week 11
# Programming Assignment Week 11
# Author: Reenie Christudass
# 05/23/2022
# Change#:1
# Change(s) Made: Cash register program
# Date of Change: 05/23/2022
# Author: Reenie Christudass
# Change Approved by: Michael Eller
# Date Moved to Production: 05/23/2022
import locale
from termcolor import colored
locale.setlocale(locale.LC_ALL, 'en_US')
class CashRegister:
    """Accumulate a running total and a count of purchased items."""

    def __init__(self):
        """Start with an empty register."""
        self.total = 0.0
        self.itemCount = 0

    def add_item(self, price):
        """Ring up one item; ``price`` may be any value convertible to float."""
        self.total += float(price)
        self.itemCount += 1

    def get_total(self):
        """Return the running total of all item prices."""
        return self.total

    def get_count(self):
        """Return the number of items rung up so far."""
        return self.itemCount
def process_line(create_list):
    """Print the receipt header followed by each purchased item's price.

    Prices are rendered in blue via termcolor's colored(); an empty
    purchase list prints a placeholder message instead.
    """
    print("")
    print("---------------THANK YOU FOR USING CASH REGISTER---------------------------")
    print("")
    if not create_list:
        print("")
        print("No items selected")
        return
    for position, price in enumerate(create_list, start=1):
        print("Item Purchased " + str(position) + ":" + '$ ' + colored(price, 'blue'))
def main():
    """Run the interactive cash-register loop.

    Repeatedly prompts for item prices until the user types 'exit',
    accumulating each valid price in the register, then prints a receipt
    and colorized totals.
    """
    print("WELCOME TO CASH REGISTER!!!!")
    register = CashRegister()
    create_list = []
    # Loop counter; also selects first-item vs next-item prompt wording.
    item_loop = 0
    while item_loop >= 0:
        if item_loop == 0:
            input_message = input("Enter the price of the item or enter exit to quit the program:")
        elif item_loop > 0:
            input_message = input("Enter the price of another item or enter exit to quit the program:")
        if input_message != 'exit':
            try:
                # Convert the user input to a price; reject non-numeric input.
                input_message = float(input_message)
                create_list.append(input_message)
            except ValueError:
                print("Please enter the numeric value for item")
                continue
            register.add_item(input_message)
        elif input_message == 'exit':
            break
        print()
        item_loop = item_loop + 1
    process_line(create_list)
    print("")
    message_output = ("Total Cost of the items purchased : {}".format(locale.currency(register.get_total())))
    print(colored(message_output, 'yellow'))
    # Bug fix: 'blue' was previously passed to str.format() (where it was
    # silently ignored) instead of to colored(); color the count as intended.
    message_output = ("Total Items Purchased: {}".format(colored(register.get_count(), 'blue')))
    print(colored(message_output, 'magenta'))
    print("")
    print("Thank you and have a great day")
# using special variable
if __name__ == "__main__":
main()
| reeniecd/DSC510-T301 | Week 11 assignment.py | Week 11 assignment.py | py | 2,870 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "locale.setlocale",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "locale.LC_ALL",
"line_number": 17,
"usage_type": "attribute"
},
{
"api_name": "termcolor.colored",
"line_number": 47,
"usage_type": "call"
},
{
"api_name": "locale.currency... |
72646957153 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from rest_framework.decorators import api_view
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import *
from .serializers import *
# Create your views here.
def index(request):
    """Render the landing page of the app as a bare HTML response."""
    content = '<h3>The index page of App</h3>'
    return HttpResponse(content)
class Wells(APIView):
    """List endpoint for Well records."""
    def get(self,request):
        # Serialize every Well row and return it as the response body.
        welllist = Well.objects.all()
        serializer=WellSerializer(welllist, many=True)
        return Response(serializer.data)
    def post(self,request):
        # Stub: creation is not implemented yet.
        pass
def maps(request):
    """Render the map template with a single hard-coded marker."""
    #template = loader.get_template('maps/maps.html')
    # NOTE(review): single demo point; looks like lat/long coordinates --
    # confirm whether this placeholder should come from the database.
    markers1={0:{'lat':-33.7772, 'long':151.1241}}
    return render(request, 'app/maps.html',{'points':markers1})
class MapList(APIView):
    """List endpoint for Farmpoints records."""

    def get(self, request):
        """Return every Farmpoints row, serialized."""
        maplist = Farmpoints.objects.all()
        serializer = FarmpointSerializer(maplist, many=True)
        return Response(serializer.data)

    def post(self, request):
        # Bug fix: DRF dispatches handlers as handler(request, ...), so the
        # previous signature `post(self)` raised TypeError on any POST.
        # Still a stub: creation not implemented yet.
        pass
#class News(APIView):
# def get(self,request):
# query= Farmpoints.objects.raw('SELECT * FROM Farm JOIN Farmer JOIN Farmpoints WHERE Farmpoints.Farm_id= 1')
# return render(request, 'app/maps.html',{'farmdet':query})
# query=Farmer.objects.raw('SELECT * FROM app_farmer JOIN app_farm ON app_farmer.id=1')
# return render(request, 'app/abc.html',{'resu':query})
@api_view(['GET', 'PUT', 'DELETE'])
def MapDetail(request,pk, format=None):
    """Retrieve, update or delete a single Farmpoints record by primary key."""
    try:
        map_detail=Farmpoints.objects.get(pk=pk)
    except Farmpoints.DoesNotExist:
        # Unknown pk: nothing to act on.
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method=='GET':
        serializer=FarmpointSerializer(map_detail)
        return Response(serializer.data)
    elif request.method=='PUT':
        # Full update: validate the incoming payload before saving.
        serializer=FarmpointSerializer(map_detail, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method=='DELETE':
        map_detail.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class FarmerList(APIView):
    """List endpoint for Farmer records."""
    def get(self,request):
        # Serialize every Farmer row and return it as the response body.
        farmerlist = Farmer.objects.all()
        serializer=FarmerSerializer(farmerlist, many=True)
        return Response(serializer.data)
    def post(self,request):
        # Stub: creation is not implemented yet.
        pass
@api_view(['GET', 'PUT', 'DELETE'])
def FarmerDetail(request,pk, format=None):
    """Retrieve, update or delete a single Farmer record by primary key."""
    try:
        f_detail=Farmer.objects.get(pk=pk)
    except Farmer.DoesNotExist:
        # Unknown pk: nothing to act on.
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method=='GET':
        serializer=FarmerSerializer(f_detail)
        return Response(serializer.data)
    elif request.method=='PUT':
        # Full update: validate the incoming payload before saving.
        serializer=FarmerSerializer(f_detail, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method=='DELETE':
        f_detail.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class HouseHoldList(APIView):
    """List endpoint for HouseHold records."""

    def get(self, request):
        """Return every HouseHold row, serialized.

        Consistency fix: the sibling list views return a DRF Response;
        this one previously used Django's JsonResponse(..., safe=False),
        bypassing DRF content negotiation.
        """
        hhlist = HouseHold.objects.all()
        serializer = HouseHoldSerializer(hhlist, many=True)
        return Response(serializer.data)

    def post(self, request):
        # Bug fix: DRF dispatches handlers as handler(request, ...), so the
        # previous signature `post(self)` raised TypeError on any POST.
        # Still a stub: creation not implemented yet.
        pass
@api_view(['GET', 'PUT', 'DELETE'])
def HouseHoldDetail(request,pk, format=None):
    """Retrieve, update or delete a single HouseHold record by primary key."""
    try:
        hh_detail=HouseHold.objects.get(pk=pk)
    except HouseHold.DoesNotExist:
        # Unknown pk: nothing to act on.
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method=='GET':
        serializer=HouseHoldSerializer(hh_detail)
        return Response(serializer.data)
    elif request.method=='PUT':
        # Full update: validate the incoming payload before saving.
        serializer=HouseHoldSerializer(hh_detail, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method=='DELETE':
        hh_detail.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class MemberList(APIView):
    """List endpoint for Member records."""

    def get(self, request):
        """Return every Member row, serialized."""
        memberlist = Member.objects.all()
        serializer = MemberSerializer(memberlist, many=True)
        return Response(serializer.data)

    def post(self, request):
        # Bug fix: DRF dispatches handlers as handler(request, ...), so the
        # previous signature `post(self)` raised TypeError on any POST.
        # Still a stub: creation not implemented yet.
        pass
@api_view(['GET', 'PUT', 'DELETE'])
def MemberDetail(request,pk, format=None):
    """Retrieve, update or delete a single Member record by primary key."""
    try:
        m_detail=Member.objects.get(pk=pk)
    except Member.DoesNotExist:
        # Unknown pk: nothing to act on.
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method=='GET':
        serializer=MemberSerializer(m_detail)
        return Response(serializer.data)
    elif request.method=='PUT':
        # Full update: validate the incoming payload before saving.
        serializer=MemberSerializer(m_detail, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method=='DELETE':
        m_detail.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
class FarmList(APIView):
    """List endpoint for Farm records."""

    def get(self, request):
        """Return every Farm row, serialized."""
        flist = Farm.objects.all()
        serializer = FarmSerializer(flist, many=True)
        return Response(serializer.data)

    def post(self, request):
        # Bug fix: DRF dispatches handlers as handler(request, ...), so the
        # previous signature `post(self)` raised TypeError on any POST.
        # Still a stub: creation not implemented yet.
        pass
| k-root/its_farm | app/views.py | views.py | py | 5,031 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "django.http.HttpResponse",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "rest_framework.views.APIView",
"line_number": 17,
"usage_type": "name"
},
{
"api_name": "rest_framework.response.Response",
"line_number": 21,
"usage_type": "call"
},
{... |
897362652 | import os
from flask import Blueprint, jsonify
from ..decorators import required_login
from ..utilities import create_jwt
jwt_rest_bp = Blueprint('jwt_rest_bp', __name__)
@jwt_rest_bp.route('/api/v1/user', methods=['GET'])
@required_login
def jwt(user):
    """Issue a JWT for the authenticated user.

    ``user`` is injected by the @required_login decorator.  Note: the view
    name ``jwt`` would shadow the common ``jwt`` module name if that module
    were ever imported here.
    """
    username = user.username
    email = user.email
    token = create_jwt(username, email)
    # Token is decoded before serialization -- create_jwt evidently
    # returns bytes here.
    return jsonify(token=token.decode('utf-8')), 200
@jwt_rest_bp.route('/api/v1/service_get_pub', methods=['GET'])
def get_pub():
    """Return the service's public key, read from the environment."""
    public_key = os.environ.get('public_key')
    return jsonify(pub=public_key), 200
| CossackDex/ZPI_AuthServer | auth_server_application/rest_jwt/jwt_rest_routes.py | jwt_rest_routes.py | py | 556 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "flask.Blueprint",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "utilities.create_jwt",
"line_number": 16,
"usage_type": "call"
},
{
"api_name": "flask.jsonify",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "decorators.required... |
637784 | """ Module for tables of the Auto Typing paper
"""
# Imports
from __future__ import print_function, absolute_import, division, unicode_literals
import numpy as np
import glob, os, sys
import warnings
import pdb
from pkg_resources import resource_filename
from astropy import units as u
from astropy.table import Table
from astropy.coordinates import SkyCoord, match_coordinates_sky
from astropy.time import Time
from linetools import utils as ltu
from spit import labels as spit_lbl
# Upper-cased month-name abbreviation -> month number, used when parsing
# observation dates out of Kast file names; includes the 4-letter variants
# (JUNE, JULY, SEPT) that appear in some file names.
ddict = {'JAN': 1, 'FEB': 2, 'MAR': 3, 'APR': 4, 'MAY': 5,
         'JUN': 6, 'JUNE': 6, 'JUL': 7, 'JULY': 7, 'AUG': 8,
         'SEP': 9, 'SEPT': 9, 'OCT': 10, 'NOV': 11, 'DEC': 12}
# Local
#sys.path.append(os.path.abspath("../Analysis/py"))
#sys.path.append(os.path.abspath("../Vetting/py"))
#from vette_dr7 import load_ml_dr7
def mktab_images(outfil='tab_images.tex', sub=False):
    """Write a LaTeX deluxetable summarizing the Kast image training set.

    Scans $SPIT_DATA/Kast/FITS/{test,train,validation}/{type}/ for
    *.fits.gz files, parses observation date, frame number and PI from
    each file name, and writes one table row per image.

    Parameters
    ----------
    outfil : str, optional
        Output .tex file name.
    sub : bool, optional
        If True, write only the first 16 rows to 'tab_images_sub.tex'
        (an abridged table).
    """
    # Path to the image archive (requires the SPIT_DATA env variable).
    path = os.getenv('SPIT_DATA')+'/Kast/FITS/'
    if sub:
        outfil = 'tab_images_sub.tex'

    # Scan image sets, accumulating one entry per image
    # (dropped the unused `flags` list from the original).
    types, dates, frames, pis, sets = [], [], [], [], []
    ntest, ntrain, nvalid = 0, 0, 0
    for iset in ['test', 'train', 'validation']:
        for itype in ['arc', 'bias', 'flat', 'science', 'standard']:
            files = glob.glob(path+'{:s}/{:s}/*fits.gz'.format(iset,itype))
            nimg = len(files)
            print("There are {:d} images of type {:s} in set {:s}".format(nimg, itype, iset))
            types += [itype]*nimg
            sets += [iset]*nimg
            if iset == 'test':
                ntest += nimg
            elif iset == 'train':
                ntrain += nimg
            elif iset == 'validation':
                nvalid += nimg
            # Parse date, frame number and PI from each file name.
            for ipath in files:
                ifile = os.path.basename(ipath)
                if ifile[0] == '2':
                    # Prochaska-style names start with the 4-digit year.
                    pis += ['Prochaska']
                    year = ifile[0:4]
                    try:
                        month = ddict[ifile[4:7].upper()]
                    except KeyError:
                        # Unrecognized month abbreviation -- drop into the
                        # debugger (debugging aid kept from the original).
                        pdb.set_trace()
                    day = ifile[7:9]
                    dates.append('{:s}-{:d}-{:s}'.format(year,month,day))
                    i1 = ifile.find('.fits')
                    frames.append(ifile[10:i1])
                else:
                    # Hsyu-style names start with the month abbreviation.
                    pis += ['Hsyu']
                    i0 = ifile.find('_20')
                    year = ifile[i0+1:i0+5]
                    try:
                        # Try a 4-letter abbreviation (e.g. JUNE, SEPT) first.
                        month = ddict[ifile[0:4].upper()]
                    except KeyError:
                        # Bug fix: was a bare `except:`; only a missing
                        # dictionary key should trigger the 3-letter fallback.
                        month = ddict[ifile[0:3].upper()]
                        day = ifile[3:i0]
                    else:
                        day = ifile[4:i0]
                    dates.append('{:s}-{:d}-{:s}'.format(year,month,day))
                    i1 = ifile.find('.fits')
                    frames.append(ifile[i0+6:i1])

    # Summarize the census on stdout.
    print('-----------------------------------------')
    print('There are a total of {:d} training images'.format(ntrain))
    print('There are a total of {:d} test images'.format(ntest))
    print('There are a total of {:d} validation images'.format(nvalid))
    print('There are a total of {:d} images!'.format(nvalid+ntrain+ntest))

    # Build the astropy table and sort it for presentation.
    tbl = Table()
    tbl['Type'] = types
    tbl['Date'] = dates
    tbl['Frame'] = frames
    tbl['PI'] = pis
    tbl['set'] = sets
    tbl.sort(['Type', 'Date', 'Frame'])
    # Date-only formatting for the table rows.
    t = Time(tbl['Date'], out_subfmt='date')

    # Write the LaTeX (AASTeX deluxetable) file.
    tbfil = open(outfil, 'w')
    tbfil.write('\\begin{deluxetable}{lccccc}\n')
    tbfil.write('\\tablewidth{0pc}\n')
    tbfil.write('\\tablecaption{Training Set\\label{tab:images}}\n')
    tbfil.write('\\tabletypesize{\\small}\n')
    tbfil.write('\\tablehead{\\colhead{Type?} & \\colhead{Date} \n')
    tbfil.write('& \\colhead{Frame} \n')
    tbfil.write('& \\colhead{Usage} \n')
    tbfil.write('} \n')
    tbfil.write('\\startdata \n')

    for ii,row in enumerate(tbl):
        # Truncate the abridged table after 16 rows.
        # (Bug fix: was bitwise `&` on booleans; use logical `and`.)
        if (ii > 15) and sub:
            break
        iline = '{:s} & {:s} & {:s} & {:s}'.format(row['Type'], t[ii].value,
                                                   row['Frame'], row['set'])
        tbfil.write(iline)
        tbfil.write('\\\\ \n')

    # Close out the table environment.
    tbfil.write('\\enddata \n')
    tbfil.write('\\end{deluxetable} \n')
    tbfil.close()
    print('Wrote {:s}'.format(outfil))
#### ########################## #########################
def main(flg_tab):
    """Dispatch table-building routines according to a bit flag.

    Parameters
    ----------
    flg_tab : str or int
        'all' selects every table; otherwise an integer whose bits select
        individual tables (bit 0 = image tables).
    """
    if flg_tab == 'all':
        # Set all five flag bits (0b11111 = 31).  Idiom fix: the builtin
        # sum over a generator replaces an unnecessary numpy round-trip.
        flg_tab = sum(2**ii for ii in range(5))
    else:
        flg_tab = int(flg_tab)
    # Bit 0: image tables (abridged then full)
    if flg_tab & (2**0):
        mktab_images(sub=True)
        mktab_images()
# Command line execution
if __name__ == '__main__':
    if len(sys.argv) == 1:
        # No CLI argument: default to building only the image table (bit 0).
        flg_tab = 0
        flg_tab += 2**0 # Image table
        #flg_tab += 2**1 # DR12
    else:
        # Pass the argument through as given ('all' or an integer string).
        flg_tab = sys.argv[1]
    main(flg_tab)
| pypeit/spit | papers/First/Tables/py/auto_type_tabs.py | auto_type_tabs.py | py | 6,524 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "os.getenv",
"line_number": 37,
"usage_type": "call"
},
{
"api_name": "glob.glob",
"line_number": 46,
"usage_type": "call"
},
{
"api_name": "os.path.basename",
"line_number": 61,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 61,... |
19990022228 | #!/usr/bin/python3
import arcade
# Window dimensions.
screen_width = 600
screen_height = 600

# Bug fix: the second argument is the window HEIGHT; the original passed
# screen_width twice (harmless only while width == height).
arcade.open_window(screen_width, screen_height, "drawing example")
arcade.set_background_color(arcade.color.WHITE)
arcade.start_render()

# Face: large yellow disc centered in the window.
x = 300
y = 300
radius = 200
arcade.draw_circle_filled(x, y, radius, arcade.color.YELLOW)

# Right eye.
x = 370
y = 350
radius = 20
arcade.draw_circle_filled(x, y, radius, arcade.color.BLACK)

# Left eye.
x = 230
y = 350
radius = 20
arcade.draw_circle_filled(x, y, radius, arcade.color.BLACK)

# Smile: lower arc of an ellipse, drawn 10 px thick.
# (Removed an unused `radius = 200` assignment -- arcs take width/height.)
x = 300
y = 280
width = 120
height = 100
start_angle = 190
end_angle = 350
arcade.draw_arc_outline(x, y, width, height, arcade.color.BLACK, start_angle, end_angle, 10)

arcade.finish_render()
arcade.run()
| alvo254/tutorial | game.py | game.py | py | 769 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "arcade.open_window",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "arcade.set_background_color",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "arcade.color",
"line_number": 8,
"usage_type": "attribute"
},
{
"api_name": "arcade.... |
40325564295 | import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
# Load the HSLS student file; the listed sentinel codes become NaN.
# -9 no unit response, -8 missing, -5 suppressed, -7 skipped,
# -4 question not administered, -3 carry-through missing.
student_data = pd.read_csv(
    "data/HSLS_2016_v1_0_CSV_Datasets/hsls_16_student_v1_0.csv", na_values=[-9, -8, -5, -7, -4, -3])
student_data.head()

# Keep only the first-wave predictor columns (plus the student id).
wanted_prefixes = ('S1', 'X1', 'A1', 'C1', 'STU_ID')
filter_col = [col for col in student_data if col.startswith(wanted_prefixes)]
X = student_data[filter_col]

# Record the fraction of missing values per predictor before filtering.
missing_fraction = X.isna().sum() / len(X)
missing_fraction.to_csv("figures/missingness_imputation_missingness_per_predictor.csv")

# Only take columns with less than 15% of the data missing.
X = X[X.columns[X.isnull().mean() < 0.15]]

# Median imputation of the remaining gaps.
updated_predictor_col = X.fillna(
    X.median())
updated_predictor_col.head()

# Quick demographic sanity plots (sex, then race, on the same axes).
fig, ax = plt.subplots(figsize=(10, 6.7))
sns.set_style('whitegrid')
sns.countplot(x='X1SEX', data=X, palette="mako")
sns.countplot(x='X1RACE', data=X, palette="mako")
X.columns

# Export the imputed baseline feature matrix.
updated_predictor_col.to_csv("data/processed/baseline_features.csv", index=False)
| karthikvetrivel/HSLS-Predictive-Modellng | input_extract.py | input_extract.py | py | 1,327 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "pandas.read_csv",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot.subplots",
"line_number": 35,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot",
"line_number": 35,
"usage_type": "name"
},
{
"api_name": "seaborn.s... |
5112939663 | import argparse
import importlib
from collections import namedtuple
from typing import Dict
# (module_path, class_name) pair locating a Command implementation.
CommandInfo = namedtuple("CommandInfo", "module_path, class_name")

# Registry of CLI subcommands: command name -> where its class lives.
# Modules are imported lazily by create_command, not here.
commands_dict: Dict[str, CommandInfo] = {
    "download": CommandInfo("gtd.cli.download", "DownloadCommand"),
    "export": CommandInfo("gtd.cli.export", "ExportCommand"),
    "ts": CommandInfo("gtd.cli.ts", "TsCommand"),
}
class Command:
    """Abstract base for a named CLI subcommand."""

    def __init__(self, name: str) -> None:
        self.name = name

    def run(self, args: argparse.Namespace) -> int:
        """Execute the subcommand; concrete subclasses must override."""
        raise NotImplementedError
def create_command(name: str) -> Command:
    """
    Create an instance of the Command class with the given name.
    """
    info = commands_dict[name]
    module = importlib.import_module(info.module_path)
    command_class = getattr(module, info.class_name)
    return command_class(name=name)
| muse-research-lab/cloud-traces-comparison | gtd/cli/commands.py | commands.py | py | 898 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "collections.namedtuple",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "typing.Dict",
"line_number": 8,
"usage_type": "name"
},
{
"api_name": "argparse.Namespace",
"line_number": 19,
"usage_type": "attribute"
},
{
"api_name": "importlib.im... |
2022008519 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Yijia Zheng
# @email : yj.zheng@siat.ac.cn
# @Time : 2019/11/29 11:20:37
import rdkit
import rdkit.Chem as Chem
import copy
import sys
import argparse
from multiprocessing import Pool
from util.chemutils import get_clique_mol, tree_decomp, get_mol, get_smiles, set_atommap, enum_assemble, decode_stereo
from branch_jtnn.mol_tree import MolTree

# Silence RDKit's verbose parse warnings.
lg = rdkit.RDLogger.logger()
lg.setLevel(rdkit.RDLogger.CRITICAL)

parser = argparse.ArgumentParser()
parser.add_argument('--mol_file', required=True)       # one SMILES per line
parser.add_argument('--save_vocab_file', required=True)
parser.add_argument('--ncpu', type=int, default=8)
args = parser.parse_args()

cset = set()
with open(args.mol_file, 'r') as f:
    # Fix: the original stripped the last character of every line
    # (line[:-1]), which corrupts the final SMILES when the file has no
    # trailing newline; strip only the newline instead.
    lines = [line.rstrip('\n') for line in f]

def get_tree(smiles):
    """Build the junction tree for one SMILES string."""
    return MolTree(smiles, 0)

# Decompose every molecule in parallel.  The context manager terminates the
# worker processes when done (the original leaked the pool).
# NOTE(review): runs at module level without an __main__ guard — fine under
# fork (Linux), but would respawn recursively under the spawn start method.
with Pool(args.ncpu) as pool:
    trees = pool.map(get_tree, lines)

# Single atoms are vocabulary entries too.
for smiles in lines:
    mol = Chem.MolFromSmiles(smiles)
    for i in range(mol.GetNumAtoms()):
        cmol = get_clique_mol(mol, [i])
        cset.add(get_smiles(cmol))

# Every clique that appears in any junction tree.
for tree in trees:
    for c in tree.nodes:
        cset.add(c.smiles)

# Write the deduplicated, sorted vocabulary.
result = '\n'.join(sorted(cset))
with open(args.save_vocab_file, 'w') as f:
    f.write(result)
print('done', flush=True)
| aI-area/T-S-polish | scripts/gen_vocab.py | gen_vocab.py | py | 1,284 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "rdkit.RDLogger.logger",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "rdkit.RDLogger",
"line_number": 17,
"usage_type": "attribute"
},
{
"api_name": "rdkit.RDLogger",
"line_number": 18,
"usage_type": "attribute"
},
{
"api_name": "argpars... |
70983029473 | from flask import Flask, render_template, request
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import inspect
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///portfolio.db'
db = SQLAlchemy(app)
class Portfolio(db.Model):
    # A portfolio entry: a display title plus the URL it points at.
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100))  # display name, max 100 chars
    link = db.Column(db.String(100))   # target URL, max 100 chars
    def __init__(self, title, link):
        """Create an entry from its title and link."""
        self.title = title
        self.link = link
@app.route('/')
def index():
    """Render the home page with every stored portfolio entry."""
    portfolios = Portfolio.query.all()
    return render_template('index.html', portfolios=portfolios)
@app.route('/drop', methods=['GET', 'POST'])
def drop_db():
    """Reset the database on POST (drop all tables, then recreate them).

    A plain GET skips the reset and just re-renders the index page.
    """
    if request.method == 'POST':
        db.drop_all()
        db.session.commit()
        dbInit()  # recreate the empty 'portfolio' table
    portfolios = Portfolio.query.all()
    return render_template('index.html', portfolios=portfolios)
@app.route('/add', methods=['GET', 'POST'])
def add_portfolio():
    """Show the add-entry form; on POST, persist the submitted entry.

    NOTE(review): the POST branch re-renders the form instead of
    redirecting (no POST/redirect/GET), so a browser refresh re-submits
    the same entry.
    """
    if request.method == 'POST':
        title = request.form['title']
        link = request.form['link']
        portfolio = Portfolio(title=title, link=link)
        db.session.add(portfolio)
        db.session.commit()
    return render_template('add.html')
def dbInit():
    """Create the 'portfolio' table on first run; report either way."""
    with app.app_context():
        if inspect(db.engine).has_table('portfolio'):
            print('Таблица "portfolio" уже существует')
        else:
            db.create_all()
            print('Таблица "portfolio" создана')
if __name__ == '__main__':
    dbInit()  # make sure the table exists before serving requests
    app.run(debug=True)  # NOTE(review): debug=True is unsafe in production
| TubolovArtem/Laba-9-by-Tubolov-Artem | task/app.py | app.py | py | 1,581 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "flask.Flask",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "flask_sqlalchemy.SQLAlchemy",
"line_number": 9,
"usage_type": "call"
},
{
"api_name": "flask.render_template",
"line_number": 24,
"usage_type": "call"
},
{
"api_name": "flask.req... |
42524493858 | """Utilities for parsing isoformat strings into datetime types.
"""
from datetime import datetime, date, time
def datetimefromisoformat(s):
    """Parse an isoformat string into a datetime.datetime object.

    *s* must look like 'YYYY-MM-DD HH:MM:SS[.ffffff]' — a single space
    between the date and time parts, as produced by str(datetime).
    """
    # Fix: the locals used to be named `date` and `time`, shadowing the
    # datetime.date / datetime.time names imported at module level.
    date_part, time_part = s.split()
    year, month, day = _parsedate(date_part)
    hour, minute, second, microsecond = _parsetime(time_part)
    return datetime(year, month, day, hour, minute, second, microsecond)
def datefromisoformat(s):
    """Parse an isoformat date string into a datetime.date object."""
    return date(*_parsedate(s))
def timefromisoformat(s):
    """Parse an isoformat time string into a datetime.time object."""
    return time(*_parsetime(s))
def _parsedate(s):
    """Split 'YYYY-MM-DD' into its integer [year, month, day] parts."""
    return list(map(int, s.split('-')))
def _parsetime(s):
    """Parse an isoformat time string into a (hour, minute, second,
    microsecond) tuple.

    The fractional part is optional and is interpreted as a fraction of a
    second, so '01:02:03.5' yields 500000 microseconds.  Strings produced
    by str(datetime/time) — always six fractional digits — parse unchanged.
    """
    if '.' in s:
        # Renamed from `time` to avoid shadowing the imported datetime.time.
        clock, fraction = s.split('.')
        # Fix: scale the fraction to microseconds instead of taking its
        # face value ('.5' used to parse as 5 microseconds, not 500000).
        microsecond = int(fraction.ljust(6, '0')[:6])
    else:
        clock, microsecond = s, 0
    hour, minute, second = [int(i) for i in clock.split(':')]
    return hour, minute, second, microsecond
if __name__ == '__main__':
    # Self-test: round-trip values through str() and re-parse them,
    # comparing the resulting string forms.
    import unittest
    now = datetime.now()
    today = date.today()
    midnight = time()  # NOTE(review): unused
    class Tests(unittest.TestCase):
        # Each entry is [input string, expected str() of the parsed value].
        datetimeTests = [
            [str(now), str(now)],
            [str(now.replace(microsecond=0)), str(now.replace(microsecond=0))],
            [str(now.replace(second=0, microsecond=0)), str(now.replace(second=0, microsecond=0))],
            [str(now.replace(second=0)), str(now.replace(second=0))],
        ]
        dateTests = [
            [str(today), str(today)],
        ]
        timeTests = [
            [str(time(0,0,0)),'00:00:00'],
            [str(time(1,2,3)),'01:02:03'],
            [str(time(1,2,3,4)),'01:02:03.000004'],
        ]
        def test_datetime(self):
            for test,expected in self.datetimeTests:
                result = str(datetimefromisoformat(test))
                self.assertEqual(expected, result)
        def test_date(self):
            for test,expected in self.dateTests:
                result = str(datefromisoformat(test))
                self.assertEqual(expected, result)
        def test_time(self):
            for test,expected in self.timeTests:
                result = str(timefromisoformat(test))
                self.assertEqual(expected, result)
    unittest.main()
| timparkin/timparkingallery | share/pollen/datetimeutil.py | datetimeutil.py | py | 2,671 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "datetime.date",
"line_number": 9,
"usage_type": "name"
},
{
"api_name": "datetime.time",
"line_number": 9,
"usage_type": "name"
},
{
"api_name": "datetime.date",
"line_number": 10,
"usage_type": "argument"
},
{
"api_name": "datetime.time",
"line... |
32059293827 | import json
import urllib
import urllib.parse
import urllib.request

from django.conf import settings
def evaluate_recaptcha(request, errors):
    """Verify the Google reCAPTCHA token attached to *request*.

    Posts the token to Google's siteverify endpoint; on a failed check,
    appends a human-readable message to *errors* (mutated in place).
    Returns None.
    """
    recaptcha_response = request.POST.get('g-recaptcha-response')
    url = 'https://www.google.com/recaptcha/api/siteverify'
    values = {
        'secret': settings.GOOGLE_RECAPTCHA_PRIVATE_KEY,
        'response': recaptcha_response,
    }
    data = urllib.parse.urlencode(values).encode()
    req = urllib.request.Request(url, data=data)
    # Fix: close the HTTP response deterministically (it used to leak).
    with urllib.request.urlopen(req) as response:
        result = json.loads(response.read().decode())
    if not result['success']:
        errors.append('Site access denied, Google reCaptcha authentication failed')
| DjangoMeetup/public-website | public_website/apps/formality/views.py | views.py | py | 687 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "django.conf.settings.GOOGLE_RECAPTCHA_PRIVATE_KEY",
"line_number": 12,
"usage_type": "attribute"
},
{
"api_name": "django.conf.settings",
"line_number": 12,
"usage_type": "name"
},
{
"api_name": "urllib.parse.urlencode",
"line_number": 15,
"usage_type": "ca... |
1023399821 | from aiogram import types, Dispatcher
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters import Text
from aiogram.dispatcher.filters.state import State, StatesGroup
from keyboards import admin_batton
from create_bot import bot, dp
from data_base import sqlite_db
from aiogram.types import InlineKeyboardButton, InlineKeyboardMarkup
ID = None
class FSMAdmin(StatesGroup):
    # Finite-state machine for the admin "add product" dialog: the bot
    # collects one field per state, in this declaration order.
    photo = State()
    name = State()
    description = State()
    price = State()
# @dp.message_handler(commands=['moderator'], is_chat_admin=True)
async def make_changes_command(message: types.Message):
global ID
ID = message.from_user.id
await bot.send_message(message.from_user.id, "Что нужно хозяин?", reply_markup=admin_batton.button_case_admin)
# @dp.message_handler(commands='Загрузить', state=None)
async def cm_start(message: types.Message):
if message.from_user.id == ID:
await FSMAdmin.photo.set()
await message.reply("Загрузи фото")
# @dp.message_handler(state="*", commands='отмена')
# @dp.message_handler(Text(equals='отмена', ignore_case=True), state="*")
async def cansel_handler(message: types.Message, state: FSMContext):
if message.from_user.id == ID:
current_state = await state.get_state()
if current_state is None:
return
await state.finish()
await message.reply('OK')
# @dp.message_handler(content_types=['photo'], state=FSMAdmin.photo)
async def load_photo(message: types.Message, state: FSMContext):
if message.from_user.id == ID:
async with state.proxy() as data:
data['photo'] = message.photo[0].file_id
await FSMAdmin.next()
await message.reply('Теперь название')
# @dp.message_handler(state=FSMAdmin.name)
async def load_name(message: types.Message, state: FSMContext):
if message.from_user.id == ID:
async with state.proxy() as data:
data['name'] = message.text
await FSMAdmin.next()
await message.reply('Введи описание')
# @dp.message_handler(state=FSMAdmin.description)
async def load_description(message: types.Message, state: FSMContext):
if message.from_user.id == ID:
async with state.proxy() as data:
data['description'] = message.text
await FSMAdmin.next()
await message.reply('Какова цена?')
# @dp.message_handler(state=FSMAdmin.price)
async def load_price(message: types.Message, state: FSMContext):
if message.from_user.id == ID:
async with state.proxy() as data:
data['price'] = float(message.text)
await sqlite_db.sql_add_command(state)
await state.finish()
@dp.callback_query_handler()
async def del_callback_run(callback_query: types.CallbackQuery):
await sqlite_db.delete_command(callback_query.data.replace('del ', ''))
await callback_query.answer(text=f'{callback_query.data.replace("del ", "")} удалена.', show_alert=True)
@dp.message_handler(commands='Удалить')
async def delete_item(message: types.Message):
if message.from_user.id == ID:
read = await sqlite_db.sql_read2()
for ret in read:
await bot.send_photo(message.from_user.id, ret[0], f'{ret[1]}\nОписание: {ret[2]}\nЦена {ret[-1]}')
await bot.send_message(message.from_user.id, text='^^^', reply_markup=InlineKeyboardMarkup(). \
add(InlineKeyboardButton(f'Удалить {ret[1]}', callback_data=f'del {ret[1]}')))
def register_handler_admin(dp: Dispatcher):
    """Attach all admin handlers to the dispatcher.

    Registration order matters: the cancel handlers are bound for every
    state ("*") before the per-state loaders, so 'отмена' wins mid-dialog.
    """
    dp.register_message_handler(cm_start, commands=['Загрузить'], state=None)
    dp.register_message_handler(cansel_handler, state="*", commands='отмена')
    dp.register_message_handler(cansel_handler, Text(equals='отмена', ignore_case=True), state="*")
    dp.register_message_handler(load_photo, content_types=['photo'], state=FSMAdmin.photo)
    dp.register_message_handler(load_name, state=FSMAdmin.name)
    dp.register_message_handler(load_description, state=FSMAdmin.description)
    dp.register_message_handler(load_price, state=FSMAdmin.price)
    dp.register_message_handler(make_changes_command, commands=['moderator'], is_chat_admin=True)
# dp.register_message_handler(del_callback_run, lambda x: x.data and x.data.startwith('del '))
# dp.register_message_handler(delete_item, commands='Удалить')
| DmitriPrilucki/pizza-bot-this-python | handlers/admin.py | admin.py | py | 4,475 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "aiogram.dispatcher.filters.state.StatesGroup",
"line_number": 13,
"usage_type": "name"
},
{
"api_name": "aiogram.dispatcher.filters.state.State",
"line_number": 14,
"usage_type": "call"
},
{
"api_name": "aiogram.dispatcher.filters.state.State",
"line_number": 1... |
29585688181 | import typing as t
import os.path
import logging
import json
from yaml.error import Mark
from typing_extensions import TypedDict, Protocol, Literal
from schemalint.entity import ErrorEvent, Lookup, Context
from schemalint.errors import (
ParseError,
LintError,
ResolutionError,
ValidationError,
MessageError,
)
logger = logging.getLogger(__name__)
StatusType = Literal["INFO", "WARNING", "ERROR"]
class PositionDict(TypedDict):
    # A position in a file: 1-based line, 0-based character column
    # (callers build these from YAML marks as line + 1 / column).
    line: int
    character: int
class OutputDict(TypedDict):
    # One formatted lint record, ready for a Layout to serialize.
    status: str       # one of StatusType: "INFO" | "WARNING" | "ERROR"
    errortype: str    # error class name
    filename: str     # path made relative to the current directory
    start: PositionDict
    end: PositionDict
    message: str
    where: t.List[str]  # trail of "file:line" entries leading to the error
class Layout(Protocol):
    # Structural interface: anything that can serialize an OutputDict
    # into a single output line.
    def layout(self, d: OutputDict) -> str:
        ...
class LTSVLayout(Layout):
    """Serializes an OutputDict as one LTSV ("key:value<TAB>...") line."""

    def layout(self, d: OutputDict) -> str:
        # Fix: work on a shallow copy — the original overwrote d["start"]
        # and d["end"] in the caller's dict with flattened strings.
        record = dict(d)
        record["start"] = f"{d['start']['line']}@{d['start']['character']}"
        record["end"] = f"{d['end']['line']}@{d['end']['character']}"
        return "\t".join(f"{k}:{v}" for k, v in record.items())
class JSONLayout(Layout):
    """Serializes an OutputDict as one JSON object per line."""

    def layout(self, d: OutputDict) -> str:
        # Fix: copy before stringifying "where" so the caller's dict is
        # not mutated.
        record = dict(d)
        record["where"] = str(d["where"])
        return json.dumps(record, ensure_ascii=False)
class Detector:
    """Derives source positions (YAML Marks) and severity for lint errors."""

    lookup: Lookup  # node lookup over the parsed YAML document

    def __init__(self, filename: str, *, lookup: Lookup):
        self.filename = filename  # root file
        self.lookup = lookup

    def has_error_point(self, err: LintError) -> bool:
        # NOTE(review): probes the wrapper error for "problem_mark", while
        # callers read err.inner.problem_mark — confirm LintError proxies it.
        return getattr(err, "problem_mark", None) is not None

    def detect_status(self, filename):
        # Errors in the root file are hard errors; errors surfaced while
        # loading some other (included) file are downgraded to warnings.
        if self.filename == filename:
            return "ERROR"
        else:
            return "WARNING"

    def detect_loadning_start_point(self, err: LintError) -> t.Tuple[Mark, Mark]:
        """Span of the mapping entry whose loading triggered *err*;
        falls back to the raw error position when no data node is attached."""
        if err.data is None:
            return self.detect_error_point(err)
        map_node = self.lookup.lookup_node(err.data)
        knode, vnode = self.lookup_kvpair(map_node, err.path[-1])
        return knode.start_mark, vnode.end_mark

    def detect_error_point(self, err: LintError) -> t.Tuple[Mark, Mark]:
        """Whole-line span around the parser's reported position."""
        mark = getattr(err.inner, "context_mark")
        import copy
        if mark is None:
            mark = getattr(err.inner, "problem_mark")
            # NOTE(review): mutates the parser's own mark object before
            # copying it; a second call would decrement the line again.
            mark.line -= 1  # xxx
        start_mark = copy.deepcopy(mark)
        start_mark.column = 0
        end_mark = copy.deepcopy(mark)
        end_mark.column = -1  # presumably "to end of line" — confirm in renderers
        return (start_mark, end_mark)

    def lookup_kvpair(self, node, k):  # todo: rename
        # Linear scan of a YAML mapping node's (key, value) pairs.
        # Returns None when *k* is absent; callers then fail unpacking.
        for knode, vnode in node.value:
            if knode.value == k:
                return knode, vnode
class Formatter:
    """Turns ErrorEvents into one serialized line each, via a Layout."""

    detector: Detector
    layout: Layout

    def __init__(
        self, filename: str, *, detector: Detector, layout: t.Optional[Layout] = None
    ):
        self.filename = filename
        self.detector = detector
        self.layout = layout or LTSVLayout()  # LTSV is the default rendering

    def format(self, ev: ErrorEvent) -> str:
        """Dispatch on the event's error type; unknown errors are re-raised."""
        err = ev.error
        if isinstance(err, ParseError):
            return self.format_parse_error(err)
        elif isinstance(err, ResolutionError):
            return self.format_resolution_error(err)
        elif isinstance(err, ValidationError):
            return self.format_validation_error(err)
        elif isinstance(err, MessageError):
            return self.format_message_error(err, context=ev.context)
        else:
            raise err

    def format_parse_error(self, err: ParseError) -> str:
        """Render a YAML parse failure, including the include trail."""
        # Severity depends on whether the failing file is the root file.
        status = self.detector.detect_status(err.history[-1])
        if hasattr(err.inner, "problem"):
            message = f"{err.inner.problem} ({err.inner.context})"
        else:
            message = repr(err.inner)
        start_mark, end_mark = self.detector.detect_loadning_start_point(err)
        filename = os.path.relpath(start_mark.name, start=".")
        # Trail of files that led to the error; first entry gets the load
        # site's line, last entry the parser's reported line (if any).
        where = [os.path.relpath(name) for name in err.history]
        where[0] = f"{where[0]}:{start_mark.line+1}"
        if self.detector.has_error_point(err):
            where[-1] = f"{where[-1]}:{err.inner.problem_mark.line+1}"
        return self.layout.layout(
            OutputDict(
                status=status,
                errortype=err.__class__.__name__,
                filename=filename,
                start=PositionDict(
                    line=start_mark.line + 1, character=start_mark.column
                ),
                end=PositionDict(line=end_mark.line + 1, character=end_mark.column),
                message=message,
                where=where,
            )
        )

    def format_resolution_error(self, err: ResolutionError) -> str:
        """Render a reference-resolution failure (same shape as parse errors)."""
        start_mark, end_mark = self.detector.detect_loadning_start_point(err)
        filename = os.path.relpath(start_mark.name, start=".")
        status = self.detector.detect_status(err.history[-1])
        message = repr(err.inner)
        where = [os.path.relpath(name) for name in err.history]
        where[0] = f"{where[0]}:{start_mark.line+1}"
        if self.detector.has_error_point(err):
            where[-1] = f"{where[-1]}:{err.inner.problem_mark.line+1}"
        return self.layout.layout(
            OutputDict(
                status=status,
                errortype=err.__class__.__name__,
                filename=filename,
                start=PositionDict(
                    line=start_mark.line + 1, character=start_mark.column
                ),
                end=PositionDict(line=end_mark.line + 1, character=end_mark.column),
                message=message,
                where=where,
            )
        )

    def format_validation_error(self, err: ValidationError) -> str:
        """Render a schema-validation failure at the offending node's span."""
        status = "ERROR"  # validation failures are always hard errors
        message = f"{err.message} (validator={err.validator})"
        node = self.detector.lookup.lookup_node(err.instance)  # xxx
        start_mark, end_mark = node.start_mark, node.end_mark
        filename = os.path.relpath(start_mark.name, start=".")
        where = [os.path.relpath(filename)]
        where[0] = f"{where[0]}:{start_mark.line+1}"
        return self.layout.layout(
            OutputDict(
                status=status,
                errortype=err.__class__.__name__,
                filename=filename,
                start=PositionDict(
                    line=start_mark.line + 1, character=start_mark.column
                ),
                end=PositionDict(line=end_mark.line + 1, character=end_mark.column),
                message=message,
                where=where,
            )
        )

    def format_message_error(
        self,
        err: MessageError,
        *,
        context: t.Optional[Context],
        status: StatusType = "INFO",
    ) -> str:
        """Render a free-form message, pinned to line 1 of its file."""
        message = err.args[0]
        filename = os.path.relpath(context.filename if context else self.filename)
        where = [filename]
        return self.layout.layout(
            OutputDict(
                status=status,
                errortype=err.__class__.__name__,
                filename=filename,
                start=PositionDict(line=1, character=1),
                end=PositionDict(line=1, character=-1),
                message=message,
                where=where,
            )
        )
OutputType = Literal["ltsv", "json"]
def get_formatter(
    filename: str, *, lookup: Lookup, output_type: OutputType
) -> Formatter:
    """Build a Formatter for *filename* using the layout named by *output_type*."""
    return Formatter(
        filename,
        detector=Detector(filename, lookup=lookup),
        layout=get_layout(output_type),
    )
def get_layout(output_type: OutputType) -> Layout:
    """Pick the serializer: "json" -> JSONLayout, anything else -> LTSVLayout."""
    return JSONLayout() if output_type == "json" else LTSVLayout()
| podhmo/schemalint | schemalint/formatter.py | formatter.py | py | 7,496 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "logging.getLogger",
"line_number": 19,
"usage_type": "call"
},
{
"api_name": "typing_extensions.Literal",
"line_number": 21,
"usage_type": "name"
},
{
"api_name": "typing_extensions.TypedDict",
"line_number": 24,
"usage_type": "name"
},
{
"api_name"... |
23881066970 | #!/local/data/atorus1/dora/Compilers/epd-7.3-1-rh5-x86_64(1)/bin/python
##!/Library/Frameworks/Python.framework/Versions/Current/bin/python
##!/Users/dora/Library/Enthought/Canopy_32bit/User/bin/python
import scipy
from numpy import ndarray, zeros, array, size, sqrt, meshgrid, flipud, floor, where, amin, argmin,int
import numpy as nm
from mpl_toolkits.mplot3d import Axes3D
from pylab import *
import pprint
import os
import glob
import time
import os
import time
import cPickle as pickle
from matplotlib import colors
import socket
from mpl_toolkits.axes_grid1 import AxesGrid, ImageGrid
from scipy import trapz
from matplotlib import pyplot as plt
from physics1 import *
import physics1 as ph
phToSHow = 1
import AthenaModel as ath
def plotFormat(ax1,ax2=None,ax3=None,im=None):
# from mpl_toolkits.axes_grid1 import make_axes_locatable
fontsize_bar=14
fz = 16
# ax1.set_ylabel('$\tau N(\tau(\theta)>1)$', fontsize = 22)
# ax1.set_title('Obscuring models', fontsize = 19)
#
# ax1.set_ylabel('log (Number of obscuring models)', fontsize = 19)
# for ylabel in ax.get_yticklabels():
# ylabel.set_fontsize(fontsize_x)
#
# ax1.set_xlabel('Inclination angle', fontsize=fz)
ax[0].set_ylabel('$\log(N_{col})$', fontsize=fz)
# for xlabel in ax1.get_xticklabels():
# xlabel.set_fontsize(fontsize_x)
#
# ax1.set_title('Column density $(cm^{-2})$', fontsize =fz)
fig.subplots_adjust(wspace=0.2)
# plt.setp([a.get_yticklabels() for a in fig.axes[1:] ], visible=False)
# ax1.set_ylabel('$log (N_{col})$', fontsize = 19)
# for ylabel in ax.get_yticklabels():
# ylabel.set_fontsize(fontsize_x)
#
# ax2.set_xlabel('Inclination angle', fontsize = 19)
# for xlabel in ax.get_xticklabels():
# xlabel.set_fontsize(fontsize_x)
# divider = make_axes_locatable(ax)
# cax = divider.append_axes("right", size="5%", pad=0.05)
#
# if (im):
# cb =plt.colorbar(im, cax=cax)
# for t in cb.ax.get_yticklabels():
# t.set_fontsize(fontsize_bar)
def cellTrace(dat, th, Xsrc_x_z, phToSHow):
tiny = 1e-18
i_s = dat.i_s
ie = dat.ie
js = dat.js
je = dat.je
zout = dat.x[je] / nm.tan(th) + Xsrc_x_z[1]
ic = abs(dat.z - zout).argmin()
jc = je
# xcur = nm.array([ dat.x[jc], dat.z[ic] ])
xcur = nm.array([ dat.x[dat.je], zout ])
xsrc0= Xsrc_x_z
a = xcur - xsrc0
norm = (xcur - xsrc0)/nm.dot( xcur - xsrc0, xcur - xsrc0)
#crossing x_{js} plane
xz1 =nm.array([ dat.x[js+1], norm[1]/max(norm[0], tiny)*(dat.x[js+1] - xsrc0[0]) + xsrc0[1] ])
# i = minloc((dat. z -xz1[1]) **2)
i = abs(dat.z - xz1[1]).argmin()
if i>=dat.ie or i==dat.i_s: return(0.,0.)
j = dat.js
xz0 = nm.array([ dat.x[j], dat.z[i] ])
stp = 0
dstot = 0.
tau = 0.
dtau = 0.
dcol_dns = 0.
col_dns = 0.
dl = 0.
while True:
xz1 =nm.array([ dat.x[j+1], #crossing x_{j+1} plane
norm[1]/max(norm[0], tiny)*(dat.x[j+1] - xsrc0[0]) + xsrc0[1] ])
Sx = sqrt(nm.dot(xz1 - xz0, xz1-xz0))
itm = i+ nm.int( nm.sign( norm[1] ) )# crossing z_{i+-1} plane
if (norm[1] != 0.):
xz2 =nm.array([ norm[0]/norm[1] *(dat.z[itm] - xsrc0[1]) + xsrc0[0],
dat.z[itm] ])
else:
xz2 =nm.array([ norm[0]/nm.max(norm[1],tiny)*(dat.z[itm] - xsrc0[1]) + xsrc0[0],
dat.z[itm] ])
Sz = sqrt(nm.dot(xz2 - xz0, xz2-xz0))
if (Sz>Sx):
dl = Sx #right
xz0 = xz1
j += 1
else:
dl = Sz #up or down
xz0 = xz2
i = itm
dstot += dl
stp += 1
opac = ph.KPE
# print "here", dat.Rsc, dat.Dsc, dat.ro[i,phToSHow, j], opac, dl,i,j
dtau = dat.Rsc*dat.Dsc* dat.ro[i,phToSHow,j]* opac * dl
dcol_dns = nm.fabs( dat.ro[i, phToSHow, j]*dat.n0* dat.Rsc*dl)
tau += dtau
col_dns += dcol_dns
# print i, j, stp
if ( i <= dat.i_s or i >= dat.ie or j <= dat.js or j >= dat.je-1):
break
if i==ic and j ==jc :
break
return(tau, col_dns)
def transmisionFunctionsAngleGrid(dat, printResults=False):
Nth = 100
Xsrc = array([0, 0])
# res = dat.x[dat.js]/dat.z[dat.ie]
# thmax = nm.pi / 2
thmin = nm.arctan2(dat.x[dat.js], dat.z[dat.ie]-Xsrc[0] )
thmax =nm.arctan2(dat.x[dat.js], dat.z[dat.i_s]-Xsrc[0])
angle = linspace(thmin, thmax, Nth)
# print angle, range(1,Nth)
# time.sleep(3)
tauTheta = zeros(Nth)
colDens = zeros(Nth)
for k,th in zip(range(1,Nth), angle):
# th = nm.pi/2
# print k,th
# time.sleep(3)
tauTheta[k], colDens[k] = cellTrace(dat, th, Xsrc_x_z = [Xsrc[1], Xsrc[0]], phToSHow =phToSHow)
# print("tau=", tauTheta[k], "colDens=", colDens[k], th, Nth)
# exit(); time.sleep(3)
if printResults:
fig = plt.figure()
ax = fig.add_subplot(111)
funcToPlot = log10(colDens)
funcToPlot = tauTheta
ax.plot(angle*180./nm.pi, funcToPlot)
show()
return(tauTheta,colDens, angle)
def iteratorOverDataDirectoriesOverHDfFiles(basePath, dataDir,
simParamFileDir, locdirList2, funcToCalculate, fileToSavePrefix):
mod={}
maxNumFile = 500
dat =ath.athDataModel()
for dirName, i in zip(locdirList2, range(size(locdirList2))):
mod.update({ locdirList2[i]:{'ang':[], 'tau':[], 'cdens':[] }})
simTime = []
nFile = 0.
dat.loadSimulationParam(simParamFileDir + 'athinput.torus9_hydro_2D', print_res=True)
scale = 1.
dat.n0 /= scale
dat.Dsc/=scale
filelist = glob.glob(os.path.join(dataDir, 'mhdXwind*.bin') )
for fileInDir in sorted(filelist):
try:
dat.loadDataFromBinFiles(fileInDir, dat, printDetail=False)
print("file to open:", fileInDir)
except:
print("skip file:", fileInDir)
break
# torMass = dat.torusMass(dat)
# print(torMass/MSOL)
# exit()
tau, cdens, ang = funcToCalculate(dat, printResults=False)
# print tau, cdens, ang
mod[locdirList2[i]]['tau'].append(tau.tolist())
mod[locdirList2[i]]['cdens'].append(cdens.tolist())
if not mod[ locdirList2[i]]['ang'] :
mod[locdirList2[i]]['ang'].append(ang.tolist())
simTime.append(nFile)
nFile+=1
if (nFile > maxNumFile):
print ("maxNumFile reached")
break
mod[ locdirList2[i] ].update( {'par': dat.par })
mod[ locdirList2[i] ]['par'].update( {'dt_bin': dat.dt_bin })
return(mod)
def plotOnePane(ax, scale,Ncol_min, Ncol_max):
Nd = len(locdirList2)
Na = len(mdat[locdirList2[0]] ['ang'][0])
Ny = len(mdat[locdirList2[0]] ['tau'])
Nm=0
distribFun1 = zeros(Na)
distribFun2 = zeros(Na)
col_max=0
col_min = 1e30
lineType = ['-', '--', '-o', '-*']
color = ['k', 'b', 'g', 'r']
markerWidth = [2,2,2,2]
for i_types in xrange(Nd):
for j_y in xrange(Ny):
for k_ang in xrange(Na):
angl = mdat[locdirList2[i_types]] ['ang'][0][k_ang]
tau= mdat[locdirList2[i_types]] ['tau'][j_y][k_ang]
col= mdat[locdirList2[i_types]] ['cdens'][j_y][k_ang]
if col > col_max:
col_max = col
if col <= col_min:
col_min = col
Nm +=1
if tau>1.:
distribFun1[k_ang] +=1
NBINS = 10
colDens, dColdens = nm.linspace(col_min, col_max, NBINS, retstep=True)
NANG = 50
angle = nm.linspace(0., nm.pi, NANG, retstep=True)
ang_scat = []
Ncol_scat = []
simTime = []
told = 0
dt_bin = mdat[locdirList2[0]] ['par']['dt_bin']
for j_y in xrange(Ny):
tnew = told + dt_bin
simTime.append(tnew)
told=tnew
for k_ang in xrange(Na):
angl = mdat[locdirList2[i_types]] ['ang'][0][k_ang]
col= mdat[locdirList2[i_types]] ['cdens'][j_y][k_ang]
scale_m = 1.+ (col - Ncol_min)/(Ncol_max-Ncol_min)*(scale -1)
col /= scale_m
Ncol_scat.append(col)
ang_scat.append(angl)
x = nm.array(mdat[locdirList2[0]]['ang'][0])
imax= distribFun1.argmax()
thmax = x[imax] - nm.pi/2.
x=x-thmax
# print(thmax)
loc1='lower center'
simTimeArr =nm.array(simTime)
print (len(Ncol_scat))
# eps = 1.e-2
# Ncol_scat[Ncol_scat<eps]=eps
if i_types==0:
# color = [str(y/100. ) for y in simTimeArr]
# color = matplotlib.cm.rainbow(color)
# print color
# color = [simTime,simTime]
# x = np.random.rand(100)
# y = np.random.rand(100)
# t=x
# print t
# plt.scatter(x, y, c=t)
# plt.show()
print (nm.array(ang_scat))
x1 =(nm.array(ang_scat))*180./nm.pi - 0
y1= log10( Ncol_scat )
# print len(x),len(y), len(simTime);exit()
cm = plt.cm.get_cmap('RdYlBu')
ax.scatter( x1, y1, cmap=cm)
# .xlim((0,185))
# fig.colorbar(sc)
# ax1.scatter( x,y, c=simTime)
# #
# ax1 = fig.add_subplot(131)
# ax1.plot(x*180./nm.pi, log10(distribFun1), lineType[i_types], color=color[i_types], linewidth=markerWidth[i_types]
# ax1.legend(('$\Gamma=0.01$', '$\Gamma=0.05$', '$\Gamma=0.1$', '$\Gamma=0.3$'),
# shadow = False, loc = loc1)
if i_types==1:
ax2 = fig.add_subplot(142,sharey=ax1)
ax2.scatter((nm.array(ang_scat)-thmax)*180./nm.pi, log10(Ncol_scat), color=color[i_types])
# ax2.legend(('$\Gamma=0.01$', '$\Gamma=0.05$', '$\Gamma=0.1$', '$\Gamma=0.3$'),
# shadow = False, loc = loc1)
if i_types==2:
ax3 = fig.add_subplot(143,sharey=ax1)
ax3.scatter((nm.array(ang_scat)-thmax)*180./nm.pi, log10(Ncol_scat), color=color[i_types])
# ax2.legend(('$\Gamma=0.01$', '$\Gamma=0.05$', '$\Gamma=0.1$', '$\Gamma=0.3$'),
# shadow = False, loc = loc1)
if i_types==3:
ax3 = fig.add_subplot(144,sharey=ax1)
ax3.scatter((nm.array(ang_scat)-thmax)*180./nm.pi, log10(Ncol_scat), color=color[i_types])
# ----------------------------------------------------------------------------
# the MAIN
# ----------------------------------------------------------------------------
whatToDo = 'calculTauAndColDens'
#whatToDo = 'processTauAndColDensFromFile'
dat =ath.athDataModel()
if socket.gethostname()=='atorus':
# locDirList = ['/local/data/atorus2/dora/HW_Jan201707_256x8x256_L0.5n10e8']
locDirList = ['/local/data/atorus1/dora/PROJECTS/AthenaWind']
basePath = locDirList[0]
if os.path.isdir( basePath +'/bin' ):
dataDir = basePath+'/bin/'
locdirList2 = ['']
simParamFileDir = basePath+'/tst/cylindrical/'
# print(simParamFileDir); exit()
else:
dataDir = basePath+'/'
locdirList2 = ['']
simParamFileDir = basePath+'/'
if whatToDo == 'processTauAndColDensFromFile':
pathToPickledFile = dataDir
fileNameList = ['multiDat_TauColDensVsAngle.p', 'multiDat_TauColDensVsAngle.p']
locdirList2 = ['']
put_out= '/local/data/atorus1/dora/PROJECTS/SCRIPTS/T9'
put_FIG= '/local/data/atorus1/dora/PROJECTS/SCRIPTS/T9'
else:
if whatToDo == 'calculTauAndColDens' :
locdirList = [ 'SolovievSep201615_256x8x256_L0.n10e10/']
locdirList = [ 'runDec201608_256x8x256_L0.5n10e8/']
put_out= '/Users/dora/WORK/ECLIPSE_SPACE/torus9'
put_FIG = '/Users/dora/Documents/TEX/torus9/'
locdir = locdirList[0]
dirFileToReadBase = os.getcwd()
dataDir = '/DATA/'
dirFileToReadBase = os.getcwd()
dataDir = '/DATA'
dirToRead = dirFileToReadBase + dataDir+locdirList[0]
dat.loadSimulationParam(dirToRead + 'athinput.torus9_hydro_2D', print_res=True)
elif whatToDo == 'processTauAndColDensFromFile':
pathToPickledFile = '/Users/dora/WORK/ECLIPSE_SPACE/torus9/'
fileNameList = ['SOL_multiDat_TauColDensVsAngle.p', 'HW_multiDat_TauColDensVsAngle.p']
locdirList2 = ['']
put_FIG = '/Users/dora/Documents/TEX/torus9/'
if whatToDo == 'calculTauAndColDens':
print('files =', basePath, dataDir,simParamFileDir)
multiDat=iteratorOverDataDirectoriesOverHDfFiles(basePath, dataDir,simParamFileDir,locdirList2,
transmisionFunctionsAngleGrid, fileToSavePrefix=False)
# for i_type_mod in xrange(len(locdirList2)):
fileToSavePrefix ='multiDat_TauColDensVsAngle.p'
filename = dataDir + fileToSavePrefix
pickle.dump(multiDat, open(filename, "wb"))
# try:
# nm.savetxt(filename, list(multiDat))
# print("saving to ", filename)
# except IOError:
# print('cannot save to', filename)
multiDat=[]
print("calculTauAndColDens ..done");
exit()
if whatToDo =='processTauAndColDensFromFile':
fileName = fileNameList[0]
filename = pathToPickledFile + fileName
fig, ax = plt.subplots(1, 2, sharey=True)
for filename, np in zip(fileNameList, range(len(fileNameList))):
mdat = pickle.load( open( filename, "rb" ) )
Ncol_min = 10**21
Ncol_max = 10**27.4
if np==0:
scale = 1
else:
scale =12
plotOnePane(ax[np],scale, Ncol_min, Ncol_max)
plotFormat(ax[np])
fig.suptitle('Column density $(cm^{-2})$', y=0.95, fontsize=16)
fig.text(0.5, 0.02, 'Inclination angle', ha='center', fontsize=16)
put_out= '/Users/dora/Documents/TEX/torus9/'
fileNameToSave = put_out+'colDensInclAngleAllModels'
# fig.savefig(fileNameToSave + ".pdf", format='pdf')
show()
print('END OF PROGRAM')
# idxa = (np.abs(angle-angl)).argmin()
# idxc = (np.abs(colDens-col)).argmin()
# Ncol = nm.sort(distribFun2)
# indx = nm.argsort(distribFun2)
# for i in indx:
# cmin = distribFun2[0]
# cmax = distribFun2[len(distribFun2)-1]
# for i_type_mod in xrange(len(locdirList2)):
# print("model", i_type_mod)
# for yin_dir, j in zip(mdat[locdirList2[i_type_mod]]['tau'], xrange(N)):
# ax.plot(x[0], log10(yin_dir))
| AntoXa1/T9 | transmission_properties2.py | transmission_properties2.py | py | 17,801 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "numpy.tan",
"line_number": 84,
"usage_type": "call"
},
{
"api_name": "numpy.array",
"line_number": 91,
"usage_type": "call"
},
{
"api_name": "numpy.dot",
"line_number": 98,
"usage_type": "call"
},
{
"api_name": "numpy.array",
"line_number": 100,... |
2421836667 | import pytz
import logging
from datetime import datetime, timedelta
from odoo import fields, models, api, _
from odoo.exceptions import UserError, ValidationError
_logger = logging.getLogger(__name__)
class MrpDistributeTimesheetLine(models.TransientModel):
_name = 'mrp.distribute.timesheet.line'
_description = 'Represents a sample of the distribution'
project_id = fields.Many2one(
'project.project',
string='Project',
)
production_id = fields.Many2one(
'mrp.production',
string='Production',
)
start_time = fields.Datetime()
end_time = fields.Datetime()
def _prepare_analytic_line(self, name):
diff = self.end_time - self.start_time
hours = diff.total_seconds() / 3600
vals = {
'name': name,
'project_id': self.project_id.id,
'production_id': self.production_id.id,
'date_time': self.start_time,
'unit_amount': hours,
}
return vals
class MrpDistributeTimesheet(models.TransientModel):
_name = 'mrp.distribute.timesheet'
_description = 'Distribute working time along multiple production orders'
@api.model
def _default_date_time(self):
def ceil_dt(dt, delta):
return dt + (datetime.min - dt) % delta
return ceil_dt(fields.Datetime.now(), timedelta(minutes=-15))
@api.model
def _default_reason(self):
return self.env.ref('mrp_timesheet_distribution.layout_and_wiring')
production_ids = fields.Many2many(
'mrp.production',
string='Production Orders',
readonly=True,
)
reason_id = fields.Many2one(
'mrp.distribute.timesheet.reason',
string='Reason',
required=True,
default=_default_reason
)
custom_reason = fields.Char(string='Other Reason')
date_time = fields.Datetime(
default=_default_date_time,
required=True,
)
unit_amount = fields.Float(
'Quantity',
default=0.0,
required=True,
)
timesheet_line_ids = fields.Many2many(
'mrp.distribute.timesheet.line',
string='Timesheet Lines',
compute="_compute_timesheet_line_ids",
)
exclude_time = fields.Boolean()
excluded_start_time = fields.Datetime()
excluded_end_time = fields.Datetime()
@api.model
def default_get(self, fields):
rec = super().default_get(fields)
active_ids = self._context.get('active_ids')
active_model = self._context.get('active_model')
if active_model == 'mrp.production' and active_ids:
production_ids = self.env['mrp.production'].browse(active_ids)
rec.update({'production_ids': production_ids.ids})
return rec
@api.multi
def action_reopen(self):
return self._reopen()
@api.multi
def _reopen(self, id=False):
return {
'type': 'ir.actions.act_window',
'view_mode': 'form',
'view_type': 'form',
'res_id': id or self.id,
'res_model': self._name,
'target': 'new',
'context': {
'default_model': self._name,
},
}
@api.multi
def action_distribute(self):
self._do_distribute()
@api.multi
def action_distribute_continue(self):
self._do_distribute()
mrp_distribute_timesheet_id = self.create(
{
"date_time": self.timesheet_line_ids[-1].end_time,
"reason_id": self.reason_id.id,
"exclude_time": self.exclude_time,
"production_ids": [(6, 0, self.production_ids.ids)],
}
)
mrp_distribute_timesheet_id.onchange_date_time()
return self._reopen(mrp_distribute_timesheet_id.id)
@api.onchange('date_time')
def onchange_date_time(self):
# Convert datetime into user timezone to manipulate hours and minutes
tz = self.env.context.get('tz') or self.env.user.tz
date_time_tz = pytz.timezone(tz).normalize(
pytz.utc.localize(self.date_time)
)
resource_calendar_id = self.env.user.resource_calendar_id
attendance_ids = resource_calendar_id.attendance_ids.filtered(
lambda r: r.dayofweek == str(self.date_time.weekday())
)
st = et = False
for attendance_id in attendance_ids:
if attendance_id.day_period == 'morning':
hour, minute = divmod(float(attendance_id.hour_to) * 60, 60)
st = date_time_tz.replace(
hour=round(hour), minute=round(minute), second=0
)
if attendance_id.day_period == 'afternoon':
hour, minute = divmod(float(attendance_id.hour_from) * 60, 60)
et = date_time_tz.replace(
hour=round(hour), minute=round(minute), second=0
)
# Set start and end time in user timezone
if not st or not et:
st = date_time_tz.replace(hour=12, minute=0, second=0)
et = date_time_tz.replace(hour=13, minute=30, second=0)
# Convert back data to UTC since all datetime data must be
# stored without timezone info (means UTC)
self.excluded_start_time = pytz.utc.normalize(st).replace(tzinfo=None)
self.excluded_end_time = pytz.utc.normalize(et).replace(tzinfo=None)
@api.depends(
'date_time', 'unit_amount', 'exclude_time', 'excluded_start_time',
'excluded_end_time'
)
def _compute_timesheet_line_ids(self):
self.timesheet_line_ids.unlink()
if self.production_ids and self.date_time and self.unit_amount:
start = self.date_time
end = start + timedelta(hours=self.unit_amount)
if self.exclude_time and self.excluded_end_time > start >= self.excluded_start_time:
start = self.excluded_end_time
end = start + timedelta(hours=self.unit_amount)
self._generate_timesheet_interval(start, end)
elif self.exclude_time and end > self.excluded_start_time and start < self.excluded_end_time:
start_delta = self.excluded_start_time - start
self._generate_timesheet_interval(start, start + start_delta)
self._generate_timesheet_interval(
self.excluded_end_time, self.excluded_end_time +
timedelta(hours=self.unit_amount) - start_delta
)
else:
self._generate_timesheet_interval(start, end)
def _generate_timesheet_interval(self, start, end):
diff = (end - start) / len(self.production_ids)
i = 0
line_ids = self.env['mrp.distribute.timesheet.line']
for production_id in self.production_ids:
vals = {
'start_time': i * diff + start,
'end_time': (i + 1) * diff + start,
'project_id': production_id.project_id.id,
'production_id': production_id.id,
}
line_ids += self.env['mrp.distribute.timesheet.line'].create(vals)
i += 1
self.timesheet_line_ids += line_ids
@api.model
def _get_or_create_task(self, project_id, name):
time_tracking_type = self.env.ref(
'project_identification.time_tracking_type'
)
stage_done = self.env.ref(
'project_task_default_stage.project_tt_deployment'
)
task_id = self.env['project.task'].search(
[
('project_id', '=', project_id.id),
('name', '=', name),
],
limit=1
)
if not task_id:
vals = {
'project_id': project_id.id,
'type_id': time_tracking_type.id,
'stage_id': stage_done.id,
'name': name,
}
task_id = self.env['project.task'].create(vals)
return task_id
def _do_distribute(self):
if not self.timesheet_line_ids:
raise ValidationError(
_('The number of timesheet lines cannot be 0.')
)
for line_id in self.timesheet_line_ids:
name = self.reason_id.name
if self.reason_id == self.env.ref(
'mrp_timesheet_distribution.other'
):
name = self.custom_reason
vals_line = line_id._prepare_analytic_line(name)
if not 'task_id' in vals_line:
vals_line['task_id'] = self._get_or_create_task(
line_id.project_id, name
).id
self.env['account.analytic.line'].create(vals_line)
| decgroupe/odoo-addons-dec | mrp_timesheet_distribution/wizard/mrp_distribute_timesheet.py | mrp_distribute_timesheet.py | py | 8,775 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "logging.getLogger",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "odoo.models.TransientModel",
"line_number": 11,
"usage_type": "attribute"
},
{
"api_name": "odoo.models",
"line_number": 11,
"usage_type": "name"
},
{
"api_name": "odoo.fie... |
34309580961 | from odoo import models, fields, api, tools
from datetime import datetime, timedelta
import json
# ***************** CREAR CIUDADES **********************
class create_city(models.TransientModel):
_name = 'medievol.create_city'
def _default_player(self):
jugador = self.env['res.partner'].browse(self._context.get('active_id'))
print(jugador.name)
return jugador
player = fields.Many2one('res.partner', default=_default_player, readonly=True)
region = fields.Many2one('medievol.region')
state = fields.Selection([('i', "Elegir región"), ('f', "Finalizar región")], default='i')
nombre = fields.Char()
@api.multi
def back(self):
if self.state == 'f':
self.state = 'i'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de ciudad',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
@api.multi
def next(self):
if self.state == 'i':
self.state = 'f'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de ciudad',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
@api.multi
def new(self):
c_template = self.env.ref('medievol.city1')
c = self.env['medievol.city'].create({
'name': self.nombre,
'image': c_template.image,
'poblation': 100,
'defense': 50,
'quarry_id': self.env.ref('medievol.quarry1').id,
'mine_id': self.env.ref('medievol.mine1').id,
'farm_id': self.env.ref('medievol.farm1').id,
'castle_id': self.env.ref('medievol.castle1').id,
'infirmary_id': self.env.ref('medievol.infirmary0').id,
'wall_id': self.env.ref('medievol.wall0').id,
'barracks_id': self.env.ref('medievol.barracks0').id,
'treasury_id': self.env.ref('medievol.treasury0').id,
'forge_id': self.env.ref('medievol.forge0').id,
'player_id': self.player.id,
'region_id': self.region.id
})
for i in [1, 2, 3, 4]:
if i < 4:
p = self.env['medievol.means_city'].create({
'cantidad': 100,
'city_id': c.id,
'means_id': self.env.ref('medievol.means' + str(i)).id
})
else:
p = self.env['medievol.means_city'].create({
'cantidad': 50,
'city_id': c.id,
'means_id': self.env.ref('medievol.means' + str(i)).id
})
# ***************** CREAR GUERRAS *********************************
class type_soldiers(models.TransientModel):
_name = 'medievol.type_soldiers'
brujo = fields.Many2one('medievol.create_wars')
tipo = fields.Many2one('medievol.soldiers')
quantity = fields.Float()
class create_wars(models.TransientModel):
_name = 'medievol.create_wars'
def _default_city(self):
return self.env['medievol.city'].browse(self._context.get('active_id'))
def _default_player(self):
player = self.env['medievol.city'].browse(self._context.get('active_id')).player_id
return player
def _get_date(self):
date = datetime.now()
return fields.Datetime.to_string(date)
def _get_date_fin(self):
date = datetime.now() + timedelta(hours=4)
return fields.Datetime.to_string(date)
city = fields.Many2one('medievol.city', default=_default_city, readonly=True)
player = fields.Many2one('res.partner', default=_default_player, readonly=True)
fecha_ini = fields.Datetime(default=_get_date)
fecha_fin = fields.Datetime(default=_get_date_fin)
soldiers_wars = fields.Many2many('medievol.soldiers_wars')
tipo = fields.Many2one('medievol.soldiers')
def _get_attack_domain(self):
c = self._context.get('citys')
if c:
c = json.loads(c)
return [('id','in',c)]
city_attack = fields.Many2one('medievol.city', domain=_get_attack_domain)
wiz_soldiers = fields.One2many('medievol.type_soldiers', 'brujo')
quantity = fields.Float()
state = fields.Selection([('i', "Selección Soldados"), ('c', "Ciudad a Atacar"), ('f', "Finalizar")],
default='i')
@api.onchange('city')
def _onchange_soldiers(self):
for f in self:
return {
'domain': {'tipo': [('id', 'in', f.city.soldiers_city_id.soldiers_id.ids)]}
}
@api.multi
def send_soldiers(self):
for f in self:
self.env['medievol.type_soldiers'].create({
'brujo': f.id,
'tipo': f.tipo.id,
'quantity': f.quantity
})
return {
'type': 'ir.actions.act_window',
'name': 'Creación de Batallas',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
@api.multi
def exit(self):
return
@api.multi
def next(self):
if self.state == 'i':
self.state = 'c'
elif self.state == 'c':
self.state = 'f'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de Batallas',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'context': {'citys': json.dumps(self.env['medievol.city'].search([]).filtered(lambda r: r.id != self.city.id).ids)},
'target': 'new'
}
@api.multi
def back(self):
if self.state == 'f':
self.state = 'c'
elif self.state == 'c':
self.state = 'i'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de Batallas',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
@api.multi
def new_battle(self):
for f in self:
soldados_cantidad = f.wiz_soldiers
for soldiers in soldados_cantidad:
batalla = self.env['medievol.wars'].create({})
self.env['medievol.soldiers_wars'].create({
'cantidad': soldiers.quantity,
'soldiers_id': soldiers.tipo.id,
'city_id': f.city.id,
'city_attack_id': f.city_attack.id,
'batalla_id': batalla.id,
'player_id': f.player.id
})
# ***************** CREAR SOLDADOS **********************
class create_soldiers(models.TransientModel):
_name = 'medievol.create_soldiers'
def _default_city(self):
city = self.env['medievol.city'].browse(self._context.get('city_id'))
return city
city = fields.Many2one('medievol.city', default=_default_city, readonly=True)
soldiers = fields.Many2one('medievol.soldiers')
quantity = fields.Float()
comprobar = fields.Boolean()
cfoodsoldier = fields.Integer(readonly=True)
cgoldsoldier = fields.Integer(readonly=True)
cfoodcity = fields.Float(readonly=True)
cgoldcity = fields.Float(readonly=True)
state = fields.Selection([('i', "Selección Soldados"), ('c', "Gastos de Producción"), ('f', "Finalizar")],
default='i')
@api.multi
def new(self):
c = self.env['medievol.recruit_soldiers'].create({
'cantidad': self.quantity,
'city_id': self.city.id,
'soldiers_id': self.soldiers.id
})
encontrado = False
soldados_ciudad = self.city.soldiers_city_id.search([])
if len(soldados_ciudad) > 0:
for sol in soldados_ciudad:
if c.soldiers_id == sol.soldiers_id:
sol.cantidad = sol.cantidad + c.cantidad
encontrado = True
if encontrado == False:
soldier_new = self.env['medievol.soldiers_city'].create({
'cantidad': c.cantidad,
'city_id': c.city_id.id,
'soldiers_id': c.soldiers_id.id
})
granja = self.env['medievol.farm'].search([])[0]
tesoreria = self.env['medievol.treasury'].search([])[0]
recursos = self.city.means_city_id
for r in recursos:
if r.means_id == granja.means_id:
r.cantidad = r.cantidad - self.cfoodsoldier
elif r.means_id == tesoreria.means_id:
r.cantidad = r.cantidad - self.cgoldsoldier
@api.multi
def back(self):
if self.state == 'f':
self.state = 'c'
elif self.state == 'c':
self.state = 'i'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de Soldados',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
@api.multi
def next(self):
if self.state == 'i':
self.state = 'c'
soldado = self.soldiers
self.cfoodsoldier = soldado.food * self.quantity
self.cgoldsoldier = soldado.gold * self.quantity
granja = self.env['medievol.farm'].search([])[0]
tesoreria = self.env['medievol.treasury'].search([])[0]
recursos = self.city.means_city_id
for r in recursos:
if r.means_id == granja.means_id:
self.cfoodcity = r.cantidad
elif r.means_id == tesoreria.means_id:
self.cgoldcity = r.cantidad
cantidadfood = self.cfoodcity - self.cfoodsoldier
cantidadgold = self.cgoldcity - self.cgoldsoldier
if cantidadfood >= 0 and cantidadgold >= 0:
self.comprobar = True
else:
self.comprobar = False
elif self.state == 'c':
if self.comprobar:
self.state = 'f'
return {
'type': 'ir.actions.act_window',
'name': 'Creación de Soldados',
'res_model': self._name,
'res_id': self.id,
'view_mode': 'form',
'target': 'new'
}
| Buffty/ProyectoDAM | modules/medievol/models/wizards.py | wizards.py | py | 10,467 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "odoo.models.TransientModel",
"line_number": 7,
"usage_type": "attribute"
},
{
"api_name": "odoo.models",
"line_number": 7,
"usage_type": "name"
},
{
"api_name": "odoo.fields.Many2one",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "odoo.f... |
2847074558 | # Import keras.
import keras as kr
import tensorflow as tf
from keras.models import Sequential
from keras.datasets import mnist
from keras.models import Sequential, load_model
from keras.layers.core import Dense, Dropout, Activation
from keras.utils import np_utils
from keras.models import load_model
import sklearn.preprocessing as pre
import sys
import gzip
import numpy as np
import cv2
import matplotlib.pyplot as plt
import PIL
from PIL import Image, ImageDraw, ImageTk
import tkinter as tk
import os.path
## Function to build the model, need to have a folder called dataset on same level as the script
## .gz files must be located in here
## Canvas all taken from https://www.youtube.com/watch?v=OdDCsxfI8S0
width = 200
height = 200
center = height//2
white = (255, 255, 255)
green = (0,128,0)
ModelCreated = False
def save():
filename = "image.png"
image1.save(filename)
def paint(event):
# python_green = "#476042"
x1, y1 = (event.x - 1), (event.y - 1)
x2, y2 = (event.x + 1), (event.y + 1)
cv.create_oval(x1, y1, x2, y2, fill="black",width=5)
draw.line([x1, y1, x2, y2],fill="black",width=5)
##############################################################################################
def buildModel():
model = kr.models.Sequential()
## https://machinelearningmastery.com/handwritten-digit-recognition-using-convolutional-neural-networks-python-keras/
## Used this reference for dropout values as I was unsure why this was used
model.add(Dense(512, input_shape=(784,)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
with gzip.open('dataset/train-images-idx3-ubyte.gz', 'rb') as f:
train_img = f.read()
with gzip.open('dataset/train-labels-idx1-ubyte.gz', 'rb') as f:
train_lbl = f.read()
train_img = np.array(list(train_img[16:])).reshape(60000, 28, 28).astype(np.uint8)/ 255.0
train_lbl = np.array(list(train_lbl[ 8:])).astype(np.uint8)
encoder = pre.LabelBinarizer()
encoder.fit(train_lbl)
outputs = encoder.transform(train_lbl)
inputs = train_img.reshape(60000, 784)
# Train the model with our inputs(Images) and outputs (Labels)
#print("Building neural network - May take a few mins!")
model.fit(inputs, outputs, epochs=10, batch_size=128)
model.save('Mnist')
def CompareImage(imageFile):
with gzip.open('dataset/train-labels-idx1-ubyte.gz', 'rb') as f:
train_lbl = f.read()
train_lbl = np.array(list(train_lbl[ 8:])).astype(np.uint8)
New_model = load_model('Mnist')
print("According to my network your number is: ")
encoder = pre.LabelBinarizer()
encoder.fit(train_lbl)
print(encoder.inverse_transform(New_model.predict(imageFile)))
def convertImage(imagefile):
## Really good reference for this : http://www.appstate.edu/~marshallst/GLY3455/lectures/9_Image_Processing.pdf
## Convert to greyscale
im = Image.open(imagefile).convert('L')
## Make sure image is resized
im= im.resize((28, 28), Image.BICUBIC)
## Convert to list
im = list(im.getdata())
# Currently everything is in bytes 0 - 255 , we want to make this 0-1
im = [(255 - x) * 1.0 / 255.0 for x in im]
## need to reshape for our model, expects an array of length 1-D array of size 784
im = np.array(list(im)).reshape(1,784)
print("Image successfully converted! Sending To model")
## Send the ready array to our build model function
CompareImage(im)
def print_menu():
print("-" * 15 , "Welcome to Keiths Digit Recognition Script" , 15 * "-")
print("A. Create Model (Must do this first) " + "Model Created: " + str(ModelCreated))
print("B. Select your own image")
print("C. Draw your digit")
print("D. Exit")
loop=True
## Check if model is created
while loop:
## While loop which will keep going until loop = False
if os.path.isfile('Mnist'):
ModelCreated = True
print_menu() ## Displays menu
choice = input("Enter your choice [A-C]: ")
print(choice)
if choice == 'A':
print("Creating Model")
buildModel()
elif choice == 'B':
userInput = input("Please enter file name/path: ")
convertImage(userInput)
elif choice=='C':
# Canvas taken from https://www.youtube.com/watch?v=OdDCsxfI8S0
print("Creating canvas (X canvas off when finished and select option one and enter 'image.png')")
root = tk.Tk()
# Tkinter create a canvas to draw on
cv = tk.Canvas(root, width=width, height=height, bg='white')
cv.pack()
# PIL create an empty image and draw object to draw on
# memory only, not visible
image1 = PIL.Image.new("RGB", (width, height), white)
draw = ImageDraw.Draw(image1)
# do the Tkinter canvas drawings (visible)
# cv.create_line([0, center, width, center], fill='green')
cv.pack()
cv.bind("<B1-Motion>", paint)
# do the PIL image/draw (in memory) drawings
# draw.line([0, center, width, center], green)
# PIL image can be saved as .png .jpg .gif or .bmp file (among others)
# filename = "my_drawing.png"
# image1.save(filename)
button=tk.Button(text="save",command=save)
button.pack()
root.mainloop()
elif choice=='D':
print("Exit")
## You can add your code or functions here
loop=False # This will make the while loop to end as not value of loop is set to False
else:
# Any integer inputs other than values 1-5 we print an error message
print("Wrong option selection. Enter any key to try again..")
| KeithH4666/Jupyter-Notebooks-e.g-iris-classifier-script-Mnist-Dataset-script | Digit Recognition Script/Handwritten.py | Handwritten.py | py | 5,998 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "keras.models.Sequential",
"line_number": 48,
"usage_type": "call"
},
{
"api_name": "keras.models",
"line_number": 48,
"usage_type": "attribute"
},
{
"api_name": "keras.layers.core.Dense",
"line_number": 52,
"usage_type": "call"
},
{
"api_name": "ker... |
18323491914 | import logging
from typing import Dict, List
import torch
from torch import nn
from detectron2.config import configurable
from detectron2.utils.events import get_event_storage
from detectron2.modeling import META_ARCH_REGISTRY
from typing import Any
from .meta_one_stage_detector import MetaProposalNetwork
from sylph.modeling.code_generator.build import build_code_generator
from detectron2.modeling.meta_arch import GeneralizedRCNN
from detectron2.layers.batch_norm import FrozenBatchNorm2d
logger = logging.getLogger(__name__)
__all__ = ["FewShotGeneralizedRCNN", "FewShotDetector"]
@META_ARCH_REGISTRY.register()
class FewShotGeneralizedRCNN(MetaProposalNetwork, GeneralizedRCNN):
"""
Generalized R-CNN. Any models that contains the following three components:
1. Per-image feature extraction (aka backbone)
2. Region proposal generation
3. Per-region feature extraction and prediction
Inherits both Faster RCNN and a MetaProposalNetwork
"""
@configurable
def __init__(
self,
*,
cfg,
episodic_learning: bool,
code_generator: nn.Module, # add code generator, optional
**kwargs,
):
"""
Args:
backbone: a backbone module, must follow detectron2's backbone interface
proposal_generator: a module that generates proposals using backbone features
roi_heads: a ROI head that performs per-region computation
pixel_mean, pixel_std: list or tuple with #channels element, representing
the per-channel mean and std to be used to normalize the input image
input_format: describe the meaning of channels of input. Needed by visualization
vis_period: the period to run visualization. Set to 0 to disable.
"""
GeneralizedRCNN.__init__(self, **kwargs)
self.cfg = cfg
self.episodic_learning = episodic_learning
self.code_generator = code_generator
self.in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES
if self.episodic_learning:
assert self.code_generator is not None
self._freeze_parameters(cfg)
def _freeze_proposal_heads(self, cfg):
if cfg.MODEL.PROPOSAL_GENERATOR.FREEZE:
for _, p in self.proposal_generator.named_parameters():
p.requires_grad = False
# convert the batch norm to frozen batch norm
FrozenBatchNorm2d.convert_frozen_batchnorm(self.proposal_generator)
logger.info("froze proposal_generator heads parameters")
def _freeze_roi_heads(self, cfg):
if cfg.MODEL.ROI_HEADS.FREEZE:
for _, p in self.roi_heads.named_parameters():
p.requires_grad = False
# convert the batch norm to frozen batch norm
FrozenBatchNorm2d.convert_frozen_batchnorm(self.roi_heads)
logger.info("froze roi heads parameters")
def _freeze_parameters(self, cfg):
"""
Freeze parameters from BACKBONE, PROPOSAL_GENERATOR, and META_LEARN.CODE_GENERATOR
"""
self._freeze_backbone_parameters(cfg)
self._freeze_detector_head(cfg) # proposal_generator
self._freeze_code_generator(cfg)
self._freeze_roi_heads(cfg)
self._freeze_proposal_heads(cfg)
@classmethod
def from_config(cls, cfg):
ret = GeneralizedRCNN.from_config(cfg)
# backbone = build_backbone(cfg)
episodic_learning = cfg.MODEL.META_LEARN.EPISODIC_LEARNING
input_shape = ret["backbone"].output_shape()
in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES
input_shapes = [input_shape[f] for f in in_features]
strides = [input_shape[f].stride for f in in_features]
code_generator = ( #TODO: add strides for
build_code_generator(cfg, feature_channels=input_shapes[0].channels, feature_levels=len(in_features), strides=strides)
if episodic_learning
else None
)
ret["cfg"] = cfg
ret["episodic_learning"] = episodic_learning
ret["code_generator"] = code_generator
return ret
def forward_base_detector(self, batched_inputs: List[Dict[str, torch.Tensor]]): # rename the forward to forward_base_detector
"""
Args:
batched_inputs: a list, batched outputs of :class:`DatasetMapper` .
Each item in the list contains the inputs for one image.
For now, each item in the list is a dict that contains:
* image: Tensor, image in (C, H, W) format.
* instances (optional): groundtruth :class:`Instances`
* proposals (optional): :class:`Instances`, precomputed proposals.
Other information that's included in the original dicts, such as:
* "height", "width" (int): the output resolution of the model, used in inference.
See :meth:`postprocess` for details.
Returns:
list[dict]:
Each dict is the output for one input image.
The dict contains one key "instances" whose value is a :class:`Instances`.
The :class:`Instances` object has the following keys:
"pred_boxes", "pred_classes", "scores", "pred_masks", "pred_keypoints"
"""
assert not self.episodic_learning
if not self.training:
return GeneralizedRCNN.inference(self, batched_inputs)
images = self.preprocess_image(batched_inputs)
if "instances" in batched_inputs[0]:
gt_instances = [x["instances"].to(self.device) for x in batched_inputs]
else:
gt_instances = None
features = self.backbone(images.tensor)
if self.proposal_generator is not None:
proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)
else:
assert "proposals" in batched_inputs[0]
proposals = [x["proposals"].to(self.device) for x in batched_inputs]
proposal_losses = {}
_, detector_losses = self.roi_heads(images, features, proposals, gt_instances)
if self.vis_period > 0:
storage = get_event_storage()
if storage.iter % self.vis_period == 0:
self.visualize_training(batched_inputs, proposals)
losses = {}
losses.update(detector_losses)
losses.update(proposal_losses)
return losses
def forward_few_shot_detector_training(self, batched_inputs: List[Dict[str, Any]]):
assert self.training
assert "support_set" in batched_inputs[0]
assert "query_set" in batched_inputs[0]
# 1. Separate batched inputs to batched support set and query set
# bs * SHOT
batched_inputs_support_set = [
record for x in batched_inputs for record in x["support_set"]
]
# bs
batched_inputs_support_set_targets = [
x["support_set_target"] for x in batched_inputs
]
batched_inputs_support_set_targets = self._put_to_device(batched_inputs_support_set_targets)
support_set_gt_instances =[x["instances"].to(self.device) for x in batched_inputs_support_set]
# check the batch norm info, need to use all modules
for name, module in self.backbone.named_children():
if isinstance(module, FrozenBatchNorm2d):
print(f"module {name}, statistics: {module.running_mean,module.running_var,module.weight,module.bias}")
# bs * QUERY_SHOT
batched_inputs_query_set = [
record for x in batched_inputs for record in x["query_set"]
]
# 2. Extract features
support_set_images_lst = self.convert_batched_inputs_to_image_list(batched_inputs_support_set)
support_set_images_feature = self._extract_backbone_features(support_set_images_lst.tensor)
query_set_images_lst = self.convert_batched_inputs_to_image_list(batched_inputs_query_set)
query_set_images_feature = self._extract_backbone_features(query_set_images_lst.tensor)
# filter gt_instances
query_set_gt_instances = self._get_gt(
batched_inputs_query_set, support_set_targets=batched_inputs_support_set_targets
)
# CHANGE START
if self.proposal_generator is not None:
# TODO: use gt_instances before filtering in meta-learning if retraining
proposals, proposal_losses = self.proposal_generator(query_set_images_lst, query_set_images_feature, query_set_gt_instances)
# print(f"proposal here: {proposal_losses}")
else:
assert "proposals" in batched_inputs[0]
proposals = [x["proposals"].to(self.device) for x in batched_inputs]
proposal_losses = {}
# Get support set class codes
support_set_class_codes = self.code_generator([support_set_images_feature[f] for f in self.in_features], support_set_gt_instances)
# print(f"support_set_class_codes: {support_set_class_codes}")
# Generate detection
_, detector_losses = self.roi_heads(query_set_images_lst, query_set_images_feature, proposals, query_set_gt_instances, support_set_class_codes, batched_inputs_support_set_targets)
losses = {}
losses.update(detector_losses)
losses.update(proposal_losses)
if "snnl" in support_set_class_codes:
losses.update(
{"loss_snnl": support_set_class_codes["snnl"]}
)
return losses
def forward_instances(self, batched_inputs: List[Dict[str, Any]], class_codes: Dict[str, torch.Tensor], do_postprocess: bool =True):
"""
Inputs is support set of length 1. Used only for inference. Generate class codes for all given support sets in batched_inputs
Args:
batched_inputs: a list of images as dict from a normal data loader.
class_codes: a dict with "cls_conv" and "cls_bias" as torch tensor
Returns:
support_set_class_codes: a list of dict output from code generator
"""
assert self.episodic_learning
assert not self.training
assert class_codes is not None
images_lst = self.convert_batched_inputs_to_image_list(batched_inputs)
images_features = self.backbone(images_lst.tensor)
if self.proposal_generator is not None:
proposals, _ = self.proposal_generator(images_lst, images_features, None)
else:
assert "proposals" in batched_inputs[0]
proposals = [x["proposals"].to(self.device) for x in batched_inputs]
results, _ = self.roi_heads(images=images_lst, features=images_features, proposals=proposals, targets=None, class_codes=class_codes, class_codes_target=None)
if do_postprocess:
assert not torch.jit.is_scripting(), "Scripting is not supported for postprocess."
return GeneralizedRCNN._postprocess(results, batched_inputs, images_lst.image_sizes)
else:
return results
def forward(self, batched_inputs: List[Dict[str, Any]]):
"""
Forward for base detector's training and inference and meta-learning's training stage.
Args:
Same as in :class:`GeneralizedRCNN.forward`
Returns:
list[dict]:
Each dict is the output for one input image.
The dict contains one key "proposals" whose value is a
:class:`Instances` with keys "proposal_boxes" and "objectness_logits".
"""
if not self.episodic_learning:
return self.forward_base_detector(batched_inputs)
# episodic learning
if self.training:
return self.forward_few_shot_detector_training(batched_inputs)
else:
raise NotImplementedError(
"Episodic learning inferrence for image and features is not supported in forward."
)
@META_ARCH_REGISTRY.register()
class FewShotDetector(FewShotGeneralizedRCNN):
    """
    Meta Arch for few-shot detection, four forward types:
    1. training model, includes both pretraining and meta-learning training (TODO: pretraining can be done in other code base)
    2. run_type = "meta_learn_test_support": takes batched input from support set data loader, return class codes as dict
    3. run_type = "meta_learn_test_instance": takes batched inputs from query set data loader and class codes, return processed result
    4. run_type = None: a normal base detector inference
    """
    def forward(
        self, batched_inputs, class_code=None, run_type=None
    ):
        """Dispatch to the training / plain-inference / meta-test paths (see class docstring)."""
        # logics for pretraining and meta-learning training
        if self.training:
            return super().forward(batched_inputs)
        # logics for testing
        if run_type is None:
            processed_results = super().forward(batched_inputs)
            return processed_results
        if run_type == "meta_learn_test_support":
            return self.forward_class_code(batched_inputs)
        if run_type == "meta_learn_test_instance":
            # class code could be None here if eval with pretrained class code
            return self.forward_instances(batched_inputs, class_code)
        raise NotImplementedError(f"not support this forward type: {run_type}, class_code: {class_code}")

    def forward_class_code(self, batched_inputs):
        """Compute class codes from a support-set batch (inference only)."""
        assert not self.training, "Not for training"
        return super().forward_class_code(batched_inputs)

    def forward_instances(self, batched_inputs, class_codes):
        """Run detection on a query-set batch using the given class codes (inference only)."""
        assert not self.training, "Not for training"
        return super().forward_instances(
            batched_inputs, class_codes
        )  # proposal and losses
| facebookresearch/sylph-few-shot-detection | sylph/modeling/meta_arch/few_shot_rcnn.py | few_shot_rcnn.py | py | 13,777 | python | en | code | 54 | github-code | 1 | [
{
"api_name": "logging.getLogger",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "meta_one_stage_detector.MetaProposalNetwork",
"line_number": 21,
"usage_type": "name"
},
{
"api_name": "detectron2.modeling.meta_arch.GeneralizedRCNN",
"line_number": 21,
"usage_t... |
1077528090 | import keras.models
import sys
import simplejson as json
# Load the pre-trained PM10 regression model from disk.
model = keras.models.load_model('./modeli/prt10.43.hdf5')
#"vreme" , "temperatura", "vlaznost", "pritisak", "brzina", "oblacnost", "dan u nedelji" , "mesec"
# (feature order above: weather, temperature, humidity, pressure, wind speed,
#  cloudiness, day of week, month — the keys expected in the input JSON)
args = sys.argv
path_json = args[1]  # first CLI argument: path to the JSON file with input features
with open(path_json) as json_file:
    data = json.load(json_file)
vreme = data["vreme"]
temperatura = data["temperatura"]
vlaznost = data["vlaznost"]
pritisak = data["pritisak"]
brzina = data["brzina"]
oblacnost = data["oblacnost"]
dan_u_nedelji = data["dan u nedelji"]
mesec = data["mesec"]
# Single-sample batch in the same feature order the model was trained on.
ulaz_u_mrezu = [[vreme, temperatura, vlaznost, pritisak, brzina, oblacnost, dan_u_nedelji, mesec]]
y_predicted = model.predict(ulaz_u_mrezu)
# Write the predicted PM10 particle value as JSON.
predvidjeno_cestica = {"PM10": str(y_predicted[0][0])}
with open('./izlazi/predvidjanje.txt', 'w') as f:
    json.dump(predvidjeno_cestica, f, ensure_ascii=False)
| mladjan-gadzic/matf-hackathon | ML/mreza.py | mreza.py | py | 892 | python | hr | code | 0 | github-code | 1 | [
{
"api_name": "keras.models.models.load_model",
"line_number": 7,
"usage_type": "call"
},
{
"api_name": "keras.models.models",
"line_number": 7,
"usage_type": "attribute"
},
{
"api_name": "keras.models",
"line_number": 7,
"usage_type": "name"
},
{
"api_name": "sys... |
15850335590 | import os
import requests
import time
from typing import Any
from .core.enums import Chain
from .core.base import Web3Connector
from . import log
__all__ = [
"Etherscan",
"Etherscanner",
]
class ResponseParser:
    """Parses the JSON envelope returned by Etherscan-style APIs."""

    @staticmethod
    def parse(response: "requests.Response") -> Any:
        """Return the ``result`` payload of *response*.

        Raises:
            ValueError: if the body has no ``status`` field, or the status
                signals failure (Etherscan encodes success as status "1").
        """
        content = response.json()
        result = content["result"]
        if "status" in content.keys():
            status = bool(int(content["status"]))
            message = content["message"]
            # Fix: raise instead of ``assert`` so validation survives ``python -O``.
            if not status:
                raise ValueError(f"{result} -- {message}")
        else:
            raise ValueError(f"failed to get status from response {content}")
        return result
class Etherscan:
    """Minimal client for Etherscan-family block-explorer HTTP APIs.

    The API key is read from a per-chain environment variable at
    construction time; failed requests are retried a bounded number of times.
    """

    _api_key: str
    _base_url: str
    _retry_wait_seconds: float = 1.001  # retry after this seconds
    _max_retries: int = 5

    # Per-chain environment variable that must hold the API key.
    __api_key_env_var_map = {
        Chain.ETHEREUM: "ETHERSCAN_API_KEY",
        Chain.ARBITRUM: "ARBISCAN_API_KEY",
        Chain.OPTIMISM: "OPTIMISTIC_SCAN_API_KEY",
        Chain.BINANCE: "BSCSCAN_API_KEY",
    }
    # Per-chain API endpoint prefix; get() appends the query string.
    __base_url_map = {
        Chain.ETHEREUM: "https://api.etherscan.io/api?",
        Chain.ARBITRUM: "https://api.arbiscan.io/api?",
        Chain.OPTIMISM: "https://optimistic.etherscan.io/api?",
        Chain.BINANCE: "https://api.bscscan.com/api?",
    }

    def __init__(self, chain: Chain) -> None:
        """Configure the client for *chain*; raises KeyError if the env var is unset."""
        api_key_env_var = self.__api_key_env_var_map[chain]
        self._api_key = os.environ[api_key_env_var]
        self._base_url = self.__base_url_map[chain]

    def get(self, **kw):
        """GET the API with *kw* as query parameters and return the parsed result.

        Retries up to ``_max_retries`` times on any failure, then raises
        RuntimeError. (The original fell off the loop and returned None
        silently, which callers like get_block_number_by_timestamp would
        then crash on with a confusing TypeError.)
        """
        kw["apikey"] = self._api_key
        url = self._base_url + "&".join([f"{k}={v}" for k, v in kw.items()])
        retries = 0
        while retries < self._max_retries:
            try:
                r = requests.get(url, headers={"User-Agent": ""})
                return ResponseParser.parse(r)
            except Exception as e:
                print(f"{url} failed with error:\n{e}")
                print(f"waiting for {self._retry_wait_seconds} seconds...")
                time.sleep(self._retry_wait_seconds)
                retries += 1
        # Fix: make retry exhaustion an explicit, loud error.
        raise RuntimeError(f"request failed after {self._max_retries} retries: {url}")

    def get_block_number_by_timestamp(self, timestamp: int) -> int:
        """Return the number of the last block mined at or before *timestamp*."""
        kw = dict(
            module="block",
            action="getblocknobytime",
            timestamp=timestamp,
            closest="before",
            apikey=self._api_key,
        )
        return int(self.get(**kw))
class Etherscanner(Web3Connector):
    # Lazily attached Etherscan client and its chain; set via init_scan().
    _scan: Etherscan
    _chain: Chain

    def init_scan(self, chain: Chain):
        """Attach an Etherscan client for *chain* to this connector."""
        self._scan = Etherscan(chain)

    @property
    def scan(self) -> Etherscan:
        """The attached Etherscan client (init_scan() must have been called)."""
        return self._scan

    def get_abi(self, addr: str) -> list:
        """ Get abi from contract address.
        """
        return self.scan.get(module="contract", action="getabi", address=addr)
{
"api_name": "requests.Response",
"line_number": 19,
"usage_type": "attribute"
},
{
"api_name": "typing.Any",
"line_number": 19,
"usage_type": "name"
},
{
"api_name": "core.enums.Chain.ETHEREUM",
"line_number": 39,
"usage_type": "attribute"
},
{
"api_name": "core... |
19842701690 | import json
from backend.util.mapper import to_dict
class Question:
    """A multiple-choice question with its options, answer, vote and analysis."""

    def __init__(self, question: str, options: dict, answer: str, vote: str,
                 dataset_id: int, analysis: str, id=None):
        self.id = id
        self.question = question
        self.options = options
        self.answer = answer
        self.vote = vote
        self.dataset_id = dataset_id
        self.analysis = analysis
        self.notes = []

    def with_notes(self, notes: list):
        """Replace the attached notes list."""
        self.notes = notes

    def to_dict(self):
        """Serialize to a dict, dropping the transient notes field."""
        serialized = to_dict(self)
        serialized.pop('notes', None)
        return serialized

    def to_string(self):
        """Render the question, its options, and the candidate answers as text."""
        lines = [f"Question: {self.question}"]
        lines.extend(f"{label}. {text}" for label, text in self.options.items())
        voted = self.vote.split(' ')[0]
        # Show both the stored answer and the voted answer when they disagree.
        candidates = self.answer if voted == self.answer else f"{self.answer} or {voted}"
        lines.append(f"Possible answers: {candidates}")
        return "\n".join(lines) + "\n"

    @staticmethod
    def parse(row):
        """Build a Question from a DB row tuple; None rows map to None."""
        if row is None:
            return None
        return Question(
            question=row[1],
            options=json.loads(row[2]),
            answer=row[3],
            vote=row[4],
            dataset_id=row[5],
            analysis=row[6],
            id=row[0],
        )
| PengfeiMiao/smart-qa | backend/entity/question.py | question.py | py | 1,267 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "backend.util.mapper.to_dict",
"line_number": 21,
"usage_type": "call"
},
{
"api_name": "json.loads",
"line_number": 39,
"usage_type": "call"
}
] |
72348982114 | # pylint: disable=abstract-method
"""
Input schema for Kemux.
This is the schema that is used to validate incoming messages.
"""
import dataclasses
import types
import dateutil.parser
import faust
import faust.models.fields
import kemux.data.schema.base
# pylint: disable=protected-access
@dataclasses.dataclass
class InputSchema(kemux.data.schema.base.Schema):
    """
    InputSchema Class
    The schema that is used to validate incoming messages.
    """
    @classmethod
    def construct_input_record_class(cls) -> None:
        """
        Factory used to construct the faust.Record subclass that is used to accept and validate incoming messages.
        Raises:
            ValueError: If a validator is not a valid callable.
        """
        class InputRecord(
            faust.Record,
            serializer='json',
            date_parser=dateutil.parser.parse
        ):
            """
            InputRecord Class
            The faust.Record that is used to accept and validate incoming messages.
            """
            def validate_message(self) -> None:
                """
                Validate the message using validators defined by the user.
                These validators follow the following naming pattern: "_<field_name>_validator"
                Raises:
                    ValueError: If a validator is not a valid callable.
                """
                message_data = self.__dict__  # pylint: disable=eval-used
                # Run the user-supplied validator for every schema field;
                # cls is the enclosing InputSchema subclass (closure).
                for field in cls.fields:
                    validator_name = f'_{field}_validator'
                    validator = getattr(
                        self.__class__,
                        validator_name
                    )
                    if not isinstance(validator, types.FunctionType):
                        raise ValueError(f'Validator: {validator_name} is not callable')
                    actual_field_value = message_data.get(field)
                    validator(actual_field_value)

            def to_dict(self) -> dict:
                """
                Convert the record to a dict.
                Returns:
                    dict: The input record as a dict.
                """
                return {
                    field: self.__dict__.get(field)
                    for field in cls.fields
                }

        # Attach every declared field to the record class as a required
        # faust FieldDescriptor, mirroring the schema's annotations.
        for field_name, field_type in cls.fields.items():
            InputRecord.__annotations__[field_name] = field_type
            setattr(
                InputRecord,
                field_name,
                faust.models.fields.FieldDescriptor(
                    required=True,
                    exclude=False,
                    default=None,
                    type=field_type
                )
            )
        # Copy any user-implemented "*_validator" functions onto the record
        # so validate_message() can find them on the instance's class.
        implemented_validators = [
            getattr(cls, field)
            for field in cls.__dict__
            if field.endswith('_validator')
        ]
        for validator in implemented_validators:
            if not isinstance(validator, types.FunctionType):
                raise ValueError(f'Validator: {validator} is not callable')
            setattr(InputRecord, validator.__name__, validator)
        cls.record_class = InputRecord  # type: ignore

    @classmethod
    def asdict(cls) -> dict[str, type]:
        """
        Convert the nested faust.Record subclass to a dict.
        Returns:
            dict: The nested faust.Record subclass as a dict.
        """
        return cls.record_class.asdict()
| kamilrybacki/Kemux | kemux/data/schema/input.py | input.py | py | 3,496 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "kemux.data.schema.base.data",
"line_number": 20,
"usage_type": "attribute"
},
{
"api_name": "kemux.data.schema.base",
"line_number": 20,
"usage_type": "name"
},
{
"api_name": "faust.Record",
"line_number": 37,
"usage_type": "attribute"
},
{
"api_nam... |
20593187659 | import pycurl
from urllib import parse
class LexofficeUpload:
    """
    A class for uploading invoice documents
    """
    def __init__(self, apiToken: str) -> None:
        self.apiUrl = 'https://api.lexoffice.io/v1/files'
        self.apiToken = apiToken  # bearer token for the Lexoffice API

    def fileUpload(self, tmpFile, fileName: str):
        """
        Upload a file to a Lexoffice Account
        """
        tmpFile.seek(0)  # Go back to beginning of file to read from tmpFile after writing
        try:
            c = pycurl.Curl()
            c.setopt(c.URL, self.apiUrl)
            c.setopt(c.POST, 1)
            # multipart/form-data body: the file part (named per fileName)
            # plus the fixed "voucher" document type.
            c.setopt(c.HTTPPOST, [
                ("file", (
                    c.FORM_FILE, tmpFile.name,
                    c.FORM_FILENAME, parse.quote(fileName)
                )
                ),
                ("type", "voucher")
            ])
            c.setopt(pycurl.HTTPHEADER, [
                f"Authorization: Bearer {self.apiToken}",
                "Content-Type: multipart/form-data",
                "Accept: application/json"
            ])
            response = c.perform_rs()  # perform the request, response body as str
            c.close()
            return response
        except Exception as e:
            # NOTE(review): terminates the whole process (SystemExit) on any
            # failure — consider re-raising so callers can handle errors.
            exit("Error: File upload failed\n\n" + str(e))
| Maki-IT/lexoffice-invoice-upload | invoice/uploader/uploader.py | uploader.py | py | 1,243 | python | en | code | 9 | github-code | 1 | [
{
"api_name": "pycurl.Curl",
"line_number": 22,
"usage_type": "call"
},
{
"api_name": "urllib.parse.quote",
"line_number": 28,
"usage_type": "call"
},
{
"api_name": "urllib.parse",
"line_number": 28,
"usage_type": "name"
},
{
"api_name": "pycurl.HTTPHEADER",
"... |
25378068112 | # The data updating script
# Brings in query from the file social.sql
# Working from file means the SQL code can be developed and tested
# directly within the Postgres environment and psycopg2 creates access
# to that code from within Python
# Unclear what context would require this psycopg2 setup with no real
# Python value-add. Seems a shell script could run the command
# 'psql socialmedia < social.sql' on a regular basis. Seems psycopg2
# is more useful when there is some additional manipulation of the query
# needed that is not easily done in shell
import psycopg2

print('.. ', end='')
# Connect to the local 'socialmedia' database (default host/credentials).
conn = psycopg2.connect(dbname='socialmedia')
c = conn.cursor()
# The refresh query is maintained in social.sql; collapse it to one line.
with open('social.sql', 'r') as q:
    query = q.read().replace('\n', ' ')
c.execute(query)
conn.commit()
print('The daily_update table is refreshed ..')
conn.close()
| gusmairs/sql-projects | dsi-pyth/daily_update.py | daily_update.py | py | 829 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "psycopg2.connect",
"line_number": 15,
"usage_type": "call"
}
] |
3176997772 | import os
import tweepy
api_handle = None
def get_authorization():
    """Build a tweepy app-auth handler from environment credentials.

    Reads TWITTER_API_KEY and TWITTER_API_SECRET from the environment.

    Raises:
        Exception: if either variable is unset — previously the None values
            were passed straight to tweepy, failing later with an obscure error.
    """
    api_key = os.getenv('TWITTER_API_KEY', None)
    api_secret = os.getenv('TWITTER_API_SECRET', None)
    if not api_key or not api_secret:
        raise Exception("TWITTER_API_KEY and TWITTER_API_SECRET must be set")
    auth = tweepy.AppAuthHandler(api_key, api_secret)
    return auth
def get_api():
    """Return a module-level cached tweepy API handle, connecting on first use."""
    global api_handle
    # Reuse the existing connection if one was already established.
    if api_handle:
        return api_handle
    print("Connecting to twitter.")
    auth = get_authorization()
    api = tweepy.API(auth, wait_on_rate_limit=True,
                     wait_on_rate_limit_notify=True)
    if (not api):
        raise Exception("Cannot connect to twitter using provided credentials")
    api_handle = api  # cache for subsequent calls
    return api
| amtsh/vdeos.me | app/models/twitter/auth.py | auth.py | py | 670 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "os.getenv",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "os.getenv",
"line_number": 9,
"usage_type": "call"
},
{
"api_name": "tweepy.AppAuthHandler",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "tweepy.API",
"line_number... |
32821607542 | from datetime import datetime
import os.path
import vcr
from csep.utils.comcat import search
HOST = 'webservices.rm.ingv.it'
def get_datadir():
    """Absolute path of the BSI test-artifact directory next to this file."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, 'artifacts', 'BSI')
def test_search():
    """Replay a recorded INGV catalog query (vcr cassette) and check the first event id."""
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_search.yaml')
    with vcr.use_cassette(tape_file):
        # L'Aquila
        eventlist = search(starttime=datetime(2009, 4, 6, 0, 0, 0),
                           endtime=datetime(2009, 4, 7, 0, 0, 0),
                           minmagnitude=5.5, host=HOST, limit=15000, offset=0)
        event = eventlist[0]
        assert event.id == 1895389
def test_summary():
    """Replay a recorded query and verify the event's summary string and attributes."""
    datadir = get_datadir()
    tape_file = os.path.join(datadir, 'vcr_summary.yaml')
    with vcr.use_cassette(tape_file):
        eventlist = search(starttime=datetime(2009, 4, 6, 0, 0, 0),
                           endtime=datetime(2009, 4, 7, 0, 0, 0),
                           minmagnitude=5.5, host=HOST, limit=15000, offset=0)
        event = eventlist[0]
        # Expected __str__ rendering of the L'Aquila mainshock.
        cmp = '1895389 2009-04-06 01:32:40.400000 (42.342,13.380) 8.3 km M6.1'
        assert str(event) == cmp
        assert event.id == 1895389
        assert event.time == datetime(2009, 4, 6, 1, 32, 40, 400000)
        assert event.latitude == 42.342
        assert event.longitude == 13.380
        assert event.depth == 8.3
        assert event.magnitude == 6.1
| SCECcode/pycsep | tests/test_bsi.py | test_bsi.py | py | 1,480 | python | en | code | 40 | github-code | 1 | [
{
"api_name": "os.path.path.dirname",
"line_number": 10,
"usage_type": "call"
},
{
"api_name": "os.path.path",
"line_number": 10,
"usage_type": "attribute"
},
{
"api_name": "os.path",
"line_number": 10,
"usage_type": "name"
},
{
"api_name": "os.path.path.abspath",... |
32156408206 | import pytest
import six
from pymarketstore import jsonrpc
from unittest.mock import patch
import importlib
importlib.reload(jsonrpc)
@patch.object(jsonrpc, 'requests')
def test_jsonrpc(requests):
    """Exercise MsgpackRpcClient request/response handling with a mocked requests module."""
    # Any POST through the mocked session returns this sentinel.
    requests.Session().post.return_value = 'dummy_data'
    cli = jsonrpc.MsgpackRpcClient('http://localhost:5993/rcp')
    result = cli._rpc_request('DataService.Query', a=1)
    assert result == 'dummy_data'
    # A well-formed JSON-RPC success response yields its 'result' payload.
    resp = {
        'jsonrpc': '2.0',
        'id': 1,
        'result': {'ok': True},
    }
    assert cli._rpc_response(resp)['ok']
    # An error response must raise, with message and data in the text.
    del resp['result']
    resp['error'] = {
        'message': 'Error',
        'data': 'something',
    }
    with pytest.raises(Exception) as e:
        cli._rpc_response(resp)
    assert 'Error: something' in str(e)
| alpacahq/pymarketstore | tests/test_jsonrpc.py | test_jsonrpc.py | py | 768 | python | en | code | 101 | github-code | 1 | [
{
"api_name": "importlib.reload",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "pymarketstore.jsonrpc",
"line_number": 8,
"usage_type": "argument"
},
{
"api_name": "pymarketstore.jsonrpc.MsgpackRpcClient",
"line_number": 15,
"usage_type": "call"
},
{
"a... |
19659631050 | import logging
import shutil
from datetime import datetime, timedelta
from itertools import chain
from pathlib import Path
from zipfile import ZipFile
logger = logging.getLogger(__name__)
MAX_STORAGE_DAYS = 90
FREE_SPACE_PERCENT = 10
def get_file_datetime(path):
    """Derive a datetime from the trailing .../YYYY/MM/DD directories of *path*."""
    year, month, day = (int(part) for part in path.parent.parts[-3:])
    return datetime(year, month, day)
def free_storage_memory():
    """Free disk space under ./storage by archiving and removing the oldest day directory.

    If at least FREE_SPACE_PERCENT of the volume is free, nothing happens.
    Otherwise the oldest YYYY/MM/DD directory is scanned: audio files older
    than MAX_STORAGE_DAYS are zipped and moved to ./archive/YYYY/MM/DD, the
    day directory is removed, and the function recurses until enough space
    is free or the storage is empty.
    """
    total, used, free = shutil.disk_usage('./storage')
    if free / total * 100 >= FREE_SPACE_PERCENT:
        logger.info("You don't need to archive and move files. Space of storage is more than 10%")
        return
    patterns = ("*.wav", "*.mp3")
    current_date = datetime.now()
    iterable = chain.from_iterable(Path("./storage").rglob(pattern) for pattern in patterns)
    storage_dir = None
    with ZipFile("archive.zip", mode="w") as archive:
        # Files are sorted by their directory date, so the first file fixes
        # the oldest day directory; the loop stops at the next directory.
        for path in sorted(iterable, key=get_file_datetime):
            file_date = get_file_datetime(path)
            if not storage_dir:
                storage_dir = path.parent
            if storage_dir != path.parent:
                break
            if file_date + timedelta(days=MAX_STORAGE_DAYS) < current_date:
                archive.write(path, path.name)
                logger.info(f"Added {path} file to {archive.filename}")
        if not storage_dir:
            logger.warning("Storage is empty. Exit")
            return
        if archive.namelist():
            # Mirror the day-directory layout under ./archive and move the zip there.
            year, month, day = storage_dir.parts[-3:]
            archive_path = Path("./archive") / year / month / day
            archive_path.mkdir(parents=True, exist_ok=True)
            shutil.move(archive.filename, archive_path)
            logger.info(f"Moved {archive.filename} to {archive_path}")
        # NOTE(review): the day directory is deleted even when nothing was
        # archived (files younger than MAX_STORAGE_DAYS) — confirm intended.
        shutil.rmtree(storage_dir)
        logger.info(f"Removed {storage_dir} directory")
        free_storage_memory()  # keep freeing until the free-space threshold is met
def main():
    """Configure logging to both all.log and the console, then run the cleanup."""
    # noinspection PyArgumentList
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s - %(levelname)s - %(message)s",
        handlers=[
            logging.FileHandler("all.log", mode="a"),
            logging.StreamHandler()
        ]
    )
    try:
        free_storage_memory()
    except Exception as exc:
        # Log the full traceback rather than crashing a scheduled run.
        logger.exception(exc)


if __name__ == '__main__':
    main()
| Auranoz/test_storage | index.py | index.py | py | 2,253 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "logging.getLogger",
"line_number": 9,
"usage_type": "call"
},
{
"api_name": "datetime.datetime",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "shutil.disk_usage",
"line_number": 21,
"usage_type": "call"
},
{
"api_name": "datetime.datetim... |
25376970593 | import os
from pypcd import pypcd
from rich.progress import track
class Bin2Pcd:
    """Convert .bin point-cloud files to binary-compressed .pcd files via pypcd."""

    def __init__(
        self,
        input,
        input_dims,
        output,
    ):
        self.input = input            # a .bin file or a directory of .bin files
        self.input_dims = input_dims  # point format passed to pypcd — presumably field layout; TODO confirm
        self.output = output          # destination directory for .pcd files
        self.file_list = self.get_file_list()
        # make output dir
        if not os.path.exists(self.output):
            os.makedirs(self.output)

    def get_file_list(self):
        """Return the .bin file paths under self.input (or the single input file)."""
        if os.path.isdir(self.input):
            file_list = os.listdir(self.input)
            file_list = [os.path.join(self.input, file_name) for file_name in file_list]
        else:
            file_list = [self.input]
        # filter file_list with .bin suffix
        file_list = [file_name for file_name in file_list if file_name[-4:] == ".bin"]
        return file_list

    def run(self):
        """Convert every listed .bin file, skipping empty (zero-byte) files."""
        for bin_file in track(self.file_list):
            # Output name: same basename with .pcd extension, in the output dir.
            pcd_file = bin_file.split("/")[-1][:-4] + ".pcd"
            pcd_file = os.path.join(self.output, pcd_file)
            # check file valid by check file size
            if os.path.getsize(bin_file) == 0:
                print("file size is 0, skip: ", bin_file)
                continue
            pc = pypcd.PointCloud.from_bin(bin_file, format=self.input_dims)
            pc.save_pcd(pcd_file, compression="binary_compressed")
            print("save pcd file to: ", pcd_file)
| windzu/apk | apk/format/bin2pcd/bin2pcd.py | bin2pcd.py | py | 1,398 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "os.path.exists",
"line_number": 21,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 21,
"usage_type": "attribute"
},
{
"api_name": "os.makedirs",
"line_number": 22,
"usage_type": "call"
},
{
"api_name": "os.path.isdir",
"line_num... |
23090120889 | from pathlib import Path
import os
import subprocess
import time
from urllib.parse import unquote_plus
import httpx
from api.settings import API_TOKEN
# this assumes API server is running at :5000 and that worker is also running
DELAY = 1 # seconds
MAX_RETRIES = 240
API_URL = "http://localhost:5000"
# API_URL = "http://localhost:8080"
# API_URL = "http://localhost:8080/southeastssa"
OUT_DIR = Path("/tmp/api")
if not OUT_DIR.exists():
os.makedirs(OUT_DIR)
def poll_until_done(job_id, current=0, max=MAX_RETRIES):
    """Poll the report-status endpoint until the job succeeds, fails, or retries run out.

    Returns a (task, result, errors) tuple; result is None on failure, and
    both result and errors are None on retry exhaustion.
    NOTE(review): `max` and `json` shadow builtins; recursion depth is
    bounded by MAX_RETRIES (240), below Python's default limit.
    """
    r = httpx.get(f"{API_URL}/api/reports/status/{job_id}?token={API_TOKEN}")
    if r.status_code != 200:
        raise Exception(f"Error processing request (HTTP {r.status_code}): {r.text}")
    json = r.json()
    status = json.get("status")
    progress = json.get("progress")
    message = json.get("message")
    errors = json.get("errors", [])
    task = json.get("task")
    if status == "success":
        return task, json["result"], errors
    if status == "failed":
        print(f"Failed: {json['detail']}")
        return task, None, errors
    # Still running: report progress and recurse after a short delay.
    print(f"Progress: {progress}, message: {message}, errors: {errors}")
    current += 1
    if current == max:
        print("Max retries hit, stopping...")
        return task, None, None
    time.sleep(DELAY)
    return poll_until_done(job_id, current=current, max=max)
def test_upload_file(filename):
    """Upload *filename* to the API and poll the resulting job to completion."""
    files = {"file": open(filename, "rb")}
    r = httpx.post(
        f"{API_URL}/api/upload?token={API_TOKEN}",
        files=files,
    )
    if r.status_code != 200:
        raise Exception(f"Error processing request (HTTP {r.status_code}): {r.text}")
    json = r.json()
    job_id = json.get("job")
    if job_id is None:
        # No job id means the API rejected the upload; surface the whole body.
        raise Exception(json)
    return poll_until_done(job_id)
def test_create_report(uuid, datasets, field, name):
    """Request report generation for an uploaded dataset and poll it to completion."""
    r = httpx.post(
        f"{API_URL}/api/report?token={API_TOKEN}",
        data={"uuid": uuid, "datasets": datasets, "field": field, "name": name},
    )
    if r.status_code != 200:
        raise Exception(f"Error processing request (HTTP {r.status_code}): {r.text}")
    json = r.json()
    job_id = json.get("job")
    if job_id is None:
        raise Exception(json)
    return poll_until_done(job_id)
def download_file(url):
    """Download the report at *url* into OUT_DIR, naming it from Content-Disposition."""
    r = httpx.get(f"{API_URL}{url}")
    # Extract the RFC 5987 encoded name after "filename*=utf-8''".
    attachment_filename = r.headers["content-disposition"].split("filename*=utf-8''")[1]
    # NOTE(review): unquote_plus also maps '+' to space; plain unquote may be
    # more faithful to RFC 5987 percent-encoding — confirm against the server.
    filename = OUT_DIR / unquote_plus(attachment_filename)
    with open(filename, "wb") as out:
        out.write(r.read())
    return filename
if __name__ == "__main__":
# name, filename = [
# "Balduina atropurpurea",
# "examples/Balduina_pop_resiliency_final.zip",
# ]
# name, filename = [
# "Rabbitsfoot",
# "examples/Rabbitsfott_resilience_final_SECAS_only.zip",
# ]
name, filename = ["Test species", "examples/SingleTest.zip"]
task, result, errors = test_upload_file(filename)
print(f"----------------\ntask: {task}\nresult: {result}\nerrors: {errors}\n")
if result is not None:
uuid = result["uuid"]
if result.get("count") == 1:
field = None
else:
# arbitrarily pick first field available
field = list(result["fields"].keys())[0]
# include all present datasets in analysis
datasets = ",".join(
dataset
for dataset, present in result["available_datasets"].items()
if present
)
task, result, errors = test_create_report(uuid, datasets, field, name)
print(f"----------------\ntask: {task}\nresult: {result}\nerrors: {errors}\n")
outfilename = download_file(result)
subprocess.run(["open", outfilename])
| astutespruce/secas-ssa | tests/test_report_api.py | test_report_api.py | py | 3,742 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "pathlib.Path",
"line_number": 21,
"usage_type": "call"
},
{
"api_name": "os.makedirs",
"line_number": 24,
"usage_type": "call"
},
{
"api_name": "httpx.get",
"line_number": 28,
"usage_type": "call"
},
{
"api_name": "api.settings.API_TOKEN",
"line... |
41728221583 | #!/usr/bin/env python
"""
This scrapes Streak for the Cash information from ESPN and writes it to an Excel spreadsheet.
It uses BeautifulSoup 4 and xlsxwriter.
In order to use this, you will need to download bs4, lxml, and xlsxwriter.
Prop array format: [Sport, League, Prop, Active Picks %, Winner, Winner %, Loser, Loser %]
Command format: python props.py [number of days]
"""
import urllib.request
import re
import xlsxwriter
import operator
import collections
import sys
import ast
import os
from datetime import date, timedelta, datetime
from bs4 import BeautifulSoup
def _format_url(date):
"""Format ESPN link to scrape records from."""
link = ['http://streak.espn.go.com/en/entry?date=' + date]
print(date)
return link[0]
def scrape_props(espn_page, allProps):
    """Scrape ESPN's pages for data.

    Parses every matchup container on *espn_page* and appends one prop
    array per matchup to allProps, keyed by the prop's lock date.
    """
    url = urllib.request.urlopen(espn_page)
    soup = BeautifulSoup(url.read(), 'lxml')
    props = soup.find_all('div', attrs={'class': 'matchup-container'})
    for prop in props:
        propArray = []
        # Lock time, e.g. "April 6, 2015 7:05:00 PM EDT" — drop the tz suffix.
        time = prop.find('span', {'class': 'startTime'})['data-locktime']
        time = time[:-4]
        format_time = datetime.strptime(time, '%B %d, %Y %I:%M:%S %p')
        sport = prop.find('div', {'class': 'sport-description'})
        if sport:
            propArray.append(sport.text)
        else:
            propArray.append('Adhoc')
        # "League: Prop question" — split into league and question parts.
        title = prop.find(
            'div', {'class': ['gamequestion', 'left']}).text.split(': ')
        propArray.append(title[0])
        propArray.append(title[1])
        overall_percentage = prop.find('div', {'class': 'progress-bar'})
        propArray.append(overall_percentage['title'].split(' ')[3])
        percentages = prop.find_all('span', {'class': 'wpw'})
        info = prop.find_all('span', {'class': 'winner'})
        # Strip the auxiliary "oppAddlText" spans so get_text() is clean.
        temp = info[0].parent.find_all('span', {'id': 'oppAddlText'})
        [rec.extract() for rec in temp]
        temp = info[1].parent.find_all('span', {'id': 'oppAddlText'})
        [rec.extract() for rec in temp]
        # The winner's span starts with an <img> check mark; order winner first.
        if info[0].contents[0].name == 'img':
            propArray.append(info[0].parent.get_text())
            propArray.append(percentages[0].text)
            propArray.append(info[1].parent.get_text()[1:])
            propArray.append(percentages[1].text)
        else:
            propArray.append(info[1].parent.get_text())
            propArray.append(percentages[1].text)
            propArray.append(info[0].parent.get_text()[1:])
            propArray.append(percentages[0].text)
        # Prop array format:
        # [Sport, League, Prop, Active Picks %, Winner, Winner %, Loser, Loser %]
        allProps[format_time.date()].append(propArray)
def write_to_excel(allProps):
    """Write every scraped prop to Streak.xlsx, one row per prop."""
    wb = xlsxwriter.Workbook('Streak.xlsx')
    ws = wb.add_worksheet('All Props')
    #ws.set_column(0, 0, 2.29)
    #ws.set_column(1, 1, 14.14)
    #ws.set_column(5, 5, 2.29)
    date_format = wb.add_format({'num_format': 'mm/dd/yy'})
    format_percentage = wb.add_format({'num_format': '0.0"%"'})
    format_percentage2 = wb.add_format({'num_format': '0.00"%"'})
    i = 0
    # NOTE(review): the loop variable shadows datetime.date imported at module level.
    for date in allProps:
        for prop in allProps[date]:
            ws.write(i, 0, date, date_format)
            ws.write(i, 1, prop[0])
            ws.write(i, 2, prop[1])
            ws.write(i, 3, prop[2])
            # Percentage strings carry a trailing '%' — strip it before float().
            ws.write(i, 4, float(prop[3][:-1]), format_percentage2)
            ws.write(i, 5, prop[4])
            ws.write(i, 6, float(prop[5][:-1]), format_percentage)
            ws.write(i, 7, prop[6])
            ws.write(i, 8, float(prop[7][:-1]), format_percentage)
            i += 1
    wb.close()
def main(argv):
    """Scrape the last argv[0] days of props (yesterday backwards) and export to Excel."""
    allProps = collections.OrderedDict()
    for x in range(0, int(argv[0])):
        newDate = date.today() - timedelta(days=x + 1)
        allProps[newDate] = []
        scrape_props(_format_url(newDate.strftime('%Y%m%d')), allProps)
    write_to_excel(allProps)
if __name__ == '__main__':
    # Time the full scrape+export run; argv[1] is the number of days.
    import time
    start = time.time()
    main(sys.argv[1:])
    print(time.time() - start, 'seconds')
| keveleigh/espn-sftc | props.py | props.py | py | 4,060 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "datetime.date",
"line_number": 28,
"usage_type": "name"
},
{
"api_name": "datetime.date",
"line_number": 29,
"usage_type": "argument"
},
{
"api_name": "urllib.request.request.urlopen",
"line_number": 35,
"usage_type": "call"
},
{
"api_name": "urllib... |
34806501971 | ''' Common pricing methods corresponding to Interest rate Instruments '''
import datetime as dt
#from collections import OrderedDict
import json
import os
import scipy as sci
import numpy as np
import pandas as pd
# import interest_rate_base as intbase
import interest_rate_dates as intdate
import interest_rate_discount_lorimier as intdisc
import curve_constructor as cb
import pyfi_filter as fi_filter
class curve_builder_lorimier(cb.curve_builder):
    ''' Curve builder applying the Lorimier method:
        methods: (3): Hilbert Space Smoothing Spline
    '''
    def __init__(self, options, alpha=0, dbg=False):
        ''' Constructor. Depends on an options dictionary (or a path to a JSON
        file containing one) and a dataframe of zero coupon yields and maturities.
        alpha is the Lorimier smoothing parameter.
        '''
        if isinstance(options, str) and os.path.exists(options):
            with open(options, "r") as fp:
                init_options = json.load(fp)
                fp.close()  # redundant: the context manager closes fp
        elif isinstance(options, dict):
            init_options = options.copy()
        else:
            raise ValueError("Faulty -- options specification ")
        self.df = None
        self.alpha = alpha
        # Populate init_options["instruments"] from the CSV before the base init.
        self.determine_instruments(init_options, init_options['data']['file'], dbg=dbg)
        super().__init__(init_options, method=3, dbg=dbg)
        self.calc_exact_method0()

    def determine_instruments(self, options, filename, dbg=False):
        ''' Loads Data Frame of acceptable instruments '''
        if 'instruments' not in options.keys() and isinstance(filename, str) and\
                os.path.exists(filename):
            df = pd.read_csv(filename, index_col=0)
        elif 'instruments' in options.keys():
            if dbg:
                print("instruments already exist")
        else:
            raise ValueError("No Instruments Found")
        if isinstance(df, pd.DataFrame) and all(df.shape) > 0:
            self.df = df.copy()
            if self.df.shape[0] < 1:
                raise ValueError("Data Frame contains only excluded items")
            # TODO Apply repeated filter concept here
            # Apply a single filter dict, or a list of filters in sequence.
            if 'filter' in options.keys() and isinstance(options['filter'], dict):
                obj = self.determine_filter_obj(options['filter'], dbg)
                ind_inc = obj.include()
                self.df = self.df[ind_inc]
                self.df['excluded'] = False
            elif 'filter' in options.keys() and isinstance(options['filter'], list):
                for itm in options['filter']:
                    obj = self.determine_filter_obj(itm, dbg)
                    ind_inc = obj.include()
                    self.df = self.df[ind_inc]
                    self.df['excluded'] = False
        else:
            raise ValueError("Empty Data Frame")
        # Translate each remaining row into a ZERO COUPON instrument spec.
        options["instruments"] = {}
        for cnt, itm in enumerate(self.df.iterrows()):
            # Stripped-principal rows use 30/360; bills use act/360.
            name = ("PRINC_STRIP" if itm[1]['Description'].find("STRIPPED PRIN") >= 0 else
                    "TREAS_BILL")
            day_count = ("30_360" if itm[1]['Description'].find("STRIPPED PRIN") >= 0 else
                         "act_360")
            name = name + str(cnt+1)
            if "Price" in self.df.columns:
                price = itm[1]['Price']
            if "maturity_date" in self.df.columns:
                maturity_date = dt.datetime.strptime(itm[1]['maturity_date'], "%m/%d/%Y")
                maturity_date = dt.datetime.strftime(maturity_date,
                                                     options["control"]["date_format"])
            options["instruments"][name] = {"type": "ZERO COUPON", "price": price,
                                            "date": maturity_date, "day_count": day_count}

    def determine_filter_obj(self, item, dbg=False):
        ''' Calculates Filter object matching the filter spec *item* '''
        if isinstance(item, dict) and "operator" in item.keys():
            obj = fi_filter.fixed_income_filter_logical(item, self.df, dbg=dbg)
        elif isinstance(item, dict) and "type" in item.keys() and\
                item['type'].upper() == "NUMERIC":
            obj = fi_filter.fixed_income_filter_numeric(item, self.df, dbg=dbg)
        elif isinstance(item, dict) and "type" in item.keys() and\
                item['type'].upper() == "TEXT":
            obj = fi_filter.fixed_income_filter(item, self.df, dbg=dbg)
        elif isinstance(item, dict) and "type" in item.keys() and\
                item['type'].upper() == "REPEATED_MATURITY":
            obj = fi_filter.repeated_maturity_filter(item, self.df, dbg=dbg)
            if dbg:
                print("Repeated Count %d" % (obj.get_gt_one_count()))
        else:
            if dbg:
                print(item)
            raise ValueError("Faulty Item")
        return obj

    def build_arrays(self):
        ''' Constructs the (N+1)x(N+1) Lorimier system matrix / price vector
        and the empty results DataFrame '''
        instrument_cnt = len(self.instruments)
        if self.cf_matrix is None:
            self.cf_matrix = np.zeros([instrument_cnt+1, instrument_cnt+1])
            self.cf_prices = np.zeros([instrument_cnt+1])
            # First row/column carry the maturity constraints (scaled by alpha).
            for i, zero in enumerate(self.instruments.values()):
                self.cf_prices[i+1] = self.alpha*zero.get_maturity()*zero.get_yield()
                self.cf_matrix[0][i+1] = zero.get_maturity()
                self.cf_matrix[i+1][0] = self.alpha*zero.get_maturity()
            # Interior entries are the pairwise Lorimier inner products.
            for i, zero_i in enumerate(self.instruments.keys()):
                for j, zero_j in enumerate(self.instruments.keys()):
                    self.cf_matrix[i+1][j+1] = self.lorimier_dot_prod(zero_i, zero_j)
        if self.results is None:  # # instruments X column count
            if "columns" in self.options['control'].keys():
                rows = len(self.options['control']["columns"])
                mtrx = np.zeros([instrument_cnt, rows])
                names_sorted = sorted(self.names, key=lambda x: self.names[x])
                self.results = pd.DataFrame(mtrx, index=names_sorted,
                                            columns=self.options['control']['columns'])
                if self.dbg:
                    print("Construction NP array shape %i length %i" % (self.results.shape[0],
                                                                        self.results.shape[1]))

    def load_cf_results(self):
        ''' Loads results element upto (but not including) zeros '''
        for key, zero in self.instruments.items():
            self.load_data_row(position=key, rate=zero.get_price(),
                               date=zero.get_maturity(),
                               typ='ZERO')

    def load_cf_matrices(self):
        ''' loads cash matrix elements (self.cf_matrix) — intentionally a no-op
        here: the matrix is built in build_arrays() '''

    def calc_exact_method0(self):
        ''' implements exact method with complete NxN matrix; returns True on success '''
        res = False
        if isinstance(self.cf_matrix, np.ndarray) and all(self.cf_matrix.shape) > 0 and\
                isinstance(self.cf_prices, np.ndarray) and\
                self.cf_prices.shape[0] == self.cf_matrix.shape[0] == self.cf_matrix.shape[1]:
            dates_dict = self.options.copy()
            dates_dict['parameters'] = {}
            dates_dict['parameters']['alpha'] = self.alpha
            dates, _ = intdate.calc_schedule_list(
                self.cf_dates.keys(), self.options)
            # Solve the linear Lorimier system for the spline coefficients.
            beta = sci.linalg.solve(self.cf_matrix, self.cf_prices)
            if self.dbg:
                print(beta)
            dates_dict['parameters']['beta'] = beta.copy()
            dates_dict['parameters']['tau'] = []
            for itm in self.instruments.values():
                dates_dict['parameters']['tau'].append(itm.get_maturity())
            dates_dict['parameters']['dates'] = intdate.generate_schedule_dict(
                start=self.options['start_date'], period='S', count=dates,
                convention=self.options['control']['convention'],
                date_adjust=self.options['control']['date_adjust'])
            prices = [bnd.get_price() for bnd in self.instruments.values()
                      if isinstance(bnd, cb.intrate.fi_instrument)]
            self.zeros = intdisc.discount_calculator_lorimier(
                prices, data_type=3, dates=dates_dict, dbg=self.dbg)
            mapping_dict = {"zero": "zero", "maturity": "maturity", "date_diff": "date_diff",
                            "forward": "forward", "yield": "yield_hat"}
            override = bool("control" in self.options.keys() and "override" in
                            self.options['control'].keys() and
                            int(self.options['control']['override']) > 0)
            self.apply_zeros(mapping_dict, override)
            res = True
        else:
            if self.dbg:
                print("Warning -- zeros NOT calculated")
        return res

    def lorimier_dot_prod(self, h_i, h_j):
        ''' Calculates dot product of instruments h_i, h_j based on the Lorimier
        definition (identity term added on the diagonal) '''
        v1 = self.instruments[h_i].get_maturity()
        v2 = self.instruments[h_j].get_maturity()
        res = v1*v2 + 0.5*min(v1, v2)**2*max(v1, v2) - min(v1, v2)**3/6.
        res = (self.alpha*res + (1.0 if v1 == v2 else 0.0))
        if self.dbg:
            print(h_i, h_j, v1, v2)
        return res
| slpenn13/pythoninterestrates | src/curve_constructor_lorimier.py | curve_constructor_lorimier.py | py | 9,174 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "curve_constructor.curve_builder",
"line_number": 16,
"usage_type": "attribute"
},
{
"api_name": "os.path.exists",
"line_number": 26,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 26,
"usage_type": "attribute"
},
{
"api_name": "json... |
20522502354 | """
Test for import machinery
"""
import unittest
import sys
import textwrap
import subprocess
import os
from PyInstaller.lib.modulegraph import modulegraph
class TestNativeImport (unittest.TestCase):
    """Sanity checks that CPython's own import machinery (run in a child
    process) behaves the way the modulegraph tests below assume."""
    # The tests check that Python's import statement
    # works as these tests expect.
    def importModule(self, name):
        """Import *name* in a fresh subprocess (cwd = testpkg-relimport) and
        return the imported module's __name__ as printed by the child."""
        if '.' in name:
            # Dotted name: fall back to importing the parent package when the
            # full dotted import fails.
            script = textwrap.dedent("""\
                try:
                    import %s
                except ImportError:
                    import %s
                print (%s.__name__)
                """) %(name, name.rsplit('.', 1)[0], name)
        else:
            script = textwrap.dedent("""\
                import %s
                print (%s.__name__)
                """) %(name, name)
        p = subprocess.Popen(
            [sys.executable, '-c', script],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            cwd=os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'testpkg-relimport'),
            encoding='utf8',
        )
        data = p.communicate()[0]
        data = data.strip()
        if data.endswith(' refs]'):
            # with --with-pydebug builds
            data = data.rsplit('\n', 1)[0].strip()
        sts = p.wait()
        if sts != 0:
            print (data)
        self.assertEqual(sts, 0)
        return data
    def testRootModule(self):
        m = self.importModule('mod')
        self.assertEqual(m, 'mod')
    def testRootPkg(self):
        m = self.importModule('pkg')
        self.assertEqual(m, 'pkg')
    def testSubModule(self):
        m = self.importModule('pkg.mod')
        self.assertEqual(m, 'pkg.mod')
    # python3 always has __future__.absolute_import
    def testOldStyle(self):
        m = self.importModule('pkg.oldstyle.mod')
        self.assertEqual(m, 'mod')
    def testNewStyle(self):
        m = self.importModule('pkg.toplevel.mod')
        self.assertEqual(m, 'mod')
    def testRelativeImport(self):
        m = self.importModule('pkg.relative.mod')
        self.assertEqual(m, 'pkg.mod')
        m = self.importModule('pkg.subpkg.relative.mod')
        self.assertEqual(m, 'pkg.mod')
        m = self.importModule('pkg.subpkg.mod2.mod')
        self.assertEqual(m, 'pkg.sub2.mod')
        m = self.importModule('pkg.subpkg.relative2')
        self.assertEqual(m, 'pkg.subpkg.relative2')
class TestModuleGraphImport (unittest.TestCase):
    """Builds a ModuleGraph over testpkg-relimport/script.py and verifies
    node types, edge sets and DependencyInfo for every import form used by
    the test package (plain, from-list, relative, subpackage)."""
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if not isinstance(value, types):
                self.fail("%r is not an instance of %r"%(value, types))
    def setUp(self):
        root = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'testpkg-relimport')
        self.mf = modulegraph.ModuleGraph(path=[ root ] + sys.path)
        #self.mf.debug = 999
        self.script_name = os.path.join(root, 'script.py')
        self.mf.add_script(self.script_name)
    def testGraphStructure(self):
        """Exhaustive check of nodes, outgoing edges and DependencyInfo."""
        # 1. Script to imported modules
        n = self.mf.find_node(self.script_name)
        self.assertIsInstance(n, modulegraph.Script)
        imported = ('mod', 'pkg', 'pkg.mod', 'pkg.oldstyle',
            'pkg.relative', 'pkg.toplevel', 'pkg.subpkg.relative',
            'pkg.subpkg.relative2', 'pkg.subpkg.mod2')
        for nm in imported:
            n2 = self.mf.find_node(nm)
            ed = self.mf.edgeData(n, n2)
            self.assertIsInstance(ed, modulegraph.DependencyInfo)
            self.assertEqual(ed, modulegraph.DependencyInfo(
                fromlist=False, conditional=False, function=False, tryexcept=False))
        refs = self.mf.outgoing(n)
        self.assertEqual(set(refs), set(self.mf.find_node(nm) for nm in imported))
        refs = list(self.mf.incoming(n))
        # The script is a toplevel item and is therefore referred to from the graph root (aka 'None')
        # FIXME fails since PyInstaller skips edges pointing to the current
        # graph, see change 49c725e9f5a79b65923b8e1bfdd794f0f6f7c4bf
        #self.assertEqual(refs, [None])
        # 2. 'mod'
        n = self.mf.find_node('mod')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = list(self.mf.outgoing(n))
        self.assertEqual(refs, [])
        #refs = list(self.mf.incoming(n))
        #self.assertEquals(refs, [])
        # 3. 'pkg'
        n = self.mf.find_node('pkg')
        self.assertIsInstance(n, modulegraph.Package)
        refs = list(self.mf.outgoing(n))
        self.maxDiff = None
        self.assertEqual(refs, [n])
        #refs = list(self.mf.incoming(n))
        #self.assertEquals(refs, [])
        # 4. pkg.mod
        n = self.mf.find_node('pkg.mod')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('pkg')]))
        ed = self.mf.edgeData(n, self.mf.find_node('pkg'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        # 5. pkg.oldstyle
        n = self.mf.find_node('pkg.oldstyle')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        n2 = self.mf.find_node('mod')
        self.assertEqual(refs, set([self.mf.find_node('pkg'), n2]))
        ed = self.mf.edgeData(n, n2)
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        # 6. pkg.relative
        n = self.mf.find_node('pkg.relative')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('__future__'), self.mf.find_node('pkg'), self.mf.find_node('pkg.mod')]))
        ed = self.mf.edgeData(n, self.mf.find_node('pkg.mod'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=True, conditional=False, function=False, tryexcept=False))
        ed = self.mf.edgeData(n, self.mf.find_node('__future__'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        #ed = self.mf.edgeData(n, self.mf.find_node('__future__.absolute_import'))
        #self.assertIsInstance(ed, modulegraph.DependencyInfo)
        #self.assertEqual(ed, modulegraph.DependencyInfo(
            #fromlist=True, conditional=False, function=False, tryexcept=False))
        # 7. pkg.toplevel
        n = self.mf.find_node('pkg.toplevel')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('__future__'), self.mf.find_node('pkg'), self.mf.find_node('mod')]))
        ed = self.mf.edgeData(n, self.mf.find_node('mod'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        ed = self.mf.edgeData(n, self.mf.find_node('__future__'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        #ed = self.mf.edgeData(n, self.mf.find_node('__future__.absolute_import'))
        #self.assertIsInstance(ed, modulegraph.DependencyInfo)
        #self.assertEqual(ed, modulegraph.DependencyInfo(
            #fromlist=True, conditional=False, function=False, tryexcept=False))
        # 8. pkg.subpkg
        n = self.mf.find_node('pkg.subpkg')
        self.assertIsInstance(n, modulegraph.Package)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('pkg')]))
        ed = self.mf.edgeData(n, self.mf.find_node('pkg'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        # 9. pkg.subpkg.relative
        n = self.mf.find_node('pkg.subpkg.relative')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('__future__'), self.mf.find_node('pkg'), self.mf.find_node('pkg.subpkg'), self.mf.find_node('pkg.mod')]))
        ed = self.mf.edgeData(n, self.mf.find_node('pkg.subpkg'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=False, conditional=False, function=False, tryexcept=False))
        ed = self.mf.edgeData(n, self.mf.find_node('pkg.mod'))
        self.assertIsInstance(ed, modulegraph.DependencyInfo)
        self.assertEqual(ed, modulegraph.DependencyInfo(
            fromlist=True, conditional=False, function=False, tryexcept=False))
        # 10. pkg.subpkg.relative2
        n = self.mf.find_node('pkg.subpkg.relative2')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([self.mf.find_node('pkg.subpkg'), self.mf.find_node('pkg.relimport'), self.mf.find_node('__future__')]))
        # 10. pkg.subpkg.mod2
        n = self.mf.find_node('pkg.subpkg.mod2')
        self.assertIsInstance(n, modulegraph.SourceModule)
        refs = set(self.mf.outgoing(n))
        self.assertEqual(refs, set([
            self.mf.find_node('__future__'),
            self.mf.find_node('pkg.subpkg'),
            self.mf.find_node('pkg.sub2.mod'),
            self.mf.find_node('pkg.sub2'),
        ]))
    def testRootModule(self):
        node = self.mf.find_node('mod')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'mod')
    def testRootPkg(self):
        node = self.mf.find_node('pkg')
        self.assertIsInstance(node, modulegraph.Package)
        self.assertEqual(node.identifier, 'pkg')
    def testSubModule(self):
        node = self.mf.find_node('pkg.mod')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.mod')
    def testOldStyle(self):
        node = self.mf.find_node('pkg.oldstyle')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.oldstyle')
        # First non-__future__ edge is the implicitly-relative import target.
        sub = [ n for n in self.mf.get_edges(node)[0] if n.identifier != '__future__' ][0]
        self.assertEqual(sub.identifier, 'mod')
    def testNewStyle(self):
        node = self.mf.find_node('pkg.toplevel')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.toplevel')
        sub = [ n for n in self.mf.get_edges(node)[0] if not n.identifier.startswith('__future__')][0]
        self.assertEqual(sub.identifier, 'mod')
    def testRelativeImport(self):
        node = self.mf.find_node('pkg.relative')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.relative')
        sub = [ n for n in self.mf.get_edges(node)[0] if not n.identifier.startswith('__future__') ][0]
        self.assertIsInstance(sub, modulegraph.Package)
        self.assertEqual(sub.identifier, 'pkg')
        node = self.mf.find_node('pkg.subpkg.relative')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.subpkg.relative')
        sub = [ n for n in self.mf.get_edges(node)[0] if not n.identifier.startswith('__future__') ][0]
        self.assertIsInstance(sub, modulegraph.Package)
        self.assertEqual(sub.identifier, 'pkg')
        node = self.mf.find_node('pkg.subpkg.mod2')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.subpkg.mod2')
        sub = [ n for n in self.mf.get_edges(node)[0] if not n.identifier.startswith('__future__') ][0]
        self.assertIsInstance(sub, modulegraph.SourceModule)
        self.assertEqual(sub.identifier, 'pkg.sub2.mod')
        node = self.mf.find_node('pkg.subpkg.relative2')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(node.identifier, 'pkg.subpkg.relative2')
        node = self.mf.find_node('pkg.relimport')
        self.assertIsInstance(node, modulegraph.SourceModule)
class TestRegressions1 (unittest.TestCase):
    """Regression tests over testpkg-regr1: graph must contain pkg.a and
    pkg.b; a missing path entry must not raise os.error."""
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if not isinstance(value, types):
                # Bug fix: TestCase.fail() takes a single message argument --
                # the original passed (msg, value, types) positionally, which
                # raises TypeError instead of reporting the failure.  Use
                # %-formatting like the sibling test classes in this file.
                self.fail("%r is not an instance of %r" % (value, types))
    def setUp(self):
        root = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'testpkg-regr1')
        self.mf = modulegraph.ModuleGraph(path=[ root ] + sys.path)
        self.mf.add_script(os.path.join(root, 'main_script.py'))
    def testRegr1(self):
        node = self.mf.find_node('pkg.a')
        self.assertIsInstance(node, modulegraph.SourceModule)
        node = self.mf.find_node('pkg.b')
        self.assertIsInstance(node, modulegraph.SourceModule)
    def testMissingPathEntry(self):
        root = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'nosuchdirectory')
        try:
            # Constructing the graph is the assertion: it must tolerate a
            # nonexistent path entry.
            mf = modulegraph.ModuleGraph(path=[ root ] + sys.path)
        except os.error:
            self.fail('modulegraph initialiser raises os.error')
class TestRegressions2 (unittest.TestCase):
    """Regression test over testpkg-regr2: the graph built from
    main_script.py must contain pkg.base and pkg.pkg as source modules."""
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if isinstance(value, types):
                return
            self.fail("%r is not an instance of %r"%(value, types))
    def setUp(self):
        here = os.path.dirname(os.path.abspath(__file__))
        root = os.path.join(here, 'testpkg-regr2')
        self.mf = modulegraph.ModuleGraph(path=[root] + sys.path)
        self.mf.add_script(os.path.join(root, 'main_script.py'))
    def testRegr1(self):
        for name in ('pkg.base', 'pkg.pkg'):
            self.assertIsInstance(self.mf.find_node(name),
                                  modulegraph.SourceModule)
class TestRegressions3 (unittest.TestCase):
    """Regression test over testpkg-regr3: a local subpackage (mypkg.json)
    forwarding to a stdlib package must not shadow the real stdlib nodes."""
    # NOTE: in its original variant, this test was using the `distutils`
    # package as the test package; this has now been replaced with `json`.
    #
    # The reason is that with `setuptools`-provided distutils, modulegraph
    # fails to account for the meta-path loader used by setuptools to
    # override the distutils, and instead finds the stdlib `distutils`.
    # This failure is independent of the scenario of this test, which
    # involves `mypkg` creating a subpackage that essentially forwards
    # to a stdlib package.
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if not isinstance(value, types):
                self.fail("%r is not an instance of %r"%(value, types))
    def assertStartswith(self, value, test):
        """Custom assertion: *value* must be a string starting with *test*."""
        if not value.startswith(test):
            self.fail("%r does not start with %r"%(value, test))
    def setUp(self):
        root = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'testpkg-regr3')
        self.mf = modulegraph.ModuleGraph(path=[ root ] + sys.path)
        self.mf.add_script(os.path.join(root, 'script.py'))
    def testRegr1(self):
        # The local forwarding package resolves to files under the test dir...
        node = self.mf.find_node('mypkg.json')
        self.assertIsInstance(node, modulegraph.Package)
        node = self.mf.find_node('mypkg.json.encoder')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertStartswith(node.filename, os.path.dirname(__file__))
        # ...while the real stdlib json modules resolve to the stdlib files.
        import json.encoder, json.decoder
        node = self.mf.find_node('json.encoder')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(os.path.dirname(node.filename),
                os.path.dirname(json.encoder.__file__))
        node = self.mf.find_node('json.decoder')
        self.assertIsInstance(node, modulegraph.SourceModule)
        self.assertEqual(os.path.dirname(node.filename),
                os.path.dirname(json.decoder.__file__))
class TestRegression4 (unittest.TestCase):
    """Regression test over testpkg-regr4: pkg.core is a Package and its
    three submodules are SourceModules."""
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if isinstance(value, types):
                return
            self.fail("%r is not an instance of %r"%(value, types))
    def setUp(self):
        here = os.path.dirname(os.path.abspath(__file__))
        root = os.path.join(here, 'testpkg-regr4')
        self.mf = modulegraph.ModuleGraph(path=[root] + sys.path)
        self.mf.add_script(os.path.join(root, 'script.py'))
    def testRegr1(self):
        self.assertIsInstance(self.mf.find_node('pkg.core'),
                              modulegraph.Package)
        for name in ('pkg.core.callables',
                     'pkg.core.listener',
                     'pkg.core.listenerimpl'):
            self.assertIsInstance(self.mf.find_node(name),
                                  modulegraph.SourceModule)
class TestRelativeReferenceToToplevel (unittest.TestCase):
    """Checks that relative imports that climb above the toplevel package
    are recorded as InvalidRelativeImport nodes (testpkg-regr7)."""
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if not isinstance(value, types):
                self.fail("%r is not an instance of %r"%(value, types))
    def test_relative_import_too_far(self):
        # pkg.mod tries to import "..sys" (outside of the package...)
        root = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'testpkg-regr7')
        mf = modulegraph.ModuleGraph(path=[ root ] + sys.path)
        mf.add_script(os.path.join(root, 'script.py'))
        m = mf.find_node('')
        self.assertIs(m, None)
        m = mf.find_node('pkg.mod')
        self.assertIsInstance(m, modulegraph.SourceModule)
        imported = list(mf.get_edges(m)[0])
        self.assertEqual(len(imported), 5)
        im = imported[0]
        self.assertIsInstance(im, modulegraph.InvalidRelativeImport)
        self.assertEqual(im.relative_path, '..')
        self.assertEqual(im.from_name, 'sys')
        self.assertEqual(im.identifier, '..sys')
        im1 = imported[1]
        im2 = imported[2]
        if im1.identifier == '...xml':
            # Order of modules imported in a single 'from .. import a, b' list
            # is unspecified, ensure a fixed order for this test.
            im2, im1 = im1, im2
        self.assertIsInstance(im1, modulegraph.InvalidRelativeImport)
        self.assertEqual(im1.relative_path, '...')
        self.assertEqual(im1.from_name, 'os')
        self.assertEqual(im1.identifier, '...os')
        # Bug fix: removed the dead `im = imported[2]` assignment that used
        # to sit here -- the value was never read (the assertions below use
        # im2, which already holds the right node after the swap above).
        self.assertIsInstance(im2, modulegraph.InvalidRelativeImport)
        self.assertEqual(im2.relative_path, '...')
        self.assertEqual(im2.from_name, 'xml')
        self.assertEqual(im2.identifier, '...xml')
        im = imported[3]
        self.assertIsInstance(im, modulegraph.InvalidRelativeImport)
        self.assertEqual(im.relative_path, '..foo')
        self.assertEqual(im.from_name, 'bar')
        self.assertEqual(im.identifier, '..foo.bar')
        im = imported[4]
        self.assertIs(im, mf.find_node('pkg'))
class TestInvalidAsyncFunction (unittest.TestCase):
    # NOTE(review): this class currently defines no test methods -- it only
    # carries the assertIsInstance fallback.  Presumably its tests were
    # removed or have not yet been written; confirm whether it can be
    # deleted.
    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        # Fallback for very old unittest versions lacking assertIsInstance.
        def assertIsInstance(self, value, types):
            if not isinstance(value, types):
                self.fail("%r is not an instance of %r"%(value, types))
def test_extended_args_import():
    """EXTENDED_ARG opcodes (forced by ~300 leading constants) must not break
    bytecode scanning: the trailing 'import os' is still detected."""
    lines = [f"dummy_var{i} = {i}\n" for i in range(300)]
    lines.append("import os\n")
    code = compile("".join(lines), "", "exec")
    node = modulegraph.Node("dummy_module")
    node.code = code
    node._deferred_imports = []
    graph = modulegraph.ModuleGraph()
    graph._scan_bytecode(node, code, True)
    assert node._deferred_imports[0][1][0] == "os"
if __name__ == "__main__":
unittest.main()
| pyinstaller/pyinstaller | tests/unit/test_modulegraph/test_imports.py | test_imports.py | py | 20,640 | python | en | code | 10,769 | github-code | 1 | [
{
"api_name": "unittest.TestCase",
"line_number": 11,
"usage_type": "attribute"
},
{
"api_name": "textwrap.dedent",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "textwrap.dedent",
"line_number": 25,
"usage_type": "call"
},
{
"api_name": "subprocess.Pop... |
18061284249 | import gym
import math
import numpy as np
import cv2
import hashlib
import collections
from gym.envs.atari import AtariEnv
from . import utils
from gym.vector import VectorEnv
from typing import Union, Optional
class EpisodicDiscounting(gym.Wrapper):
    """
    Applies discounting at the episode level: each reward is multiplied by a
    discount factor derived from the episode time reported by a TimeAware
    wrapper further down the stack.
    """
    def __init__(self, env: gym.Env, discount_type, discount_gamma=1.0, discount_bias: float = 1.0):
        super().__init__(env)
        self.env = env
        # NOTE(review): self.t is initialised but never updated or read in
        # this class -- presumably vestigial.
        self.t = 0
        self.discount_type = discount_type
        self.discount_gamma = discount_gamma
        self.discount_bias = discount_bias
    @staticmethod
    def get_discount(i: float, discount_type: str, gamma: float=1.0, discount_bias: float = 1.0):
        """
        Returns discount (gamma_i) for reward (r_i), with discounting parameter gamma.

        i is shifted by discount_bias before the schedule is evaluated.
        Raises ValueError for an unknown discount_type.
        """
        i = i + discount_bias
        if discount_type == "finite":
            # Undiscounted up to horizon m = 1/(1-gamma), zero afterwards.
            m = 1/(1-gamma)
            discount = 1.0 if i <= m else 0
        elif discount_type == "geometric":
            discount = gamma ** i
        elif discount_type == "quadratic":
            discount = 1 / (i*(i+1))
        elif discount_type == "power": # also called hyperbolic
            epsilon = 1e-1
            discount = i ** (-1-epsilon) # minus epsilon so sequence converges
        elif discount_type == "harmonic":
            # NOTE(review): log(i) is 0 when i == 1 (e.g. time 0 with the
            # default discount_bias of 1.0), giving a ZeroDivisionError --
            # confirm callers never hit i == 1 with this schedule.
            discount = 1 / (i * (math.log(i)**2))
        elif discount_type == "none":
            discount = 1.0
        else:
            raise ValueError(f"Invalid discount_type {discount_type}")
        return discount
    @staticmethod
    def get_normalization_constant(k:np.ndarray, discount_type: str, gamma: float = 1.0, discount_bias: float = 1.0):
        """Returns the tail-sum normalizer for rewards from step k onwards
        (closed-form approximations of sum_{i>=k} discount(i))."""
        k = k + discount_bias
        if discount_type == "finite":
            m = 1/(1-gamma)
            steps_remaining = (m-k)
            steps_remaining = np.clip(steps_remaining, 0, float('inf')) # make sure steps remaining is not negative
            normalizer = steps_remaining+1
        elif discount_type == "geometric":
            normalizer = (gamma ** k) / (1-gamma)
        elif discount_type == "quadratic":
            normalizer = 1 / k
        elif discount_type == "power": # also called hyperbolic
            epsilon = 1e-1
            normalizer = (1 / epsilon) * (k ** -epsilon)
        elif discount_type == "harmonic":
            normalizer = 1 / np.log(k)
        elif discount_type == "none":
            normalizer = 1.0
        else:
            raise ValueError(f"Invalid discount_type {discount_type}")
        return normalizer
    def reset(self):
        return self.env.reset()
    def step(self, action):
        # Scale this step's reward by the discount at the current episode time.
        obs, reward, done, info = self.env.step(action)
        assert "time" in info, "Must place timeAware wrapper before episodic discount wrapper"
        time = info["time"]
        discount = EpisodicDiscounting.get_discount(time, discount_type=self.discount_type, gamma=self.discount_gamma,
                                                    discount_bias=self.discount_bias)
        reward *= discount
        return obs, reward, done, info
class NoPassThruWrapper(gym.Wrapper):
    """
    Debug wrapper: after a reset only the very first step() touches the real
    environment; every later step returns the cached first observation with
    zero reward and done=False.  Useful for measuring the overhead of the
    environment / wrapper stack.
    """
    def __init__(self, env: gym.Env):
        super().__init__(env)
        self.first = False

    def reset(self):
        self.obs = self.env.reset()
        self.first = True
        return self.obs

    def step(self, action):
        if not self.first:
            # Replay the cached observation without touching the env.
            return self.obs, 0, False, self.info
        # Advance the real environment exactly once per episode.
        self.obs, _, _, self.info = self.env.step(action)
        self.first = False
        return self.obs, 0, False, self.info
class ActionAwareWrapper(gym.Wrapper):
    """
    Marks the action used to arrive at each state onto the frame.
    Input should be a uint8 image: [H, W, C], [C, H, W] or [H, W].
    """
    def __init__(self, env: gym.Env):
        super().__init__(env)

    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        # -1 = no action taken yet, nothing is drawn.
        return self._process_obs(obs, -1)

    def _process_obs(self, obs, action: int):
        """Draw a white block whose position encodes *action* onto obs."""
        # input should be C, H, W, or H, W
        assert obs.dtype == np.uint8
        # draw actions we pressed on frames
        BLOCK_SIZE = 4
        if action >= 0:
            x = action * BLOCK_SIZE
            y = 0
            if len(obs.shape) == 2:
                obs[x:x+BLOCK_SIZE, y:y+BLOCK_SIZE] = 255
            else:
                C, H, W = obs.shape
                # this is a bit of a hack, procgen and atari have different
                # channel order; a small leading dim is assumed to be channels.
                if C < H:
                    obs[:, x:x + BLOCK_SIZE, y:y + BLOCK_SIZE] = 255
                else:
                    obs[x:x + BLOCK_SIZE, y:y + BLOCK_SIZE, :] = 255
        return obs

    def step(self, action: int):
        # Bug fix: the original checked `type(action) in [int, np.int, ...]`.
        # np.int was deprecated in NumPy 1.20 and removed in 1.24, so that
        # line raises AttributeError on modern NumPy.  isinstance with
        # np.integer covers all fixed-width numpy integer types as well.
        assert isinstance(action, (int, np.integer)), \
            f"Action aware requires discrete actions, but found action of type {type(action)}"
        obs, reward, done, info = self.env.step(action)
        return self._process_obs(obs, action), reward, done, info
class TimeAwareWrapper(gym.Wrapper):
    """
    Includes time on frame of last channel of observation (which is last state if using stacking)
    Observational spaces should be 2d image in format
    [..., C, H, W]
    """
    def __init__(self, env: gym.Env, log:bool=False):
        """
        Enabling log will present the log time elapsed.
        """
        super().__init__(env)
        self.log = log
    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        # Episode just started: draw the bar at 0% progress.
        return self._process_obs(obs, 0)
    def _process_obs(self, obs, time_frac):
        # Draws a horizontal progress bar along the bottom rows of channel 0;
        # time_frac in [0, 1] controls how far the bright segment extends.
        assert obs.dtype == np.uint8
        *_, C, H, W = obs.shape
        x_point = 3 + (W-6) * time_frac
        obs[..., 0, -4:, :] = 0
        obs[..., 0, -3:-1, 3:-3] = 64
        obs[..., 0, -3:-1, 3:math.floor(x_point)] = 255
        # Fractional final pixel: brightness interpolated between 64 and 255.
        obs[..., 0, -3:-1, math.floor(x_point)] = 64+int((x_point % 1) * (255-64))
        return obs
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        assert "time_frac" in info, "Must use TimeLimitWrapper."
        if self.log:
            # log
            t = info["time"]
            if t == 0:
                # Avoid division by zero on the first step; 100 is an
                # arbitrary provisional horizon.
                max_t = 100
            else:
                # Recover the episode limit from time / time_frac.
                max_t = info["time"] / info["time_frac"]
            time_frac = math.log(1+t) / math.log(1+max_t)
        else:
            # linear
            time_frac = np.clip(info["time_frac"], 0, 1)
        return self._process_obs(obs, time_frac), reward, done, info
class ActionHistoryWrapper(gym.Wrapper):
    """
    Draws the recent action history onto channel 0 of the observation:
    column x holds the action taken x steps ago as a bright pixel at
    row = action index.  Expects observations shaped [..., C, H, W].
    """
    def __init__(self, env: gym.Env):
        super().__init__(env)
        self.action_history = collections.deque(maxlen=100)

    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        self.action_history.clear()
        return self._process_obs(obs)

    def _process_obs(self, obs):
        assert obs.dtype == np.uint8
        *_, C, H, W = obs.shape
        num_actions = self.action_space.n
        # Dim background strip, one row per possible action.
        obs[0, :num_actions, :] = 32
        for col, act in enumerate(list(self.action_history)[:W]):
            # A negative action means the env step was ignored; leave blank.
            if act >= 0:
                obs[0, act, col] = 255
        return obs

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.action_history.appendleft(action)
        return self._process_obs(obs), reward, done, info

    def save_state(self, buffer):
        buffer["action_history"] = self.action_history

    def restore_state(self, buffer):
        self.action_history = buffer["action_history"]
class StateHistoryWrapper(gym.Wrapper):
    """
    Includes markings on final frame in stack indicating (compressed) history
    of states.  Assumes input is [C, H, W].
    """
    def __init__(self, env: gym.Env):
        super().__init__(env)
        self.state_history = collections.deque(maxlen=100)

    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        # Bug fix: the original called self.action_history.clear(), but this
        # wrapper never defines action_history (that attribute belongs to
        # ActionHistoryWrapper) -- the stale state_history was never cleared
        # between episodes, and reset could fail outright depending on the
        # wrapper stack.
        self.state_history.clear()
        return self._process_obs(obs)

    def _process_obs(self, obs):
        """Draw the compressed state history below the action rows on channel 0."""
        assert obs.dtype == np.uint8
        *_, C, H, W = obs.shape
        n_actions = self.action_space.n
        # we leave space for n_actions rows (used by ActionHistoryWrapper)...
        obs[0, n_actions:n_actions + 49, :] = 0
        for x, state in enumerate(list(self.state_history)[:W]):
            obs[0, n_actions:n_actions+49, x] = state
        return obs

    def compressed_state(self, x):
        """
        Returns the compressed version of the state.
        Input should be [C,H,W]; output will be [49] (a 7x7 downsample of the
        most recent frame, flattened).
        """
        x = x[-1]  # take most recent frame on the stack
        x_resized = cv2.resize(x, (7, 7), interpolation=cv2.INTER_AREA)
        assert x_resized.dtype == np.uint8
        return x_resized.ravel()

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.state_history.appendleft(self.compressed_state(obs))
        return self._process_obs(obs), reward, done, info

    def save_state(self, buffer):
        buffer["state_history"] = self.state_history

    def restore_state(self, buffer):
        self.state_history = buffer["state_history"]
class HashWrapper(gym.Wrapper):
    """
    Replaces each observation with a random pixel pattern seeded from a hash
    of the observation (or of the step counter when use_time is set).
    Because no two states share visual structure, success on this wrapper
    implies the agent memorised the environment rather than generalising
    between states.
    """
    def __init__(self, env, hash_size, use_time=False):
        """
        Map observation to a hash of observation.
        """
        super().__init__(env)
        self.env = env
        self.use_time = use_time
        self.hash_size = hash_size
        self.counter = 0

    def step(self, action):
        raw_obs, reward, done, info = self.env.step(action)
        if self.use_time:
            obs_hash = self.counter
        else:
            obs_hash = int(hashlib.sha256(raw_obs.data.tobytes()).hexdigest(), 16)
        rows, cols, channels = raw_obs.shape
        # Seed a generator from the hash.  Numpy seeds are limited to 32
        # bits, so aliasing between states is possible but assumed to be
        # very unlikely relative to the number of reachable states.
        rng = np.random.RandomState(obs_hash % (2 ** 32))
        # Random binary hash_size x hash_size pattern, scaled up to full
        # resolution with nearest-neighbour so the blocks stay crisp.
        pattern = rng.randint(0, 2, (self.hash_size, self.hash_size), dtype=np.uint8) * 255
        pattern = cv2.resize(pattern, (cols, rows), interpolation=cv2.INTER_NEAREST)
        pattern = np.concatenate([pattern[:, :, np.newaxis]] * channels, axis=2)
        self.counter += 1
        return pattern, reward, done, info

    def reset(self):
        self.counter = 0
        return self.env.reset()
class EpisodicLifeEnv(gym.Wrapper):
    """Atari wrapper: reports each life lost as an episode end, but only
    truly resets the underlying emulator on a real game over."""
    def __init__(self, env):
        """Make end-of-life == end-of-episode, but only reset on true game over.
        Done by DeepMind for the DQN and co. since it helps value estimation.
        """
        gym.Wrapper.__init__(self, env)
        self.lives = 0
        self.was_real_done = True
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.was_real_done = done
        # check current lives, make loss of life terminal,
        # then update lives to handle bonus lives
        lives = self.env.unwrapped.ale.lives()
        if lives < self.lives and lives > 0:
            # for Qbert sometimes we stay in lives == 0 condition for a few frames
            # so it's important to keep lives > 0, so that we only reset once
            # the environment advertises done.
            done = True
            # flag so downstream consumers can tell this from a real game over
            info['fake_done'] = True
        self.lives = lives
        return obs, reward, done, info
    def reset(self, **kwargs):
        """Reset only when lives are exhausted.
        This way all states are still reachable even though lives are episodic,
        and the learner need not know about any of this behind-the-scenes.
        """
        if self.was_real_done:
            obs = self.env.reset(**kwargs)
        else:
            # no-op step to advance from terminal/lost life state
            obs, _, _, _ = self.env.step(0)
        self.lives = self.env.unwrapped.ale.lives()
        return obs
class FrameSkipWrapper(gym.Wrapper):
    """
    Performs frame skipping with max over last two frames.
    From https://github.com/openai/baselines/blob/7c520852d9cf4eaaad326a3d548efc915dc60c10/baselines/common/atari_wrappers.py
    """
    def __init__(self, env, min_skip=4, max_skip=None, reduce_op=np.max):
        """Return only every `skip`-th frame.

        The skip count is sampled uniformly from [min_skip, max_skip] each
        step; reduce_op combines the last two raw frames (default np.max,
        which removes Atari sprite flicker).
        """
        gym.Wrapper.__init__(self, env)
        if max_skip is None:
            max_skip = min_skip
        assert env.observation_space.dtype == "uint8"
        assert min_skip >= 1
        assert max_skip >= min_skip
        # most recent raw observations
        self._obs_buffer = np.zeros((2,)+env.observation_space.shape, dtype=np.uint8)
        self._min_skip = min_skip
        self._max_skip = max_skip
        self._reduce_op = reduce_op
        # counts agent interactions (not raw env frames)
        self._t = 0
    def step(self, action):
        """Repeat action, sum reward, and max over last two observations."""
        total_reward = 0.0
        done = None
        info = {}
        skip = np.random.randint(self._min_skip, self._max_skip+1)
        for i in range(skip):
            obs, reward, done, _info = self.env.step(action)
            if i >= skip - 2:
                # keep only the final two raw frames for the reduce_op
                t = i - (skip - 2)
                self._obs_buffer[t] = obs
            # combine infos, with overwriting
            if _info is not None:
                info.update(_info)
            total_reward += reward
            if done:
                break
        # first frame will be from reset and gets an empty info, or the info from the last frame of previous
        # episode. Performing increment here means second frame seen will be tagged as t=1, which is what we want.
        self._t += 1
        if done:
            # may as well output a blank frame, as this frame will (should) not be used.
            # what will happen is env will be auto-reset and the first frame of the next game will
            # be used instead.
            reduce_frame = self._reduce_op(self._obs_buffer*0, axis=0)
            self._t = 0 # for some reason I use the info from the last state as the info for the reset observation.
            # this is due to gym not having a way to get info from a reset :(
        else:
            reduce_frame = self._reduce_op(self._obs_buffer, axis=0)
        # fix up the time step
        # normally time refers to the steps in the environment, however it's convenient to instead use number
        # of interactions with the environment. Therefore we remap the 'time' statistic to the number of interactions
        # and store the original time as time_raw.
        if 'time' in info:
            info['time_raw'] = info['time']
            info['time'] = self._t
        return reduce_frame, total_reward, done, info
    def save_state(self, buffer):
        buffer["t"] = self._t
    def restore_state(self, buffer):
        self._t = buffer["t"]
    def reset(self, **kwargs):
        self._t = 0
        return self.env.reset(**kwargs)
class ClipRewardWrapper(gym.Wrapper):
    """ Clips rewards to [-clip, +clip]; whenever clipping actually occurs
    the raw value is preserved in info["unclipped_reward"]. """
    def __init__(self, env: gym.Env, clip: float):
        super().__init__(env)
        self.clip = clip

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if abs(reward) > self.clip:
            # remember the pre-clip value for logging/diagnostics
            info["unclipped_reward"] = reward
        reward = np.clip(reward, -self.clip, +self.clip)
        return obs, reward, done, info
class DeferredRewardWrapper(gym.Wrapper):
    """
    Withholds all reward until step `time_limit`; with time_limit = -1 the
    accumulated episode reward is paid out on the terminal step instead.
    """
    def __init__(self, env: gym.Env, time_limit=-1):
        super().__init__(env)
        self.env = env
        self.t = 0
        self.episode_reward = 0
        self.time_limit = time_limit

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.t += 1
        self.episode_reward += reward
        pay_out = (self.t == self.time_limit) or (self.time_limit == -1 and done)
        if not pay_out:
            return obs, 0, done, info
        # Release the banked reward in one lump sum.
        total = self.episode_reward
        self.episode_reward = 0
        return obs, total, done, info

    def reset(self):
        obs = self.env.reset()
        self.t = 0
        self.episode_reward = 0
        return obs

    def save_state(self, buffer):
        buffer["t"] = self.t
        buffer["episode_reward"] = self.episode_reward

    def restore_state(self, buffer):
        self.t = buffer["t"]
        self.episode_reward = buffer["episode_reward"]
class SaveEnvStateWrapper(gym.Wrapper):
    """
    Enables saving and restoring of the environment state.
    Only support atari at the moment.
    """
    def __init__(self, env: gym.Env, determanistic: bool = True):
        # NOTE: 'determanistic' (sic) is part of the public interface, so the
        # spelling is kept. When True, the ALE RNG is included in the snapshot
        # so restored rollouts replay identically.
        super().__init__(env)
        self.determanistic = determanistic
    def save_state(self, buffer):
        """Snapshot the underlying ALE emulator state into buffer["atari"]."""
        assert type(self.unwrapped) == AtariEnv, "Only Atari is supported for state saving/loading"
        buffer["atari"] = self.unwrapped.clone_state(include_rng=self.determanistic)
    def restore_state(self, buffer):
        """Restore a previously saved ALE emulator state from buffer["atari"]."""
        assert type(self.unwrapped) == AtariEnv, "Only Atari is supported for state saving/loading"
        assert "atari" in buffer, "No state information found for Atari."
        self.unwrapped.restore_state(buffer["atari"])
class SqrtRewardWrapper(gym.Wrapper):
    """Applies a signed square-root compression to rewards (does NOT clip).

    new_reward = sign(r) * (sqrt(|r| + 1) - 1) + epsilon * r

    The small linear epsilon term keeps large rewards strictly increasing.
    (The original docstring, "Clips reward to given range", was copy-pasted
    from ClipRewardWrapper and did not describe this class.)
    """
    def __init__(self, env: gym.Env, epsilon: float = 1e-3):
        super().__init__(env)
        self.epsilon = epsilon
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        sign = -1 if reward < 0 else +1
        new_reward = sign*(math.sqrt(abs(reward)+1)-1)+self.epsilon*reward
        return obs, new_reward, done, info
class RewardCurveWrapper(gym.Wrapper):
    """Linearly ramps rewards up over the course of an episode.

    At step t the environment reward is multiplied by t * scale, so rewards
    late in the episode count for more than early ones.
    """

    def __init__(self, env: gym.Env, scale: float):
        super().__init__(env)
        self.env = env
        self.t = 0
        self.scale = scale

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.t += 1
        scaled_reward = reward * self.t * self.scale
        return obs, scaled_reward, done, info

    def reset(self):
        obs = self.env.reset()
        self.t = 0
        return obs

    def save_state(self, buffer):
        buffer["t"] = self.t

    def restore_state(self, buffer):
        self.t = buffer["t"]
class NormalizeObservationsWrapper(gym.Wrapper):
    """
    Normalizes observations to roughly zero mean / unit variance using running
    statistics, then clips to [-clip, +clip].
    """
    def __init__(self, env, clip, shadow_mode=False, initial_state=None):
        """
        @param clip: symmetric clipping range applied after normalization.
        @param shadow_mode: if True, statistics are tracked but observations
            are returned unmodified.
        @param initial_state: optional saved RunningMeanStd state to resume from.
        """
        super().__init__(env)
        self.env = env
        self.epsilon = 1e-8
        self.clip = clip
        self.obs_rms = utils.RunningMeanStd(shape=())
        self.shadow_mode = shadow_mode
        if initial_state is not None:
            self.obs_rms.restore_state(initial_state)
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        # update running statistics with the raw observation
        self.obs_rms.update(obs)
        self.mean = self.obs_rms.mean
        self.std = np.sqrt(self.obs_rms.var)
        # expose the normalization state so external workers can synchronize
        info["observation_norm_state"] = self.obs_rms.save_state()
        if self.shadow_mode:
            return obs, reward, done, info
        else:
            scaled_obs = (obs - self.mean) / (self.std + self.epsilon)
            scaled_obs = np.clip(scaled_obs, -self.clip, +self.clip)
            scaled_obs = np.asarray(scaled_obs, dtype=np.float32)
            return scaled_obs, reward, done, info
    def save_state(self, buffer):
        buffer["obs_rms"] = self.obs_rms.save_state()
    def restore_state(self, buffer):
        self.obs_rms.restore_state(buffer["obs_rms"])
class RewardScaleWrapper(gym.Wrapper):
    """Multiplies every reward by a constant factor."""

    def __init__(self, env: gym.Env, scale: float):
        super().__init__(env)
        self.scale = scale

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        scaled = self.scale * reward
        return obs, scaled, done, info
class BigRedButtonWrapper(gym.Wrapper):
    """
    With probability p each frame, darkens the observation and draws a bright
    "button" square into it. On the following step the agent must take the
    required action; any other action terminates the episode with a fixed
    -10000 penalty.
    NOTE(review): the original docstring claimed a "1% chance" (p is
    configurable) and a "penalty equal to all accumulated score" (the code
    returns a fixed -10000; `accumulated_reward` is tracked but unused) —
    confirm intended behavior.
    """
    def __init__(self, env: gym.Env, p: float = 0.01, change_actions=False):
        """
        @param p: probability that button is inserted each frame.
        @param change_actions: if True, a random action is required each time
            the button appears and the button's position encodes which one;
            otherwise action 0 is required and the button is drawn centrally.
        Note: this will not work well on environments with negative rewards.
        """
        super().__init__(env)
        self.p = p
        self.time_since_button_shown = None  # None until a button has been drawn
        self.action_required = 0
        self.accumulated_reward = 0  # NOTE(review): tracked but never used for the penalty
        self.change_actions = change_actions
    def step(self, action: int):
        obs, reward, done, info = self.env.step(action)
        assert obs.shape == (84, 84, 1), "Sorry big red button is hardcoded for 84x84 resolution, single channel."
        assert obs.dtype == np.uint8, "Sorry big red button is hardcoded for uint8."
        # exactly one step after the button was drawn, check the agent's response
        if self.time_since_button_shown == 1:
            info['button'] = self.action_required
            # we delay a little just because the environment might be stochastic
            # actually this does not matter... because stochastic is implemented up the river, by ALE.
            if action != self.action_required:
                # blow up the world
                info['pushed_button'] = True
                return obs*0, -10000, True, info
        # draw the 'button'
        if np.random.rand() < self.p:
            self.time_since_button_shown = 0
            # darken the frame so the button stands out
            obs //= 3
            if self.change_actions:
                self.action_required = np.random.randint(0, self.env.action_space.n)
                # button position encodes which action is required (4 per row)
                x_pos = 10 + (self.action_required % 4) * 13
                y_pos = 10 + (self.action_required // 4) * 13
                obs[x_pos:x_pos+10, y_pos:y_pos+10] = 255
            else:
                self.action_required = 0
                obs[42-16:42+16, 42-16:42+16] = 255
        if self.time_since_button_shown is not None:
            self.time_since_button_shown += 1
        self.accumulated_reward += reward
        return obs, reward, done, info
    def save_state(self, buffer):
        buffer["time_since_button_shown"] = self.time_since_button_shown
        buffer["accumulated_reward"] = self.accumulated_reward
        buffer["action_required"] = self.action_required
    def restore_state(self, buffer):
        self.time_since_button_shown = buffer["time_since_button_shown"]
        self.accumulated_reward = buffer["accumulated_reward"]
        self.action_required = buffer["action_required"]
    def reset(self, **kwargs):
        self.time_since_button_shown = None
        self.accumulated_reward = 0
        return self.env.reset()
class RandomTerminationWrapper(gym.Wrapper):
    """Ends the episode at each step with independent probability p.

    Useful for constructing environments whose value functions are very
    stochastic.
    """

    def __init__(self, env: gym.Env, p: float):
        super().__init__(env)
        self.p = p

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if np.random.rand() < self.p:
            done = True
        return obs, reward, done, info
class LabelEnvWrapper(gym.Wrapper):
    """Stamps a fixed key/value pair into every step's info dict."""

    def __init__(self, env: gym.Env, label_name: str, label_value: str):
        super().__init__(env)
        self.label_name = label_name
        self.label_value = label_value

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        info.update({self.label_name: self.label_value})
        return obs, reward, done, info
class ZeroObsWrapper(gym.Wrapper):
    """Blanks out every observation (obs * 0) while leaving rewards intact."""

    def __init__(self, env: gym.Env):
        super().__init__(env)

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        blank = obs * 0
        return blank, reward, done, info

    def reset(self, **kwargs):
        return self.env.reset(**kwargs) * 0
class ReturnTracker():
    """
    Tracks discounted returns for normalization across a (masked) vector of
    environments.
    """
    def __init__(self, num_envs: int, gamma: float):
        self.ret_rms = utils.RunningMeanStd(shape=())
        # running (backward-discounted) return per environment
        self.current_returns = np.zeros([num_envs], dtype=np.float32)
        self.gamma = gamma
    def reset(self):
        """Zero all running returns (e.g. on vector-env reset)."""
        self.current_returns *= 0
    def update(self, rewards: np.ndarray, dones: np.ndarray, mask: np.ndarray):
        """Fold this step's rewards into the running returns for masked envs only."""
        if sum(mask) == 0:
            return
        # the self.gamma here doesn't make sense to me as we are discounting into the future rather than from the past
        # but it is what OpenAI does...
        self.current_returns[mask] = rewards[mask] + self.gamma * self.current_returns[mask] * (1 - dones[mask])
        self.ret_rms.update(self.current_returns[mask])
class VecRepeatedActionPenalty(gym.Wrapper):
    """
    Vectorized wrapper that penalizes an environment once the same action has
    been repeated more than max_repeated_actions times in a row.
    """
    def __init__(self, env: VectorEnv, max_repeated_actions: int, penalty: float = 1):
        super().__init__(env)
        self.max_repeated_actions = max_repeated_actions
        self.penalty = penalty
        # per-environment last action and run-length of consecutive repeats
        self.prev_actions = np.zeros([env.num_envs], dtype=np.int32)
        self.duplicate_counter = np.zeros([env.num_envs], dtype=np.int32)
    def reset(self, **kwargs):
        self.prev_actions *= 0
        self.duplicate_counter *= 0
        return self.env.reset()
    def step(self, actions):
        obs, rewards, dones, infos = self.env.step(actions)
        no_action_mask = (actions >= 0)  # action=-1 means we ignored that environment
        mask = (actions == self.prev_actions) * no_action_mask
        # increment counters where the action repeated, then zero the counters
        # everywhere it did not (multiply by the 0/1 mask)
        self.duplicate_counter += mask
        self.duplicate_counter *= mask
        too_many_repeated_actions = (self.duplicate_counter > self.max_repeated_actions)
        # aggregate stats are logged on the first sub-environment's info
        infos[0]['max_repeats'] = np.max(self.duplicate_counter)
        infos[0]['mean_repeats'] = np.mean(self.duplicate_counter)
        if np.sum(too_many_repeated_actions) > 0:
            for i, repeated_action in enumerate(too_many_repeated_actions):
                if repeated_action:
                    infos[i]['repeated_action'] = actions[i]
        self.prev_actions[:] = actions[:]
        return obs, rewards - (too_many_repeated_actions * self.penalty), dones, infos
class VecNormalizeRewardWrapper(gym.Wrapper):
    """
    Normalizes rewards such that returns are roughly unit variance.
    Vectorized version.
    Also clips rewards.
    """
    def __init__(
        self,
        env: VectorEnv,
        initial_state=None,
        gamma: float = 1.0,
        clip: float = 10.0,
        scale: float = 1.0,
        returns_transform=lambda x: x,
        mode: str = "rms",
        ed_type: Optional[str] = None,
        ed_bias: float = 1.0,
        ema_horizon: float = 5e6,
    ):
        """
        Normalizes returns
        mode:
            rms uses running variance over entire history,
            ema uses ema over 5M steps.
            custom requires setting of ret_std
        """
        super().__init__(env)
        self.clip = clip
        self.epsilon = 1e-2  # was 1e-8 (1e-2 will allow 10x scaling on rewards... which probably about right.)
        # per-environment running discounted return
        self.current_returns = np.zeros([env.num_envs], dtype=np.float32)
        self.ret_rms = utils.RunningMeanStd(shape=())
        self.gamma = gamma
        self.scale = scale
        self.mode = mode
        self.returns_transform = returns_transform
        self.ed_type = ed_type
        self.ed_bias = ed_bias
        self.ret_var = 0.0  # used by "ema" mode (and settable externally in "custom" mode)
        self.ema_horizon = ema_horizon
        if initial_state is not None:
            self.ret_rms.restore_state(initial_state)
    def reset(self):
        self.current_returns *= 0
        return self.env.reset()
    def step(self, actions):
        """Step the vector env and return rewards divided by the running return std."""
        obs, rewards, dones, infos = self.env.step(actions)
        # note:
        # we used to do this with:
        #
        # self.current_returns = rewards + self.gamma * self.current_returns
        # self.ret_rms.update(self.current_returns)
        # self.current_returns = self.current_returns * (1-dones)
        #
        # which I think is more correct, but is quite inconsistent when rewards are at terminal states.
        # I also think this matches OpenAI right?
        # now instead we do it the older way, which I think was OpenAI's older method.
        # Note: the important change here is on what happens on a transition that both gives reward and terminates.
        # ok, so follow up
        # baselines https://github.com/openai/baselines/blob/ea25b9e8b234e6ee1bca43083f8f3cf974143998/baselines/common/vec_env/vec_normalize.py#L4
        # they do this the v2 way, which is zero returns after update, I can show this is wrong.
        # baselines3
        # they just ignore terminals... interesting...
        # I think my way is correct. It correlates well with the true return
        # the self.gamma here doesn't make sense to me as we are discounting into the future rather than from the past
        # but it is what OpenAI does...
        self.current_returns = rewards + self.gamma * self.current_returns * (1-dones)
        # episodic discounting return normalization
        if self.ed_type is not None:
            times = np.asarray([info.get('time', 0) for info in infos])  # during warmup we occasionally get some empty infos
            norms = EpisodicDiscounting.get_normalization_constant(times, self.ed_type, discount_bias=self.ed_bias)
        else:
            norms = 1
        self.ret_rms.update(self.returns_transform(self.current_returns/norms))  # stub /norms
        if self.mode == "ema":
            # note: we move EMA a bit faster at the beginning
            alpha = 1 - (len(dones) / min(self.ret_rms.count, self.ema_horizon))
            self.ret_var = alpha * self.ret_var + (1 - alpha) * np.var(self.current_returns)
        scaled_rewards = rewards / self.std
        # print(self.current_returns.max())
        # print(scaled_rewards.max())
        if self.clip is not None and self.clip >= 0:
            rewards_copy = scaled_rewards.copy()
            scaled_rewards = np.clip(scaled_rewards, -self.clip, +self.clip)
            clips = np.sum(rewards_copy != scaled_rewards)
            if clips > 0:
                # log if clipping occurred.
                infos[0]["reward_clips"] = clips
        scaled_rewards *= self.scale
        return obs, scaled_rewards, dones, infos
    @property
    def mean(self):
        """Running mean of the (transformed) returns."""
        return self.ret_rms.mean
    @property
    def std(self):
        """Return-std estimate used as the reward divisor; source depends on mode."""
        if self.mode == "rms":
            return math.sqrt(self.ret_rms.var + self.epsilon)
        elif self.mode in ["ema", "custom"]:
            return math.sqrt(self.ret_var + self.epsilon)
        else:
            raise ValueError(f"Invalid mode {self.mode}")
    def save_state(self, buffer):
        buffer["ret_rms"] = self.ret_rms.save_state()
        buffer["ret_var"] = self.ret_var
        buffer["current_returns"] = self.current_returns
    def restore_state(self, buffer):
        self.ret_var = buffer["ret_var"]
        self.ret_rms.restore_state(buffer["ret_rms"])
        self.current_returns = buffer["current_returns"]
class MultiEnvVecNormalizeRewardWrapper(gym.Wrapper):
    """
    Normalizes rewards such that returns are unit normal.
    Supports normalization for multiple environment types (one ReturnTracker
    per distinct info["env_id"]).
    Vectorized version.
    Also clips rewards.
    """
    def __init__(
        self,
        env: VectorEnv,
        gamma: float = 1.0,
        clip: float = 10.0,
        scale: float = 1.0,
    ):
        """
        Normalizes returns
        """
        super().__init__(env)
        self.clip = clip
        self.epsilon = 1e-8
        self.current_returns = np.zeros([env.num_envs], dtype=np.float32)
        # one tracker per env_id; 'default' covers envs that don't declare one
        self.normalizers = {'default': ReturnTracker(env.num_envs, gamma)}
        self.gamma = gamma
        self.scale = scale
    def reset(self):
        for k, v in self.normalizers.items():
            v.reset()
        return self.env.reset()
    def step(self, actions):
        obs, rewards, dones, infos = self.env.step(actions)
        env_ids = []
        for info in infos:
            env_ids.append(info.get("env_id", "default"))
        scaled_rewards = rewards.copy()
        # multi-env support
        for env_id in set(env_ids):
            if env_id not in self.normalizers:
                # lazily create a tracker the first time a new env_id appears
                self.normalizers[env_id] = ReturnTracker(self.env.num_envs, self.gamma)
            # boolean list works as a numpy mask index below
            mask = [id == env_id for id in env_ids]
            self.normalizers[env_id].update(rewards, dones, mask)
            scaled_rewards[mask] /= math.sqrt(self.normalizers[env_id].ret_rms.var + self.epsilon)
        # clip rewards, and monitor for clipping
        if self.clip is not None:
            rewards_copy = scaled_rewards.copy()
            scaled_rewards = np.clip(scaled_rewards, -self.clip, +self.clip)
            clips = np.sum(rewards_copy != scaled_rewards)
            if clips > 0:
                # log if clipping occurred.
                infos[0]["reward_clips"] = clips
        scaled_rewards *= self.scale
        return obs, scaled_rewards, dones, infos
    @property
    def mean(self):
        """Mean of the 'default' tracker's returns."""
        return self.normalizers["default"].ret_rms.mean
    @property
    def std(self):
        """Std of the 'default' tracker's returns."""
        return math.sqrt(self.normalizers["default"].ret_rms.var + self.epsilon)
    def save_state(self, buffer):
        buffer["normalizers"] = self.normalizers
    def restore_state(self, buffer):
        self.normalizers = buffer["normalizers"]
class VecNormalizeObservationsWrapper(gym.Wrapper):
    """
    Normalizes observations.
    Vectorized Version
    Preserves type
    """
    def __init__(self, env: VectorEnv, clip=3.0, initial_state=None, scale_mode="normal", stacked=False):
        """
        scale_mode:
            unit_normal: Observations will be float32 unit normal,
            scaled: Observations will be mapped to uint8 where 0 = -clip, 127 = 0, and 255 = +clip.
            shadow: No normalization applied; statistics are still tracked (monitor mu and std).
        stacked:
            if true causes normalization to be per frame rather than per stack
        Note: despite the default value "normal", only the three modes asserted
        below are accepted.
        """
        super().__init__(env)
        assert scale_mode in ["unit_normal", "scaled", "shadow"]
        self.env = env
        self.epsilon = 1e-4
        self.clip = clip
        self.obs_rms = utils.RunningMeanStd()
        self.scale_mode = scale_mode
        self.stacked = stacked
        if initial_state is not None:
            self.obs_rms.restore_state(initial_state)
    def step(self, action):
        """
        Input should be [B, *obs_shape] of not stacked, otherwise [B, [stack_size], *obs_shape]
        """
        obs: np.ndarray
        reward: np.ndarray
        obs, reward, done, info = self.env.step(action)
        if self.stacked:
            # flatten the stack dimension so stats are per single frame
            B, stack_size, *obs_shape = obs.shape
            self.obs_rms.update(obs.reshape(B*stack_size, *obs_shape))
        else:
            self.obs_rms.update(obs)
        self.mean = self.obs_rms.mean.astype(np.float32)
        self.std = np.sqrt(self.obs_rms.var).astype(np.float32)
        if self.scale_mode == "shadow":
            return obs, reward, done, info
        elif self.scale_mode == "unit_normal":
            scaled_obs = (obs.astype(np.float32) - self.mean) / (self.std + self.epsilon)
            scaled_obs = np.clip(scaled_obs, -self.clip, +self.clip)
            return scaled_obs, reward, done, info
        elif self.scale_mode == "scaled":
            scaled_obs = (obs.astype(np.float32) - self.mean) / (self.std + self.epsilon)
            # map [-clip, +clip] onto [0, 255] for a compact uint8 encoding
            scaled_obs = (np.clip(scaled_obs, -self.clip, +self.clip) / (self.clip*2) + 0.5) * 255
            scaled_obs = scaled_obs.astype(np.uint8)
            return scaled_obs, reward, done, info
        else:
            raise ValueError(f"Invalid scale_mode {self.scale_mode}")
    def save_state(self, buffer):
        buffer["obs_rms"] = self.obs_rms.save_state()
    def restore_state(self, buffer):
        self.obs_rms.restore_state(buffer["obs_rms"])
class MonitorWrapper(gym.Wrapper):
    """Copies the raw observation and reward into info each step.

    Useful for keeping an unmodified view of the input when later wrappers
    transform it.
    """

    def __init__(self, env: gym.Env, monitor_video=False):
        super().__init__(env)
        self.monitor_video = monitor_video

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if self.monitor_video:
            # store a copy so downstream mutation cannot touch it
            info["monitor_obs"] = obs.copy()
        info["raw_reward"] = reward
        return obs, reward, done, info
class FrameCropWrapper(gym.Wrapper):
    """Crops each observation frame to the rectangle [y1:y2, x1:x2]."""

    def __init__(self, env: gym.Env, x1, x2, y1, y2):
        super().__init__(env)
        # rows first, then columns
        self.cropping = (slice(y1, y2, 1), slice(x1, x2, 1))

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        cropped = obs[self.cropping]
        return cropped, reward, done, info
class TimeLimitWrapper(gym.Wrapper):
    """
    Truncates episodes at max_episode_steps and exposes timing info
    ('time', 'time_frac', 'TimeLimit.truncated').
    From https://github.com/openai/baselines/blob/master/baselines/common/wrappers.py
    """
    def __init__(self, env, max_episode_steps=None):
        super().__init__(env)
        self._max_episode_steps = max_episode_steps
        self._elapsed_steps = 0
    def step(self, ac):
        observation, reward, done, info = self.env.step(ac)
        self._elapsed_steps += 1
        if self._elapsed_steps >= self._max_episode_steps:
            done = True
            # mark that this 'done' is a truncation, not a true terminal
            info['TimeLimit.truncated'] = True
        # when a done occurs we will reset and the observation returned will be the first frame of a new
        # episode, so time_frac should be 0. Remember time_frac is the time of the state we *land in* not
        # of the state we started from.
        info['time_frac'] = (self._elapsed_steps / self._max_episode_steps) if not done else 0
        info['time'] = self._elapsed_steps if not done else 0
        return observation, reward, done, info
    def reset(self, **kwargs):
        self._elapsed_steps = 0
        return self.env.reset(**kwargs)
    def save_state(self, buffer):
        buffer["_elapsed_steps"] = self._elapsed_steps
    def restore_state(self, buffer):
        self._elapsed_steps = buffer["_elapsed_steps"]
class AtariWrapper(gym.Wrapper):
    """
    Applies Atari frame warping (resizing) to observations.
    Note: unlike Nature the initial frame cropping is disabled by default.
    input: 210x160x3 uint8 RGB frames or 210x160 uint8 grayscale frames
    output: width x height x 3 uint8 frame (84x84x3 by default).
    NOTE(review): the original docstring claimed an 84x84x1 grayscale output,
    but n_channels is hard-coded to 3 below and step() labels the channels
    RGB — grayscale conversion is not performed here.
    """
    def __init__(self, env: gym.Env, width=84, height=84, interpolation=None):
        """
        Stack and do other stuff...
        Input should be (210, 160, 3)
        Output of size (width, height, 3)
        """
        super().__init__(env)
        self._width, self._height = width, height
        assert env.observation_space.dtype == np.uint8, "Invalid dtype {}".format(env.observation_space.dtype)
        assert env.observation_space.shape in [(210, 160), (210, 160, 3)], "Invalid shape {}".format(env.observation_space.shape)
        if interpolation is None:
            # sort out default interpolation
            if (width, height) == (210, 160):
                interpolation = cv2.INTER_NEAREST  # this doesn't matter as no interpolation will be done.
            elif (width, height) == (105, 80):
                interpolation = cv2.INTER_LINEAR  # faster and better with a clean scaling
            else:
                interpolation = cv2.INTER_AREA  # safest option for general resizing.
        self.n_channels = 3
        self.interpolation = interpolation
        self.observation_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=(self._width, self._height, self.n_channels),
            dtype=np.uint8,
        )
    def _process_frame(self, obs):
        """Resize a single frame to the target resolution, keeping 3 dims."""
        assert len(obs.shape) in [2, 3]
        if len(obs.shape) == 2:
            obs = np.expand_dims(obs, 2)
        width, height, channels = obs.shape
        if (width, height) != (self._width, self._height):
            # cv2.resize takes dsize as (cols, rows), hence the swapped order
            obs = cv2.resize(obs, (self._height, self._width), interpolation=self.interpolation)
        if len(obs.shape) == 2:
            # cv2.resize drops a trailing singleton channel; restore it
            obs = obs[:, :, np.newaxis]
        return obs
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        info["channels"] = ["ColorR", "ColorG", "ColorB"]
        return self._process_frame(obs), reward, done, info
    def reset(self):
        obs = self.env.reset()
        return self._process_frame(obs)
class TimeFeatureWrapper(gym.Wrapper):
    """Appends normalized episode time as an extra input feature.

    Observations must be vectors in R^D; the output lives in R^(D+1) with the
    final entry set to info['time_frac'] (0 on reset). Requires a time-limit
    wrapper earlier in the chain to provide 'time_frac'.
    """

    def __init__(self, env):
        super().__init__(env)
        self.env = env
        assert len(self.env.observation_space.shape) == 1, f"Input should in R^D, shape was {self.env.observation_space.shape}"
        D = self.env.observation_space.shape[0]
        self.observation_space = gym.spaces.Box(0, 255, (D+1,), dtype=self.env.observation_space.dtype)

    @staticmethod
    def _process_frame(obs: np.ndarray, time: float):
        # copy the observation into a buffer one slot longer, time at the end
        extended = np.zeros((obs.shape[0] + 1,), dtype=obs.dtype)
        extended[:-1] = obs
        extended[-1] = time
        return extended

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        assert 'time_frac' in info, "must include timelimit wrapper before TimeChannelWrapper"
        return self._process_frame(obs, info['time_frac']), reward, done, info

    def reset(self):
        return self._process_frame(self.env.reset(), 0)
class TimeChannelWrapper(gym.Wrapper):
    """
    Adds normalized episode time as an extra image channel (time_frac * 255).
    Input should be in HWC order; requires a time-limit wrapper earlier in the
    chain to provide info['time_frac'].
    """
    def __init__(self, env):
        super().__init__(env)
        self.env = env
        H, W, C = self.env.observation_space.shape
        assert C < H, f"Input should be in HWC format, not CHW, shape was {self.env.observation_space.shape}"
        self.observation_space = gym.spaces.Box(0, 255, (H, W, C+1), dtype=np.uint8)
    def _process_frame(self, obs: np.ndarray, time: float):
        assert obs.dtype == np.uint8
        H, W, C = obs.shape
        # NOTE(review): this check enforces channels-LAST (HWC), but the
        # assertion message says "channels first" — the message is misleading.
        assert C < H, "Must be channels first."
        new_obs = np.zeros((H, W, C+1), dtype=np.uint8)
        new_obs[:, :, :-1] = obs
        # time channel is a constant plane scaled to the uint8 range
        new_obs[:, :, -1] = time * 255
        return new_obs
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        assert 'time_frac' in info, "must include timelimit wrapper before TimeChannelWrapper"
        obs = self._process_frame(obs, info['time_frac'])
        if "channels" in info:
            info["channels"] += ["Gray"]
        return obs, reward, done, info
    def reset(self):
        obs = self.env.reset()
        return self._process_frame(obs, 0)
class ChannelsFirstWrapper(gym.Wrapper):
    """Transposes observations from HWC to CHW (channels-first) order."""

    def __init__(self, env):
        super().__init__(env)
        self.env = env
        H, W, C = self.env.observation_space.shape
        assert C < H, f"Input should be in HWC format, not CHW, shape was {self.env.observation_space.shape}"
        self.observation_space = gym.spaces.Box(0, 255, (C, H, W), dtype=np.uint8)

    def _process_frame(self, obs):
        # move the trailing channel axis to the front
        return np.transpose(obs, (2, 0, 1))

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        return self._process_frame(obs), reward, done, info

    def reset(self):
        return self._process_frame(self.env.reset())
class ColorTransformWrapper(gym.Wrapper):
    """
    Converts observations between color spaces (bw/rgb/yuv/hsv) and labels the
    resulting channels in info["channels"]. Input must be HWC uint8.
    """
    def __init__(self, env, color_mode: str):
        super().__init__(env)
        self.env = env
        H, W, C = self.env.observation_space.shape
        assert C < H, f"Input should be in HWC format, not CHW, shape was {self.env.observation_space.shape}"
        assert color_mode in ["bw", "rgb", "yuv", "hsv"], f'Color mode should be one of ["bw", "rgb", "yuv", "hsv"] but was {color_mode}'
        self.expected_input_shape = (H, W, C)
        if color_mode in ["bw"]:
            assert C in [1, 3]
            output_shape = (H, W, 1)
        elif color_mode in ["rgb", "yuv", "hsv"]:
            assert C == 3, f"Expecting 3 channels, found {C}"
            output_shape = (H, W, 3)
        else:
            raise ValueError("Invalid color mode.")
        self.color_mode = color_mode
        self.observation_space = gym.spaces.Box(0, 255, output_shape, dtype=np.uint8)
    def _process_frame(self, obs: np.ndarray):
        """Convert one HWC frame into the configured color space."""
        assert obs.shape == self.expected_input_shape, f"Shape missmatch, expecting {self.expected_input_shape} found {obs.shape}"
        H, W, C = obs.shape
        if C == 1:
            # this is just a black and white frame
            return obs
        elif self.color_mode == "bw":
            return cv2.cvtColor(obs, cv2.COLOR_RGB2GRAY)[:, :, None]
        elif self.color_mode == "yuv":
            return cv2.cvtColor(obs, cv2.COLOR_RGB2YUV)
        elif self.color_mode == "hsv":
            return cv2.cvtColor(obs, cv2.COLOR_RGB2HSV)
        elif self.color_mode == "rgb":
            return obs
        else:
            raise ValueError(f"Invalid color_mode {self.color_mode}")
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        if self.color_mode == "bw":
            # single grayscale channel
            info["channels"] = ["Gray"]
        elif self.color_mode == "rgb":
            # red / green / blue channels
            info["channels"] = ["ColorR", "ColorG", "ColorB"]
        elif self.color_mode == "yuv":
            # luma / chroma channels
            info["channels"] = ["ColorY", "ColorU", "ColorV"]
        elif self.color_mode == "hsv":
            # hue / saturation / value channels
            info["channels"] = ["ColorH", "ColorS", "ColorV"]
        return self._process_frame(obs), reward, done, info
    def reset(self):
        obs = self.env.reset()
        return self._process_frame(obs)
class DelayedStateDistortionWrapper(gym.Wrapper):
    """Applies a negation filter (255 - obs) once `delay` frames have been seen.

    Frames pass through untouched until the delay has elapsed; afterwards
    every frame is inverted. Frames are counted on both step and reset.
    """

    def __init__(self, env, delay: int):
        super().__init__(env)
        self.env = env
        self.frames_seen = 0
        self.delay = delay

    def _process_frame(self, obs: np.ndarray):
        assert obs.dtype == np.uint8
        # invert pixel values only after the delay has elapsed
        return obs if self.frames_seen < self.delay else 255 - obs

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.frames_seen += 1
        return self._process_frame(obs), reward, done, info

    def reset(self):
        obs = self.env.reset()
        self.frames_seen += 1
        return self._process_frame(obs)

    def save_state(self, buffer):
        buffer["frames_seen"] = self.frames_seen

    def restore_state(self, buffer):
        self.frames_seen = buffer["frames_seen"]
class NullActionWrapper(gym.Wrapper):
    """Interprets a negative action as "do not step the environment".

    On a negative action the wrapped env is left untouched and the previous
    observation/info are returned with reward 0 and done False. Helpful for
    vectorized environments where some workers need to idle.
    """

    def __init__(self, env):
        gym.Wrapper.__init__(self, env)
        self._prev_obs = None
        self._prev_info = {}

    def step(self, action: int):
        if action >= 0:
            obs, reward, done, info = self.env.step(action)
            self._prev_obs, self._prev_info = obs, info
            return obs, reward, done, info
        # null action: replay the last observation/info without stepping
        return self._prev_obs, 0, False, self._prev_info

    def reset(self, **kwargs):
        self._prev_obs = self.env.reset(**kwargs)
        return self._prev_obs
class EpisodeScoreWrapper(gym.Wrapper):
    """Tracks the running score and length of the current episode in info."""

    def __init__(self, env):
        super().__init__(env)
        self.ep_score = 0
        self.ep_length = 0

    def step(self, action: int):
        obs, reward, done, info = self.env.step(action)
        self.ep_score += reward
        self.ep_length += 1
        info.update(ep_score=self.ep_score, ep_length=self.ep_length)
        return obs, reward, done, info

    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        self.ep_score = 0
        self.ep_length = 0
        return obs

    def save_state(self, buffer):
        buffer["ep_score"] = self.ep_score
        buffer["ep_length"] = self.ep_length

    def restore_state(self, buffer):
        self.ep_score = buffer["ep_score"]
        self.ep_length = buffer["ep_length"]
class NoopResetWrapper(gym.Wrapper):
    """
    Applies a random number of no-op actions before agent can start playing.
    From https://github.com/openai/baselines/blob/7c520852d9cf4eaaad326a3d548efc915dc60c10/baselines/common/atari_wrappers.py
    """
    def __init__(self, env, noop_max=30):
        """Sample initial states by taking random number of no-ops on reset.
        No-op is assumed to be action 0.
        """
        gym.Wrapper.__init__(self, env)
        self.noop_max = noop_max
        self.override_num_noops = None  # set externally to force a fixed count
        self.noop_action = 0
        self.noop_given = None  # number of noops used, reported once via info
        assert env.unwrapped.get_action_meanings()[0] == 'NOOP'
    def reset(self, **kwargs):
        """ Do no-op action for up to noop_max steps.
        Note: this differs from openAI's implementation in that theirs would perform at least one noop, but
        this one may sometimes perform 0. This means a noop trained agent will do well if tested on no noop.
        Actually: if we don't do at least 1 the obs will be wrong, as obs on reset is incorrect for some reason...
        one of the wrappers makes a note of this (the stacking one I think). Because of this I always noop for
        atleast one action.
        """
        obs = self.env.reset(**kwargs)
        if self.override_num_noops is not None:
            noops = self.override_num_noops
            print(f"Forcing {noops} NOOPs.")
        else:
            # randint's upper bound is exclusive, so this draws 1..noop_max
            noops = np.random.randint(1, self.noop_max+1)
        assert noops >= 0
        self.noop_given = noops
        for _ in range(noops):
            obs, _, done, _ = self.env.step(self.noop_action)
            if done:
                # episode ended during the noops; start over
                obs = self.env.reset(**kwargs)
        return obs
    def step(self, ac):
        obs, reward, done, info = self.env.step(ac)
        if self.noop_given is not None:
            # report the noop count exactly once, on the first real step
            info['noop_start'] = self.noop_given
            self.noop_given = None
        return obs, reward, done, info
class FrameStack(gym.Wrapper):
    """Stacks the last n_stacks frames along the channel axis.

    This is the original frame stacker that works by making duplicates of the
    frames; for large numbers of frames this can be quite slow.
    Input frames must be uint8 in (h, w, c) order; output shape is
    (h, w, c * n_stacks) with the most recent frame first.
    """
    def __init__(self, env, n_stacks=4):
        super().__init__(env)
        assert len(env.observation_space.shape) == 3, "Invalid shape {}".format(env.observation_space.shape)
        assert env.observation_space.dtype == np.uint8, "Invalid dtype {}".format(env.observation_space.dtype)
        h, w, c = env.observation_space.shape
        # shape is (h, w, c): channels are the trailing axis, so this enforces
        # channels-LAST. The previous message ("Must have channels first.")
        # contradicted both the check and the class docstring.
        assert c < h, "Must have channels last."
        self.n_stacks = n_stacks
        self.original_channels = c
        self.n_channels = self.n_stacks * self.original_channels
        # deque with maxlen drops the oldest frame automatically once full
        self.stack = collections.deque(maxlen=n_stacks)
        for i in range(n_stacks):
            self._push_obs(np.zeros((h, w, c), dtype=np.uint8))
        self.observation_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=(h, w, self.n_channels),
            dtype=np.uint8,
        )
    def _push_obs(self, obs):
        # newest frame goes to the front of the stack
        self.stack.appendleft(obs)
    def get_obs(self):
        """Return the stacked frames concatenated along the channel axis."""
        return np.concatenate(self.stack, axis=-1)
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self._push_obs(obs)
        if "channels" in info:
            # channel labels repeat once per stacked frame
            info["channels"] = info["channels"] * self.n_stacks
        return self.get_obs(), reward, done, info
    def reset(self):
        obs = self.env.reset()
        # fill the whole stack with copies of the first frame
        for _ in range(self.n_stacks):
            self._push_obs(obs)
        return self.get_obs()
    def save_state(self, buffer):
        buffer["stack"] = self.stack
    def restore_state(self, buffer):
        self.stack = buffer["stack"]
class MontezumaInfoWrapper(gym.Wrapper):
    """
    Tracks the set of rooms visited during an episode by reading the room id
    straight out of Atari RAM, exposing the count via info['room_count'].
    From https://github.com/openai/random-network-distillation/blob/master/atari_wrappers.py
    """
    def __init__(self, env, room_address=3):
        """
        room_address: 3 for montezuma, 1 for pitfall
        """
        super(MontezumaInfoWrapper, self).__init__(env)
        self.room_address = room_address
        self.visited_rooms = set()
    def get_current_room(self):
        """Read the current room id from the ALE's 128-byte RAM."""
        ram = self.env.unwrapped.ale.getRAM()
        assert len(ram) == 128
        return int(ram[self.room_address])
    def step(self, action):
        obs, rew, done, info = self.env.step(action)
        room_id = self.get_current_room()
        self.visited_rooms.add(room_id)
        info['room_count'] = len(self.visited_rooms)
        if done:
            # attach the final room set to the episode summary
            if 'episode' not in info:
                info['episode'] = {}
            info['episode'].update(visited_rooms=self.visited_rooms.copy())
        return obs, rew, done, info
    def reset(self):
        self.visited_rooms.clear()
        return self.env.reset()
class EMAFrameStack(gym.Wrapper):
    """
    Maintain EMA of previous states with different alpha values: slot i holds
    an exponential moving average of past frames with alpha = 1 / gamma**i
    (slot 0 is the current frame since alpha = 1).
    NOTE(review): __init__ unpacks the observation shape as (c, h, w) yet
    _push_obs/reset index frames as obs[:, :, 0] (HWC) — the two conventions
    conflict; confirm the expected input layout against the wrapper chain.
    """
    def __init__(self, env, n_stacks=4, gamma=2.0):
        super().__init__(env)
        assert len(env.observation_space.shape) == 3, "Invalid shape {}".format(env.observation_space.shape)
        assert env.observation_space.dtype == np.uint8, "Invalid dtype {}".format(env.observation_space.dtype)
        c, h, w = env.observation_space.shape
        assert c in [1, 3], "Invalid shape {}".format(env.observation_space.shape)
        self.n_stacks = n_stacks
        self.original_channels = c
        self.n_channels = self.n_stacks * self.original_channels
        self.gamma = gamma
        # float accumulator; quantized to uint8 only when observations are read
        self.stack = np.zeros((self.n_channels, h, w), dtype=np.float32)
        self.observation_space = gym.spaces.Box(
            low=0,
            high=255,
            shape=(self.n_channels, h, w),
            dtype=np.uint8,
        )
    def _push_obs(self, obs):
        assert self.original_channels == 1, "Stacking does not support color at the moment."
        # alpha is ema
        for i in range(self.n_stacks):
            alpha = 1/(self.gamma ** i)
            self.stack[i] = self.stack[i] * (1-alpha) + obs[:, :, 0] * alpha
    def _get_obs(self):
        return np.clip(self.stack, 0, 255).astype(np.uint8)
    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self._push_obs(obs)
        if "channels" in info:
            info["channels"] = info["channels"] * self.n_stacks
        return self._get_obs(), reward, done, info
    def reset(self):
        obs = self.env.reset()
        # initialize every EMA slot with the first frame
        for i in range(self.n_stacks):
            self.stack[i] = obs[:, :, 0]
        return self._get_obs()
    def save_state(self, buffer):
        buffer["stack"] = self.stack
    def restore_state(self, buffer):
        self.stack = buffer["stack"]
class FrameStack_Lazy(gym.Wrapper):
    # Adapted from openai/baselines atari_wrappers, channels-first layout.
    def __init__(self, env, k):
        """Stack k last frames.
        Returns lazy array, which is much more memory efficient.
        See Also
        --------
        baselines.common.atari_wrappers.LazyFrames
        """
        gym.Wrapper.__init__(self, env)
        self.k = k
        self.frames = collections.deque([], maxlen=k)
        # The stacked observation multiplies the channel axis by k.
        c, *rest = env.observation_space.shape
        self.observation_space = gym.spaces.Box(
            low=0, high=255,
            shape=(c * k, *rest),
            dtype=env.observation_space.dtype)

    def reset(self):
        first = self.env.reset()
        # Fill the whole deque with the first frame so _get_ob is valid immediately.
        self.frames.extend(first for _ in range(self.k))
        return self._get_ob()

    def step(self, action):
        frame, reward, done, info = self.env.step(action)
        self.frames.append(frame)
        return self._get_ob(), reward, done, info

    def _get_ob(self):
        assert len(self.frames) == self.k
        return LazyFrames(list(self.frames))
class LazyFrames(object):
    """Memory-efficient view over a list of stacked frames.

    Shares the underlying per-frame arrays between observations and only
    concatenates them (once, cached) when the object is actually used as an
    array -- a large saving for DQN-style replay buffers.
    Adapted from openai/baselines atari_wrappers.
    """

    def __init__(self, frames):
        self._frames = frames
        self._out = None

    def _force(self):
        # Concatenate lazily on first access, then drop the frame list so the
        # shared per-frame arrays can be released when no longer referenced.
        if self._out is None:
            self._out = np.concatenate(self._frames, axis=0)
            self._frames = None
        return self._out

    def __array__(self, dtype=None):
        arr = self._force()
        return arr if dtype is None else arr.astype(dtype)

    def __len__(self):
        return len(self._force())

    def __getitem__(self, i):
        return self._force()[i]

    def count(self):
        """Return the size of the last axis of the stacked array."""
        stacked = self._force()
        return stacked.shape[stacked.ndim - 1]

    def frame(self, i):
        """Return slice i along the last axis."""
        return self._force()[..., i]
def cast_down(x: Union[str, float, int]):
    """
    Try to convert string / float into an integer, float, or string, in that order...

    Bug fix: the previous implementation compared ``int(x) == x`` directly,
    which is always False when x is a string (``3 != "3"`` in Python), so
    numeric strings were never converted despite the documented intent.
    """
    # Exact integer: ints, integral floats (4.0) and integer strings ("3").
    # int("3.5") raises ValueError; int(4.5) truncates, hence the equality
    # check against the numeric value of x.
    try:
        i = int(x)
        if i == x or i == float(x):
            return i
    except (ValueError, TypeError, OverflowError):
        pass
    # Numeric but not integral: 4.5, "3.5".
    try:
        return float(x)
    except (ValueError, TypeError):
        pass
    return str(x)
def get_wrapper(env, wrapper_type) -> Union[gym.Wrapper, None]:
"""
Returns first wrapper matching type in environment, or none.
"""
while True:
if type(env) == wrapper_type:
return env
try:
env = env.env
except:
return None | maitchison/PPO | rl/wrappers.py | wrappers.py | py | 59,577 | python | en | code | 14 | github-code | 1 | [
{
"api_name": "gym.Wrapper",
"line_number": 14,
"usage_type": "attribute"
},
{
"api_name": "gym.Env",
"line_number": 19,
"usage_type": "attribute"
},
{
"api_name": "math.log",
"line_number": 44,
"usage_type": "call"
},
{
"api_name": "numpy.ndarray",
"line_numb... |
41547307 | from pathlib import Path
import subprocess
config_path = str((Path(__file__).parent/'sample_config').absolute())
src_path = str((Path(__file__).parent/'empty_article.tex').absolute())
def test_config_file_reading(tmpdir):
    """
    Check that a config file provided on the command line is read.

    This is a cli-only behaviour, hence difficult to test directly.
    Here we use a config file asking for a non-existent theme, then
    check that a warning appears on stderr.
    """
    # Run plastex in a scratch directory so output files do not pollute the repo.
    with tmpdir.as_cwd():
        # check=True makes the test fail outright if plastex exits non-zero.
        out = subprocess.run(['plastex', '-c', config_path, src_path], check=True,
                             stderr=subprocess.PIPE).stderr.decode()
        assert 'WARNING: Using default renderer for document-layout' in out
| plastex/plastex | unittests/ConfigFileReading.py | ConfigFileReading.py | py | 729 | python | en | code | 240 | github-code | 1 | [
{
"api_name": "pathlib.Path",
"line_number": 5,
"usage_type": "call"
},
{
"api_name": "pathlib.Path",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "subprocess.run",
"line_number": 16,
"usage_type": "call"
},
{
"api_name": "subprocess.PIPE",
"line_nu... |
36028396719 | from django.urls import path, include
from .views import main, UserListView, CreateUserView, UserDetailsView, LoginView, RefreshTokenView
urlpatterns = [
path('', main),
path('user', UserListView.as_view()),
path('refresh-token', RefreshTokenView.as_view()),
path('user/<int:id>', UserDetailsView.as_view()),
path('login', LoginView.as_view()),
path('create-user', CreateUserView.as_view())
] | gabrigomez/django-api | django_project/api/urls.py | urls.py | py | 421 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "django.urls.path",
"line_number": 5,
"usage_type": "call"
},
{
"api_name": "views.main",
"line_number": 5,
"usage_type": "argument"
},
{
"api_name": "django.urls.path",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "views.UserListView.as_v... |
30454792138 | from pathlib import Path
import os, sys
from dash import Dash, html, dcc, Input, Output, callback
import pandas as pd
import plotly.express as px
def read_data(src_file):
    """Load the OHLC CSV at *src_file* into a pandas DataFrame."""
    return pd.read_csv(src_file)
def create_time_series(data):
    """Build a Date-vs-Open scatter figure for *data*, display it, and return it."""
    figure = px.scatter(data, x='Date', y='Open')
    figure.show()
    return figure
if __name__ == '__main__':
project_folder = os.path.join(Path(__file__).parents[0], 'ohlc_data')
filename = r'test.csv'
src_file = os.path.join(project_folder, filename)
data = read_data(src_file)
fig = px.scatter(x=[0, 1, 2, 3, 4], y=[0, 1, 4, 9, 16])
fig.show() | ojudz08/Projects | api/polygon_rest_api/time_series.py | time_series.py | py | 629 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "pandas.read_csv",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "plotly.express.scatter",
"line_number": 16,
"usage_type": "call"
},
{
"api_name": "plotly.express",
"line_number": 16,
"usage_type": "name"
},
{
"api_name": "os.path.join",
... |
17071084396 | from sklearn.linear_model import LogisticRegression
from MyLogisticRegGen import MyLogisticRegGen
from my_cross_val import my_cross_val
from datasets import prepare_digits
from utils import (
report,
wrapper_args
)
import sys
def q4(argv=None):
    """Run cross-validation for logistic-regression variants on Digits.

    Parses the options (dataset, method, fold count k, latex flag) via
    wrapper_args, then for each requested (method, dataset) pair runs
    my_cross_val and prints a score report.

    Args:
        argv: argument list (e.g. sys.argv[1:]); None lets wrapper_args
            use its defaults.
    """
    dataset, method_name, k, latex = wrapper_args(
        argv, 'q4',
        ['Digits'],
        ['MyLogisticRegGen', 'LogisticRegression'])
    Digits_X, Digits_y = prepare_digits(want_noise=False)
    # Order used when 'all' is requested on the command line.
    default_order = [
        ('MyLogisticRegGen', 'Digits'),
        ('LogisticRegression', 'Digits')
    ]
    # (method name, dataset name) -> (estimator instance, X, y)
    methods = {
        ('MyLogisticRegGen', 'Digits'):
            (MyLogisticRegGen(verbose=False), Digits_X, Digits_y),
        ('LogisticRegression', 'Digits'):
            (LogisticRegression(), Digits_X, Digits_y)
    }
    if dataset == 'all':
        order = default_order
    else:
        order = [(method_name, dataset)]
    for key in order:
        name, dataset = key
        method, X, y = methods[key]
        print('==============')
        print('method: {}, dataset: {}'.format(key[0], key[1]))
        scores = my_cross_val(method, X, y, k)
        report(name, dataset, scores, latex=latex)
if __name__ == '__main__':
    # Forward the command-line arguments (minus the program name) to the experiment.
    q4(sys.argv[1:])
| craigching/csci-5521 | csci-5521-hw3/q4.py | q4.py | py | 1,228 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "utils.wrapper_args",
"line_number": 14,
"usage_type": "call"
},
{
"api_name": "datasets.prepare_digits",
"line_number": 19,
"usage_type": "call"
},
{
"api_name": "MyLogisticRegGen.MyLogisticRegGen",
"line_number": 27,
"usage_type": "call"
},
{
"api_... |
3884911703 |
from ast import Try
import requests
from controllers import buyStock,sortStocks, buyFor
if __name__ == '__main__':
    # Ticker symbols to quote via the financialmodelingprep API.
    stocks = ['AAPL','GOOGL','AMZN','TSLA','FB','TWTR','UBER','LYFT','SNAP','SHOP']
    listPrices = []
    for stock in stocks:
        # NOTE(review): the API key is hard-coded in the URL -- consider moving
        # it to an environment variable.
        url = f'https://financialmodelingprep.com/api/v3/quote-short/{stock}?apikey=62524f9a45adfcbd67b6c6f1105ee7f3'
        response = requests.get(url)
        if response.status_code == 200:
            data = response.json()
            # print(data[0])
            listPrices.append(data[0])  # keep the first element of the returned JSON list
    # Exercise 1-A: total money needed to buy one of every quoted stock.
    buyAllStock = buyStock(listPrices)
    orden = sortStocks(listPrices)  # NOTE(review): computed but never used below
    print("\033[1;33m"+"")
    print("Ejercicio 1 - A")
    print("1-A => El usuario necesita {buyAllStock} para comprar todos los stocks".format(buyAllStock=buyAllStock))
    print("")
    print("\033[;36m"+"------------------------------")
    print("Ejercicio 1 - B")
    # Exercise 1-B: how many of each stock a user-supplied amount can buy.
    monto = input("Ingrese un monto de dinero para comprar todos los stocks: ")
    for stock in listPrices:
        mount = float(monto)
        buystockFor = buyFor(mount, stock)
        print('1-B => Con $ {mount} el usuario puede comprar {buystockFor} {stock}. Resto = ${rest} '.format(mount=mount,buystockFor=buystockFor[0], stock=stock['symbol'], rest=buystockFor[1]))
| Franzcod/challenge_trii_backend | main.py | main.py | py | 1,379 | python | en | code | 1 | github-code | 1 | [
{
"api_name": "requests.get",
"line_number": 16,
"usage_type": "call"
},
{
"api_name": "controllers.buyStock",
"line_number": 23,
"usage_type": "call"
},
{
"api_name": "controllers.sortStocks",
"line_number": 25,
"usage_type": "call"
},
{
"api_name": "controllers.... |
11641058565 | import os
import pickle
import mediapipe as mp
import cv2
import matplotlib.pyplot as plt
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
mp_drawing_styles = mp.solutions.drawing_styles
hands = mp_hands.Hands(static_image_mode=True, min_detection_confidence=0.3)
DATA_DIR = './rawdata'
data = []
labels = []
def multilandmarks(results):
    """Flatten detected hand landmarks into one normalised coordinate list.

    For each detected hand, all landmark x/y values are collected first and
    then every coordinate is shifted by the minimum seen so far. Note the
    minima accumulate across hands (matching the original behaviour): a
    second hand is normalised against the combined landmark set.
    """
    xs = []
    ys = []
    flat = []
    for hand_landmarks in results.multi_hand_landmarks:
        for lm in hand_landmarks.landmark:
            xs.append(lm.x)
            ys.append(lm.y)
        # Offsets come from all landmarks gathered so far (this hand plus
        # any previous ones).
        x_off = min(xs)
        y_off = min(ys)
        for lm in hand_landmarks.landmark:
            flat.append(lm.x - x_off)
            flat.append(lm.y - y_off)
    return flat
# Each sub-directory of DATA_DIR is one class label; every file inside is an image.
for dir_ in os.listdir(DATA_DIR):
    for img_path in os.listdir(os.path.join(DATA_DIR, dir_)):
        data_list = []
        img = cv2.imread(os.path.join(DATA_DIR, dir_, img_path))
        # OpenCV loads BGR; convert to RGB before handing the frame to MediaPipe.
        img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        results = hands.process(img_rgb)
        # Keep only images where at least one hand was detected.
        if results.multi_hand_landmarks:
            data_list = multilandmarks(results)
            data.append(data_list)
            labels.append(dir_)
# Persist the extracted features and labels for the training step.
f = open('dataset.pickle', 'wb')
pickle.dump({'data': data, 'labels': labels}, f)
f.close()
f.close()
| Ajyarra98/SPN_team12 | preprocessing.py | preprocessing.py | py | 1,531 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "mediapipe.solutions",
"line_number": 9,
"usage_type": "attribute"
},
{
"api_name": "mediapipe.solutions",
"line_number": 10,
"usage_type": "attribute"
},
{
"api_name": "mediapipe.solutions",
"line_number": 11,
"usage_type": "attribute"
},
{
"api_nam... |
12623753130 | from .models import Booking
from django import forms
class BookingForm(forms.ModelForm):
    """Form for creating a table booking with a fixed set of time slots."""

    class Meta:
        model = Booking
        fields = (
            'guests', 'date', 'time',
            'first_name', 'last_name', 'email', 'requirements'
        )
        widgets = {
            # Render the date field with the browser's native date picker.
            'date': forms.DateInput(attrs={'type': 'date'}),
        }

    # (stored 24h value, displayed label) pairs for the booking-time dropdown.
    time_choices = [
        ('16:00', '04:00 PM'),
        ('17:00', '05:00 PM'),
        ('18:00', '06:00 PM'),
        # Bug fix: 19:00 was mislabelled '09:00 PM' (duplicating the 21:00
        # label); 19:00 is 07:00 PM.
        ('19:00', '07:00 PM'),
        ('20:00', '08:00 PM'),
        ('21:00', '09:00 PM'),
    ]
    time = forms.ChoiceField(
        choices=time_choices,
        widget=forms.Select(attrs={'class': 'form-control'}),
    )
| lucijahajdu/lucia-trattoria | booking/forms.py | forms.py | py | 699 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "django.forms.ModelForm",
"line_number": 5,
"usage_type": "attribute"
},
{
"api_name": "django.forms",
"line_number": 5,
"usage_type": "name"
},
{
"api_name": "models.Booking",
"line_number": 7,
"usage_type": "name"
},
{
"api_name": "django.forms.Dat... |
25299334409 | # coding=utf-8
from selenium import webdriver
import time
import requests
from yundama.dama import indetify
from selenium.webdriver.chrome.options import Options
chrome_options = Options()
# Configure headless mode for the Chrome browser:
# the browser then shows no visible window; on Linux, omitting this line
# makes startup fail when the system has no display support.
# chrome_options.add_argument('--headless')
chrome_options.add_argument('blink-settings=imagesEnabled=false')  # Skip loading images to speed things up
# phantomjs_driver = r"E:\phantomjs-2.1.1-windows\bin\phantomjs.exe"
chrome_driver = r"E:\chromedriver.exe"
# NOTE(review): chrome_options is configured above but never passed to the
# driver -- confirm whether it should be webdriver.Chrome(..., options=chrome_options).
browser = webdriver.Chrome(executable_path=chrome_driver)
browser.get("https://yuese64.com/")
# Save the landing page locally for inspection.
with open("index.html", "w", encoding="utf-8") as f:
    f.write(browser.page_source)
    print(("=" * 10) + "首页保存成功")
# Open the login dialog and fill in the credentials.
browser.find_element_by_id("login").click()
time.sleep(3)
browser.find_element_by_id("login_username").send_keys("wzq5517992@163.com")
browser.find_element_by_id("login_pass").send_keys("wzq5517992")
# Solve the captcha: download the image and send it to the yundama service.
captcha_image_url = browser.find_element_by_xpath(".//div[@class='image']/img").get_attribute("src")
captcha_content = requests.get(captcha_image_url).content
captcha_code = indetify(captcha_content)
print("验证码的识别结果为:", captcha_code)
time.sleep(5)
# Enter the recognised captcha code and submit the login form.
browser.find_element_by_id("login_code").send_keys(captcha_code)
browser.find_element_by_class_name("submit").click()
# Collect the session cookies after logging in.
cookies = {i["name"]: i["value"] for i in browser.get_cookies()}
print(cookies)
browser.get("https://yuese64.com/videos/24177/55d047cd3d624316843b211e7844a845/")
time.sleep(5)
browser.get("https://yuese64.com/videos/24177/55d047cd3d624316843b211e7844a845/")
print(browser.page_source)
time.sleep(3)
# browser.quit()
| wzqq5517992/pythonReptileBasic | 04study/code/login_douban_wzq.py | login_douban_wzq.py | py | 1,763 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "selenium.webdriver.chrome.options.Options",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "selenium.webdriver.Chrome",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "selenium.webdriver",
"line_number": 17,
"usage_type": "name"
},
{
... |
38204007026 | from ..settings import LOGGING
from ..httpclient.client import Client
import logging.config
import urllib3, json, os
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
logging.config.dictConfig(LOGGING)
logger = logging.getLogger(__name__)
class Vnfd(object):
    """VNF Descriptor (VNFD) read-only wrapper for the OSM NBI.

    Wraps the `/osm/vnfpkgm/v1/vnf_packages` endpoints of OSM's Northbound
    Interface, authenticating every request with a bearer token.

    Attributes:
        bearer_token (str): The OSM Authorization Token.

    Args:
        token (str): The OSM Authorization Token.
    """

    def __init__(self, token):
        """VNF Descriptor Class Constructor."""
        self.__client = Client(verify_ssl_cert=False)
        self.bearer_token = token
        components = os.environ.get('OSM_COMPONENTS')
        if components is None:
            print('NO OSM_COMPONENTS in ENV')
        else:
            self.OSM_COMPONENTS = json.loads(components)

    def _request(self, endpoint):
        # Shared GET helper: attach auth headers and log the full exchange.
        headers = {"Authorization": "Bearer {}".format(self.bearer_token), "Accept": "application/json"}
        response = self.__client.get(endpoint, headers)
        logger.debug("Request `GET {}` returns HTTP status `{}`, headers `{}` and body `{}`."
                     .format(response.url, response.status_code, response.headers, response.text))
        return response

    def get_list(self):
        """Fetch a list of the VNF descriptors.

        Equivalent OSM CLI: ``osm vnfd-list``.

        Returns:
            object: A requests object that includes the list of VNFDs.
        """
        return self._request(
            '{}/osm/vnfpkgm/v1/vnf_packages'.format(self.OSM_COMPONENTS.get('NBI-API')))

    def get(self, vnfd_uuid=None):
        """Fetch details of a specific VNF descriptor.

        Equivalent OSM CLI: ``osm vnfd-show <name>``.

        Args:
            vnfd_uuid (str): The UUID of the VNFD to fetch details for.

        Returns:
            object: A requests object.
        """
        return self._request(
            '{}/osm/vnfpkgm/v1/vnf_packages/{}'.format(self.OSM_COMPONENTS.get('NBI-API'), vnfd_uuid))
| sonata-nfv/son-monitor | vnv_manager/app/api/management/commands/osm/nbiapi/vnfd.py | vnfd.py | py | 3,322 | python | en | code | 5 | github-code | 1 | [
{
"api_name": "urllib3.disable_warnings",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "urllib3.exceptions",
"line_number": 6,
"usage_type": "attribute"
},
{
"api_name": "logging.config.config.dictConfig",
"line_number": 7,
"usage_type": "call"
},
{
"ap... |
7422977067 | import numpy as np
from typing import Union, Optional, Any
from ..utils.vector import Vector
# Grid coordinate of a cell (alias of the project's Vector type).
Position = Vector
# Discrete action identifier.
Action = int
# Edge/step cost; either integral or fractional.
Cost = Union[int, float]
# Adjacency list: node index -> [(neighbour index, edge cost), ...].
AdjList = dict[int, list[tuple[int, Cost]]]
# Dense adjacency representation.
AdjMatrix = np.ndarray
# Mapping from a position to its node index.
PosToIdx = dict[Position, int]
# Maze as a grid of cell strings.
TextMaze = list[list[str]]
# Per-game result record keyed by name; values may be absent (None).
GameResult = dict[str, Optional[Any]]
# Arbitrary keyword-parameter mapping.
Params = dict[str, Any]
| ShkarupaDC/game_ai | src/consts/types.py | types.py | py | 338 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "utils.vector.Vector",
"line_number": 6,
"usage_type": "name"
},
{
"api_name": "typing.Union",
"line_number": 8,
"usage_type": "name"
},
{
"api_name": "numpy.ndarray",
"line_number": 10,
"usage_type": "attribute"
},
{
"api_name": "typing.Optional",
... |
29453264686 | #
from typing import Union
from torch_geometric.typing import Adj, PairTensor, OptTensor
import torch
from torch import Tensor
from torch_geometric.nn.conv import GraphConv
#
class SpatialGraphConv(GraphConv):
    r"""
    Extension to Pytorch Geometric GraphConv
    which is implementing the operator of
    `"Weisfeiler and Leman Go Neural:
    Higher-order Graph Neural Networks"
    <https://arxiv.org/abs/1810.02244>`_ paper,
    adding the difference of x_i and x_j in the propagation:
    .. math::
        \mathbf{x}^{\prime}_i = \mathbf{\Theta}_1 \mathbf{x}_i +
        \mathbf{\Theta}_2 \sum_{j \in \mathcal{N}(i)} e_{j,i} \cdot (
        \mathbf{x}_j - \mathbf{x}_i)
    """
    def forward(self,
                x: Union[Tensor, PairTensor],
                edge_index: Adj,
                edge_weight: OptTensor = None,
                **kwargs) -> Tensor:
        """Aggregate (x_j - x_i) edge messages, then apply the linear maps."""
        if isinstance(x, Tensor):
            # Promote to a (source, target) pair for bipartite-style message passing.
            x: PairTensor = (x, x)
        out = self.propagate(edge_index, x=x,
                             edge_weight=edge_weight,
                             size=None)
        out = self.lin_rel(out)
        x_r = x[1]
        if x_r is not None:
            # Root/self term: Theta_1 * x_i added to the aggregated neighbourhood term.
            out += self.lin_root(x_r)
        return out

    def message(self, x_i: Tensor, x_j: Tensor, edge_weight: OptTensor) -> Tensor:
        # Per-edge message: (optionally edge-weighted) feature difference x_j - x_i.
        return x_j-x_i if edge_weight is None else edge_weight.view(-1, 1) * (x_j-x_i)
#
# ============= #
# ### TEST #### #
# ============= #
def _test(
        cuda=False,
        seed=1,
):
    """Smoke test: run SpatialGraphConv on a tiny random graph and check shapes."""
    import sys
    use_cuda = cuda and torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    torch.manual_seed(seed)
    num_nodes = 4
    dim = 16
    feats = torch.randn(num_nodes, dim).to(device)
    edge_index = torch.tensor([[0, 1, 2, 2, 3, 3], [0, 0, 1, 1, 3, 2]]).to(device)
    edge_weight = torch.randn(edge_index.size(-1)).to(device)
    conv = SpatialGraphConv(dim, dim).to(device)
    try:
        feats = conv(feats, edge_index, edge_weight)
        assert feats.size() == (num_nodes, dim)
    except Exception as e:
        # Re-raise with the original traceback preserved for easier debugging.
        raise type(e)(str(e)).with_traceback(sys.exc_info()[2])
| jokofa/NRR | lib/model/networks/spatial_graph_conv.py | spatial_graph_conv.py | py | 2,077 | python | en | code | 2 | github-code | 1 | [
{
"api_name": "torch_geometric.nn.conv.GraphConv",
"line_number": 11,
"usage_type": "name"
},
{
"api_name": "typing.Union",
"line_number": 28,
"usage_type": "name"
},
{
"api_name": "torch.Tensor",
"line_number": 28,
"usage_type": "name"
},
{
"api_name": "torch_geo... |
25859782549 | from dataclasses import dataclass
import time
import enum
import logging
from contextlib import contextmanager
import os
from typing import Optional, Any # This is support for type hints
from log_calls import log_calls # For logging errors and stuff
#from setting import SettingDescription
from . import nlp, audio, anim
@dataclass
class Scene:
    """This represents a scene. We can have multiple scenes in a setting.
    Each scene has an id, name, description, and a list of characters involved."""
    id: int
    name: str
    description: str # conversation description
    characters: list[Any]
    history: str = ""  # running transcript of the conversation so far
    # NOTE: dataclasses solve this constructor. ember knew what they were doing and louis is dumb
    # revisit this and unbreak it
    def prompt_for_gpt3(self) -> str:
        """Return the entire prompt to GPT3: description + character descs + history."""
        return f"{self.description}{' '.join(c.desc for c in self.characters)}\n{self.history}"
    def animate(self, character, charLine: str):
        """Used to animate a specific character based on the text input
        onto a specific animation node's audio stream listener"""
        # Generate response
        # NOTE(review): updatedHistory is built but never written back to
        # self.history -- confirm whether animate() should record the line.
        updatedHistory = self.history+f"\n{character.name}:{charLine}\n"
        responseEmotion = nlp.get_emotion(charLine)
        # Generate wav, selecting wav file
        wavPath = audio.generate_wav(charLine, responseEmotion, lang="en-US", outputPath=f"/scripts/ai/ai_{self.name}")
        # Execute animation
        anim.animate(wavPath, character.primitivePath)
        # audio.cleanup(wavPath, outputPath)
        # Format response
        # NOTE(review): responseData is unused -- dead code or a missing return?
        responseData = {"responseText": charLine}
    def make_speak(self, character, primitivePath) -> str:
        """Tell a character something and speak its response to primitivePath, returning what we spoke as text"""
        prompt = self.prompt_for_gpt3()
        textResponse, updatedHistory = self._model_does_reply_thingy(prompt, character)  # Generate response
        responseEmotion = nlp.get_emotion(textResponse)
        # Only the raw response is appended; updatedHistory above is unused.
        self.history += textResponse
        print(f"Response: {textResponse} with computed emotion {responseEmotion}")
        wavPath = audio.generate_wav(textResponse, "en-US-TonyNeural")  # Generate wav
        print(f"{character.name}: {textResponse}")
        anim.animate(wavPath, primitivePath)  # Execute animation
        # audio.cleanup(wavPath, outputPath) # Erases after speaking
        return textResponse
    def save_history(self, outputDir="recording/script_output/"):
        """Save the conversation to a history file in recording/script_output/{hid}_history.txt"""
        dirname = os.path.dirname(__file__)
        histdir = os.path.join(dirname, f"../{outputDir}")
        if not os.path.exists(histdir):
            os.mkdir(histdir)
        # File name combines the scene id and the current unix time so that
        # repeated runs do not collide.
        historyPath = os.path.join(histdir, f"{str(self.id) + str(time.time())}_history.txt")
        with open(historyPath, "w") as historyFile:
            historyFile.write(self.history)
        print(f"just wrote the history:\n{self.history}")
    def _model_does_reply_thingy(self, promptText: str, character):
        """User gives an input to GPT3, and gets a response and the updated history."""
        #print(character)
        narrative_next = f"\nYou: {promptText}\n{character.name}:"
        responsePrompt = self.description + narrative_next
        # responsePrompt = f"""
        # {sessionData[sessionDescription]}
        # {characterDescription}
        # You: {promptText}
        # {characterName}:"""
        response = nlp.get_completion(self.history + responsePrompt)
        # print("DEBUG PROMPT: ", examplePrompt + responsePrompt)
        # print("\n\n")
        # print("DEBUG RESPONSE: ", response)
        # responseEmotion = get_completion(f"""
        # Sentence:
        # Emotion:
        # ###
        # Sentence:
        # Emotion:
        # ###
        # """)
        updatedHistory = responsePrompt + response
        return response, updatedHistory
    def __str__(self):
        return repr(self)
@contextmanager
def make_scene(id, name, description, characters):
"""makes sure a scene's save history is always saved!"""
# resource = Scene(*args, **kwds)
resource = Scene(id,name,description,characters)
try:
yield resource
finally:
# ALWAYS save the history, no matter what.
resource.save_history() | Halcyox/XRAgents | xragents/scene.py | scene.py | py | 4,448 | python | en | code | 3 | github-code | 1 | [
{
"api_name": "typing.Any",
"line_number": 21,
"usage_type": "name"
},
{
"api_name": "os.path.dirname",
"line_number": 66,
"usage_type": "call"
},
{
"api_name": "os.path",
"line_number": 66,
"usage_type": "attribute"
},
{
"api_name": "os.path.join",
"line_numb... |
1421544026 | from .data import DataSet, Options, enums
"""
Example usage.
"""
if __name__ == '__main__':
# load data:
data = DataSet.Dataset("/path/to/data")
#visualize with plt
data.subfolders[0].datapoints[0].visualize()
#save to location
data.subfolders[0].datapoints[0].save("/home/path/to/save/to", "filename_without_extension")
#change options
options = Options.Options(
visiualization_option=enums.visiualization_option.RANDOM,
save_option=enums.save_option.RANDOM,
amount=5,
indices=[1],
circle_radius=2,
line_thickness=2,
circle_colors=[(100,255,255), (255,255,100), (255,100,255), (100,100,100)],
line_colors=[(100,255,0), (0,255,100)]
)
#pass options to vis/save:
data.subfolders[1].datapoints[1].visualize(options) | JustusDroege/grasp_dataset_convenience_pack | main.py | main.py | py | 931 | python | en | code | 4 | github-code | 1 | [
{
"api_name": "data.DataSet.Dataset",
"line_number": 8,
"usage_type": "call"
},
{
"api_name": "data.DataSet",
"line_number": 8,
"usage_type": "name"
},
{
"api_name": "data.subfolders",
"line_number": 11,
"usage_type": "attribute"
},
{
"api_name": "data.subfolders"... |
13110433306 | import json
def txt_to_tar_dict(src):
    """
    Parse course lines into a {title: requirements} dictionary.

    Each line of *src* has the form
    ``COURSE TITLE/PREREQ1,PREREQ2,.../COREQS/RESTRICTIONS``;
    empty fields become empty lists.
    """
    def _split(field):
        # '' means "no courses listed" -> empty list.
        return field.split(',') if field else []

    courses = {}
    for line in src:
        title, pre, co, res = line.strip().split('/')
        courses[title] = {
            'prerequisites': _split(pre),
            'corequisites': _split(co),
            'restrictions': _split(res)
        }
    return courses
def tar_to_src_dict(tar_dict):
    """
    Invert a course -> requirements mapping: for every course C listed as a
    prerequisite/corequisite/restriction of course K, record K under C.

    Every mentioned course gets an entry (possibly with all-empty lists);
    links are only recorded when both courses share the same 4-character
    subject prefix, to avoid writing wrong courses to the dict.
    """
    def _blank():
        # Fresh empty record for a newly seen course.
        return {
            'prerequisites': [],
            'corequisites': [],
            'restrictions': []
        }

    inverted = {}
    for course, categories in tar_dict.items():
        if course not in inverted:
            inverted[course] = _blank()
        for category, listed in categories.items():
            for other in listed:
                if other not in inverted:
                    inverted[other] = _blank()
                # Same-subject courses only (first 4 characters match).
                if other[:4] == course[:4]:
                    inverted[other][category].append(course)
    return inverted
if __name__ == '__main__':
    sbj = 'bio'
    # Input course list plus the two JSON outputs:
    # *_tar.json maps course -> requirements, *_src.json is the inverse.
    txt_file = sbj + '_courses.txt'
    tar_file = sbj + '_tar.json'
    src_file = sbj + '_src.json'
    with open(txt_file, 'r') as src, open(tar_file, 'w+') as t, open(src_file, 'w+') as s:
        tar_dict = txt_to_tar_dict(src)
        json.dump(tar_dict, t, ensure_ascii=False, indent=4)
        json.dump(tar_to_src_dict(tar_dict), s, ensure_ascii=False, indent=4)
| RolandRiachi/PrerequisitesGraph | courseData/coursePages/txt_to_var.py | txt_to_var.py | py | 2,089 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "json.dump",
"line_number": 67,
"usage_type": "call"
},
{
"api_name": "json.dump",
"line_number": 68,
"usage_type": "call"
}
] |
26743105833 | """
Audio beacon and code transmission of Module 2
"""
import time
import serial
import pyaudio
import numpy as np
import matplotlib.pyplot as plt
import serial.tools.list_ports
# # Sampling data
Fs = 48000
# Time_recording = S # in seconds
# N_mic = 5 # number of mics/channels
# N = Time_recording * Fs # number of frames per mic
# N_total = N_mic * N # total number of samples
# filename_loc = '380x100'
# filename = f'Mic-Data\Mic-Data-V1\kitt_carrier_2250_bit_3k_{filename_loc}'
# Chosen carrier=2250 Hz, bit=3000 Hz, and rep=1250
def start_pairing():
    """
    This function starts the pairing mode to KITT,
    transmission takes place over port 7.

    Opens the Bluetooth serial link, configures the audio beacon
    (carrier 2250 Hz, bit 3000 Hz, repetition count 1250, gold code)
    and enables it for 3 seconds.
    """
    # get port info
    ports = serial.tools.list_ports.comports()
    for i in range(len(ports)):
        print(f"{i} - {ports[i].description}")
    comport = 'COM7'
    # comport = ports[int(input(f"Enter device index: \n"))].device
    global serial_port
    # getting access to bluetooth link
    try:
        serial_port = serial.Serial(comport, 115200, rtscts=True)
        print("Port details ->", serial_port)
    except serial.SerialException as var:
        print("Error has occured")
        # Bug fix: print the exception itself, not the literal string "var".
        print(var)
        # Without an open port every write below would raise NameError,
        # so bail out early.
        return
    else:
        print("connected, serial port opened")
    # Carrier freq = 2250 Hz (comment previously said 7 kHz, contradicting the value)
    carrier_frequency = (2250).to_bytes(2, byteorder='big')
    serial_port.write(b'F' + carrier_frequency + b'\n')
    time.sleep(0.1)
    # Bit freq = 3000 Hz (comment previously said 2 kHz, contradicting the value)
    bit_frequency = (3000).to_bytes(2, byteorder='big')
    serial_port.write(b'B' + bit_frequency + b'\n')
    time.sleep(0.1)
    # Repetition count = bit freq / repetition freq
    repetition_count = (1250).to_bytes(2, byteorder='big')
    serial_port.write(b'R' + repetition_count + b'\n')
    time.sleep(0.1)
    # Gold code
    code = 0x3355A780.to_bytes(4, byteorder='big')
    serial_port.write(b'C' + code + b'\n')
    time.sleep(0.1)
    # On
    serial_port.write(b'A1\n')
    # Speaker playback duration
    time.sleep(3)
    return
def mic_recording(S):
    """
    Record S seconds of 5-channel audio from the AudioBox 1818 VSL interface.

    Args:
        S: recording duration in seconds.

    Returns:
        numpy.ndarray: flat int16 array of 5 * S * Fs interleaved samples.
    """
    # Create instance of PyAudio
    pyaudio_handle = pyaudio.PyAudio()
    # List the index and names of all audio devices visible to PyAudio
    for i in range(pyaudio_handle.get_device_count()):
        device_info = pyaudio_handle.get_device_info_by_index(i)
        print(i, device_info['name'])
    # Automate the correct PyAudio device index by matching any of the known
    # AudioBox name variants (English/Dutch Windows, truncated/full names).
    desired_device_name1 = "Microphone (AudioBox 1818 VSL)"
    desired_device_name2 = "Microphone (2- AudioBox 1818 VS"
    desired_device_name3 = "Microphone (2- AudioBox 1818 VSL)"
    desired_device_name4 = "Microfoon (AudioBox 1818 VSL)"
    desired_device_name5 = "Microfoon (2- AudioBox 1818 VS"
    desired_device_name6 = "Microfoon (2- AudioBox 1818 VSL)"
    for i in range(pyaudio_handle.get_device_count()):
        device_info = pyaudio_handle.get_device_info_by_index(i)
        if (device_info["name"] == desired_device_name1 or
                device_info["name"] == desired_device_name2 or
                device_info["name"] == desired_device_name3 or
                device_info["name"] == desired_device_name4 or
                device_info["name"] == desired_device_name5 or
                device_info["name"] == desired_device_name6):
            device_index = i
            break
    # NOTE(review): if no device name matches, device_index is never assigned
    # and the open() below raises NameError -- confirm intended behaviour.
    stream = pyaudio_handle.open(input_device_index=device_index,
                                 channels=5,
                                 format=pyaudio.paInt16,
                                 rate=Fs,
                                 input=True)
    Time_recording = S  # in seconds
    N_mic = 5  # number of mics/channels
    N = Time_recording * Fs  # number of frames per mic
    N_total = N_mic * N  # total number of samples
    # Recording and storing mic data
    print('recording')
    samples = stream.read(N)
    print('recording finish')
    data = np.frombuffer(samples, dtype='int16')
    # with open(f'(unknown).txt', 'w') as file:
    #     for sample in data:
    #         file.write("%s\n" % sample)
    #     print("Data stored")
    return data
def plotting():
    """
    Load the stored recording, plot the combined signal and the first two
    seconds of each of the five microphone channels, then save and show the figure.

    NOTE(review): this function references module-level names (N_total,
    filename_loc) that are commented out near the top of the file, so calling
    it as-is raises NameError -- confirm/restore those definitions first.
    """
    # Plotting the microphone data
    dataTotal = np.loadtxt(f'(unknown).txt')
    # De-interleave the 5 channels (samples are stored round-robin).
    data0 = dataTotal[0:N_total:5]
    data1 = dataTotal[1:N_total:5]
    data2 = dataTotal[2:N_total:5]
    data3 = dataTotal[3:N_total:5]
    data4 = dataTotal[4:N_total:5]
    # Create an array for time based on the length of the data
    time_total = np.arange(len(dataTotal)) / Fs
    time = np.arange(len(data2)) / Fs
    # Plot Datatotal
    plt.plot(time_total, dataTotal)
    plt.xlabel('Time (seconds)')
    plt.ylabel('Amplitude')
    plt.title('Audio Recording')
    # Plot each channel
    # Create subplots for each microphone channel
    fig, axs = plt.subplots(5, 1, figsize=(8, 10))
    # Plot the data for each microphone (first 2 seconds only)
    axs[0].plot(time[:int(Fs*2)], data0[:int(Fs*2)], label='Microphone 1')
    axs[1].plot(time[:int(Fs*2)], data1[:int(Fs*2)], label='Microphone 2')
    axs[2].plot(time[:int(Fs*2)], data2[:int(Fs*2)], label='Microphone 3')
    axs[3].plot(time[:int(Fs*2)], data3[:int(Fs*2)], label='Microphone 4')
    axs[4].plot(time[:int(Fs*2)], data4[:int(Fs*2)], label='Microphone 5')
    # Set labels and title for each subplot
    for i in range(5):
        axs[i].set_ylabel('Amplitude')
        axs[i].set_title('Microphone ' + str(i + 1))
    # Set labels and title for the entire figure
    # fig.suptitle('Data of the five microphones', ha='center')
    axs[-1].set_xlabel('Time [s]')
    # Adjust spacing between subplots
    plt.tight_layout()
    # with open(f'../../../ref_ch3_V1.txt', 'w') as file:
    #     for sample in data2:
    #         file.write("%s\n" % sample)
    #     print("Data stored")
    # Export plot
    plt.savefig(f'Plots-Report/{filename_loc}_report.svg', format='svg')
    # Display the plot
    plt.show()
    return
def stop_pairing():
    """Close the serial link to the device and report the disconnect."""
    serial_port.close()
    print("Disconnected\n")
# def main():
# start_pairing()
# mic_recording()
# serial_port.write(b'A0\n') # off
# plotting()
# stop_pairing()
# return
# main()
| dlacle/EPO-4 | epo4/Module2/Module2_mic_array/AudioBeacon.py | AudioBeacon.py | py | 6,133 | python | en | code | 3 | github-code | 1 | [
{
"api_name": "serial.tools.list_ports.comports",
"line_number": 29,
"usage_type": "call"
},
{
"api_name": "serial.tools",
"line_number": 29,
"usage_type": "attribute"
},
{
"api_name": "serial.Serial",
"line_number": 39,
"usage_type": "call"
},
{
"api_name": "seri... |
21106157201 | #!/usr/bin/env python3
#
# submit_tX_tests.py
# Written: Nov 2018
# Last modified: 2019-11-09 RJH
#
# Python imports
from os import getenv
import sys
import json
import logging
import subprocess
# ======================================================================
# User settings
USE_LOCALCOMPOSE_URL = True  # POST to the local compose stack instead of door43.org
MAX_JOBS_TO_SUBMIT = 1  # safety cap: stop after this many payloads have been sent
# Can also choose ONE or NONE of the following
# OPTIONAL_JOB_LIST = ['test_WA_en_udb']
# The following field can either be a single string or a collection of strings
# OPTIONAL_JOB_STARTSWITH = 'line1'
# REVIEW_FLAG = True # Shows all the URLs again at the end
# AUTO_OPEN_IN_BROWSER = True
# Choose one of the following
#TEST_PREFIXES = ('',)
TEST_PREFIXES = ('dev-',)  # 'dev-' selects the develop deployment; '' the live one
#TEST_PREFIXES = ('', 'dev-',)
LOCAL_FILEPATH = '/mnt/SSD/uW/Software/'
# ======================================================================
# Local-compose runs only make sense against the dev configuration.
if USE_LOCALCOMPOSE_URL: assert TEST_PREFIXES == ('dev-',)
LOCAL_COMPOSE_URL = 'http://127.0.0.1:8090/'
# NOTE(review): LOCAL_FILEPATH already ends in '/', so this yields a double
# slash ('...Software//testPayloads/...'); harmless on POSIX paths.
TEST_FOLDER = f'{LOCAL_FILEPATH}/testPayloads/JSON/tX/'
# (status, payload-basename) pairs. The status tag is informational only;
# the filtering logic in the submission loop decides what actually runs.
DATA_SET = [
    # First entry is a status flag
    #   currently 'matched', 'success', or 'test'
    # Second entry is main name of .json file containing test payload
    ('PDF', 'test_tX.OBS-PDF.uW--kn_obs--master'),
    ('PDF', 'test_tX.OBS-PDF.uW--en_obs--master--no_created_from'),
    ('PDF', 'test_tX.OBS-PDF.uW--en_obs--master'),
    ('PDF', 'test_tX.OBS-PDF.Catalog--rmy-x-vwa_obs--master'),
    ('PDF', 'test_tX.OBS-PDF.Catalog--sr-Latn_obs--master'), # Fails -- might be a case problem
    # ('max', 'test_tX.HTML.maximum'),
    # ('min', 'test_tX.HTML.minimum'),
    ]
# Walk DATA_SET, filter by the optional OPTIONAL_JOB_LIST / OPTIONAL_JOB_STARTSWITH
# settings (probed via NameError since they may be commented out above), then
# POST each selected payload to the chosen webhook(s) with curl.
tested = set()
numSubmittedJobs = 0
for n, (status,testType) in enumerate(DATA_SET):
    if numSubmittedJobs >= MAX_JOBS_TO_SUBMIT: break
    # OPTIONAL_JOB_LIST may be undefined (commented out) -- treat as "no filter".
    try: job_list = OPTIONAL_JOB_LIST
    except NameError: job_list = None
    if job_list:
        if testType not in job_list: continue
    else:
        try: job_startswith = OPTIONAL_JOB_STARTSWITH
        except NameError: job_startswith = ''
        if job_startswith:
            if isinstance(job_startswith, str):
                if not testType.startswith(job_startswith): continue
            elif isinstance(job_startswith, (list,set,tuple)):
                ok = False
                for this_job_startswith_string in job_startswith:
                    if testType.startswith(this_job_startswith_string): ok = True; break
                if not ok: continue
            else: halt # deliberate NameError: unsupported OPTIONAL_JOB_STARTSWITH type
        else:
            # Adjust according to what status fields you want
            #if status in ('matched','success'): continue
            #if status != 'testNow': continue
            #if not testType.startswith('line'): continue
            pass
    tested.add( testType )
    numSubmittedJobs += 1
    for prefix in TEST_PREFIXES:
        long_prefix = 'develop' if prefix else 'git'
        webhook = LOCAL_COMPOSE_URL if USE_LOCALCOMPOSE_URL else f'https://{long_prefix}.door43.org/tx/'
        print( f"\n\n{n+1}/ {'(dev) ' if prefix else ''}{testType} to {webhook}:" )
        # '@file' tells curl to read the POST body from that file.
        jsonFilename = f'@{TEST_FOLDER}{testType}.json'
        # Use curl to actually POST the JSON to the given webhook URL
        parameters = ['curl', webhook, '-d', jsonFilename,
                        '--header', "Content-Type: application/json", '--header', "X-Gogs-Event: push",]
        myProcess = subprocess.Popen( parameters, stdout=subprocess.PIPE, stderr=subprocess.PIPE )
        programOutputBytes, programErrorOutputBytes = myProcess.communicate()
        # Process the output from curl
        if programOutputBytes:
            programOutputString = programOutputBytes.decode(encoding='utf-8', errors='replace')
            #programOutputString = programOutputString.replace( baseFolder + ('' if baseFolder[-1]=='/' else '/'), '' ) # Remove long file paths to make it easier for the user to read
            #with open( os.path.join( outputFolder, 'ScriptOutput.txt" ), 'wt', encoding='utf-8' ) as myFile: myFile.write( programOutputString )
            #print( f"Response = {programOutputString!r}" )
            if programOutputString.startswith('{'): # Assume it's a json dict
                responseDict = json.loads(programOutputString)
                if responseDict['status'] == 'queued':
                    print( " Job successfully queued" )
                # else:
                print( f"Response dict = {responseDict}" )
            else:
                print( f"Response = {programOutputString!r}" )
        if programErrorOutputBytes:
            programErrorOutputString = programErrorOutputBytes.decode(encoding='utf-8', errors='replace')
            #with open( os.path.join( outputFolder, 'ScriptErrorOutput.txt" ), 'wt', encoding='utf-8' ) as myFile: myFile.write( programErrorOutputString )
            # curl's progress meter arrives on stderr; only surface real errors.
            if not programErrorOutputString.startswith(' % Total'):
                print( f"pEOS = {programErrorOutputString!r}" )
# url = f"https://{'dev.' if prefix else ''}door43.org/u/{webURL}/"
# print(f"View result at {url}")
# if AUTO_OPEN_IN_BROWSER:
# import webbrowser
# webbrowser.open(url, new=0, autoraise=True)
# #subprocess.Popen(['xdg-open', url])
# if REVIEW_FLAG and len(tested)>1: # Don't bother if there's only one
# print(f"\n\nSUMMARY:{' (should automatically open in browser)' if AUTO_OPEN_IN_BROWSER else ''}")
# for n, webURL in enumerate(tested):
# if len(TEST_PREFIXES) > 1:
# print(f" {n+1}/"
# f" View at https://{'dev.' if TEST_PREFIXES[0] else ''}door43.org/u/{webURL}/"
# f" and at https://{'dev.' if TEST_PREFIXES[1] else ''}door43.org/u/{webURL}/")
# else:
# print(f"{n+1}/"
# f" View at https://{'dev.' if TEST_PREFIXES[0] else ''}door43.org/u/{webURL}/")
| unfoldingWord-dev/tools | tx/submit_tX_tests.py | submit_tX_tests.py | py | 5,841 | python | en | code | 8 | github-code | 1 | [
{
"api_name": "subprocess.Popen",
"line_number": 98,
"usage_type": "call"
},
{
"api_name": "subprocess.PIPE",
"line_number": 98,
"usage_type": "attribute"
},
{
"api_name": "json.loads",
"line_number": 108,
"usage_type": "call"
}
] |
26799682088 | import streamlit as st
from PIL import Image
import pandas as pd
import matplotlib.pyplot as plt
# Sidebar navigation between the three pages of the app.
add_selectbox = st.sidebar.selectbox(
    "목차",
    ("체질량 계산기", "갭마인더", "마이페이지")
)

if add_selectbox == "체질량 계산기":
    # BMI calculator page: read height/weight, compute BMI, classify it.
    st.write('#체질량 치수 계산기')
    height = st.number_input('키를 입력하시오.(cm)', value = 181, step =5)
    st.write(height,'cm')
    weight = st.number_input('체중을 입력하시오.(kg)', value = 73, step =5)
    st.write(weight,'kg')
    bmi = weight/((height/100)**2)

    def bmi_range(bmi):
        """Show a status message for the given BMI value."""
        if bmi>=25:
            st.error("비만입니다!!!")
        elif bmi >=23:
            st.success("과체중입니다.")
        elif bmi >=18.5:
            st.info("당신은 정상입니다")
        else:
            # BUG FIX: this branch is underweight (BMI < 18.5); the original
            # message said "과체중" (overweight).
            st.warning("저체중입니다!")

    if st.button('계산'):
        st.balloons()
        st.write('당신의 체질량 지수는', round(bmi,2), '입니다.')
        bmi_range(bmi)
    image = Image.open('porsche-1851246_960_720.jpg')
    st.image(image, caption='i will buy this car')

elif add_selectbox == '갭마인더':
    # Gapminder page: scatter GDP per capita vs life expectancy for one year.
    st.write('#여기는 갭마인더 입니다.')
    data = pd.read_csv('gapminder.csv')
    st.write(data)
    colors = []
    for x in data['continent']:
        if x == 'Asia':
            colors.append('tomato')
        elif x =='Europe':
            colors.append('blue')
        elif x == 'Africa':
            colors.append('olive')
        elif x == 'Americas':
            colors.append('green')
        else:
            colors.append('orange')
    # BUG FIX: the original assigned the literal string 'colors' to every row;
    # store the per-row color list that was just built instead.
    data['colors'] = colors

    # BUG FIX: the original default (2023) lies outside the slider's
    # [1952, 2005] range, which Streamlit rejects; default to the latest year.
    year = st.slider('년도를 선택하세요.', 1952, 2005, 2005, step = 5)
    st.write("year :",year)
    data = data[data["year"] == year]
    fig, ax = plt.subplots()
    # NOTE(review): the 'colors' column is built but not passed to scatter();
    # pass c=data['colors'] if per-continent coloring is desired.
    ax.scatter(data['gdpPercap'],data['lifeExp'],s=data['pop']*0.000001)
    ax.set_title("How Does Gdp per Capital relate to Life Expectancy")
    ax.set_xlabel("Gdp per Capital")
    ax.set_ylabel('Life Expectancy')
    st.pyplot(fig)
else:
    st.write('#여기는 마이페이지 입니다.')
| Dongkka912/smartMobility | home.py | home.py | py | 2,234 | python | ko | code | 0 | github-code | 1 | [
{
"api_name": "streamlit.sidebar.selectbox",
"line_number": 6,
"usage_type": "call"
},
{
"api_name": "streamlit.sidebar",
"line_number": 6,
"usage_type": "attribute"
},
{
"api_name": "streamlit.write",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "stre... |
32407624536 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import pandas as pd
import matplotlib.pyplot as plt
import math
import numpy as np
# initialize the graph
def initializeGraph():
    """Scatter all sensor coordinates and overlay the two crack polylines.

    Reads the module globals ``data``, ``plot1`` and ``plot2``.
    """
    plt.scatter(data['x'], data['y'])
    plt.plot(plot1[0], plot1[1], 'go-', label='line 1', linewidth=2)
    plt.plot(plot2[0], plot2[1], 'go-', label='line 2', linewidth=2)
    plt.show()
def initializeGraphZ():
    """Draw a vertical segment (surface z=0 down to crack depth) for each crack x.

    Pairs each crack row's x with z=0 and with its stored depth, then plots
    the two points as a line. Reads the module global ``data``.
    """
    new = data.loc[data['sensor'] == 'crack']
    y = new['x'].drop_duplicates().values
    z = [0] * len(y)
    arr = new['z'].drop_duplicates().values
    # Duplicate the x positions so each pairs once with 0 and once with its depth.
    # NOTE(review): assumes the deduplicated x and z arrays have equal length;
    # two cracks sharing a depth (or an x) would break the pairing below.
    y = np.append(y, y)
    z = np.append(z, arr)
    new_dict = {}
    # Group the (x, z) points by x coordinate.
    for i in range(0,len(y)):
        if(y[i] in new_dict):
            new_dict[y[i]].append([y[i], z[i]])
        else:
            new_dict[y[i]] = [[y[i], z[i]]]
    # Each group holds exactly two points: (x, 0) and (x, depth).
    for i in new_dict:
        # print(new_dict[i])
        t = new_dict[i]
        plt.plot([t[0][0], t[1][0]], [t[0][1], t[1][1]], 'go-', linewidth=2)
def checkNodesById():
    """Run the velocity check for each opposing sensor pair (1..6 vs 4..9)."""
    for sensor_id in range(1, 7):
        checkVelocity(sensor_id, sensor_id + 3)
def checkVelocity(start, end):
    """Locate a crack between sensors ``start`` and ``end`` from wave velocities.

    Uses both directional velocity readings from the global ``location`` table.
    Equal readings at the baseline place the crack at the midpoint; otherwise a
    matching asymmetric pair places it off-centre; anything else is rejected.
    """
    # Both directional readings (start->end and end->start).
    records = location['velocity'].loc[(location['from'] == start)
            & (location['to'] == end) | (location['to'] == start)
            & (location['from'] == end)].tolist()
    arr = location.loc[(location['from'] == start) & (location['to']
                       == end) | (location['to'] == start)
                       & (location['from'] == end)]
    # Non-zero (truthy) return means an off-centre crack with that velocity delta.
    notMiddleLine = compareVelocity(start, end, arr)

    # draw middle line
    if records[0] == records[1] and records[0] \
            == crack_line_node_balance:
        # Both directions read the baseline velocity: crack sits at the midpoint.
        new_node = createNewNode(start, end)
        setGlobalData(new_node)
        initializeGraph()
    elif notMiddleLine:
        # Asymmetric but consistent readings: offset the crack from the midpoint.
        middle = getMiddleLine(start, end)
        side_crack = storeSideCrack(notMiddleLine, middle, start)
        new_node = createNewSideNode(start, side_crack, end)
        setGlobalData(new_node)
        initializeGraph()
    else:
        print(start, end)
        print('velocity not in range'+"\n")
def createNewNode(start, end):
    """Build a one-row 'crack' DataFrame at the midpoint of sensors start/end.

    x is the sensors' mean x, y is the start sensor's y, and z is the depth
    derived from the start->end velocity. Also appends the point to the
    global ``plot1`` crack polyline.
    """
    sum_of_sensor = data['x'].loc[(data['sensor'] == start)
                                  | (data['sensor'] == end)].sum()
    velocity = location['velocity'].loc[(location['from'] == start)
                                        & (location['to'] == end)]
    # NOTE(review): .astype(int) truncates before round(), so the round(…, 2)
    # is a no-op on an integer — confirm whether truncation is intended.
    crack_location_x = round((sum_of_sensor / 2).astype(int), 2)
    crack_location_y = round(data['y'].loc[data['sensor']
                             == start].values[0], 2)
    crack_location_z = calculateDepth(velocity.values)
    new_index = data.iloc[-1].name + 1
    new_node = [['crack', crack_location_x, crack_location_y, crack_location_z[0]]]
    storeCrackLine(crack_location_x, crack_location_y) # store line graph
    new_data_frame = pd.DataFrame(new_node, columns=['sensor', 'x', 'y', 'z'
                                                     ])
    # NOTE(review): setting .name on a DataFrame is a plain attribute, not the
    # index — the appended row will not actually get ``new_index`` as its label.
    new_data_frame.name = new_index
    return new_data_frame
def createNewSideNode(start, crack, end):
    """Build a one-row 'crack' DataFrame at an off-centre x position ``crack``.

    Near-duplicate of createNewNode, except x is supplied by the caller
    instead of being the sensors' midpoint. Also appends the point to the
    global ``plot1`` crack polyline.
    """
    velocity = location['velocity'].loc[(location['from'] == start)
                                        & (location['to'] == end)]
    crack_location_x = round(crack, 2)
    crack_location_y = round(data['y'].loc[data['sensor']
                             == start].values[0], 2)
    crack_location_z = calculateDepth(velocity.values)
    new_index = data.iloc[-1].name + 1
    new_node = [['crack', crack_location_x, crack_location_y, crack_location_z[0]]]
    storeCrackLine(crack_location_x, crack_location_y ) # store line graph
    new_data_frame = pd.DataFrame(new_node, columns=['sensor', 'x', 'y',
                                                     'z'
                                                     ])
    # NOTE(review): .name is a plain attribute here, not the row index.
    new_data_frame.name = new_index
    return new_data_frame
def storeCrackLine(x, y):
    """Append a detected crack point to the global ``plot1`` polyline."""
    xs, ys = plot1
    xs.append(x)
    ys.append(y)
def setGlobalData(new_data):
    """Extend the module-level ``data`` frame with the rows of ``new_data``.

    BUG FIX: uses ``pd.concat`` because ``DataFrame.append`` was deprecated in
    pandas 1.4 and removed in 2.0; the result is identical.
    """
    global data
    data = pd.concat([data, new_data])
def compareVelocity(start, end, array):
    """Return the baseline-relative velocity delta if both directions agree.

    Subtracts the 1328 baseline from each directional reading, rejects values
    at/over the global velocity bounds, and returns the start->end delta when
    the two directions have equal magnitude; otherwise returns False.
    """
    # NOTE(review): ``array`` is filtered with boolean masks built from the
    # global ``location`` — this only works because ``array`` is a slice of
    # ``location`` sharing its index; confirm that assumption holds.
    from_to = array['velocity'].loc[(location['from'] == start)
                                    & (location['to'] == end)].iloc[0] \
        - 1328
    to_from = array['velocity'].loc[(location['to'] == start)
                                    & (location['from']
                                       == end)].iloc[0] - 1328
    if(from_to >= max_velocity or from_to <= min_velocity):
        return False
    if(to_from >= max_velocity or to_from <= min_velocity):
        return False
    # A crack skews both directions by the same magnitude (opposite signs).
    if abs(from_to) == abs(to_from):
        return from_to
    else:
        return False
def storeSideCrack(velocity, middle, start):
    """Offset the crack position from ``middle`` according to the velocity delta.

    A positive delta below the upper bound shifts proportionally toward the
    far sensor; a negative one above the lower bound shifts the other way.
    Deltas in (-1, 1) or at/over the bounds produce no offset. ``start`` is
    unused but kept for interface compatibility. Reads the module globals
    ``max_velocity``, ``min_velocity`` and ``radius_bet_two_sensor``.
    """
    global max_velocity, min_velocity, radius_bet_two_sensor
    offset = 0
    if 1 <= velocity < max_velocity:
        offset = velocity / max_velocity * radius_bet_two_sensor
    elif min_velocity < velocity <= -1:
        offset = -(velocity / min_velocity) * radius_bet_two_sensor
    return middle + offset
def getMiddleLine(start, end):
    """Return the x-gap between sensors ``start`` and ``end``.

    A fixed bump of 10 is added when the second matching sensor sits past
    x=15 (layout correction from the original code). Reads the module
    global ``data``.
    """
    global data
    mask = (data['sensor'] == start) | (data['sensor'] == end)
    xs = data['x'].loc[mask]
    gap = xs.iloc[1] - xs.iloc[0]
    if xs.iloc[1] > 15:
        gap += 10
    return gap
# calculate Z
def calculateDepth(velocity):
    """Convert wave velocity (array-like, m/s) into crack depth in mm.

    depth = velocity / (2 * frequency_in_Hz) * 1000, where the module global
    ``frequency`` is given in kHz. Prints the first element for inspection.
    """
    depth = (velocity / (frequency * 1000 * 2)) * 1000
    print('depth of crack= ' + str(depth[0]))
    return depth
def threeDimensionGraph():
    """Render sensors and detected cracks in a 3D scatter + line figure.

    Scatters all sensor/crack positions, draws the crack polyline at the
    surface (z=0) and at crack depth, and drops a vertical segment at each
    of the three crack points. Reads globals ``data`` and ``plot1``.
    """
    # NOTE(review): ``plot`` is declared global here but never defined or
    # used anywhere visible — likely leftover; only ``data`` matters.
    global data, plot
    fig = plt.figure()
    ax = plt.axes(projection='3d')
    # scatter
    # NOTE(review): the hard-coded Series of three zeros assumes exactly
    # three crack points were appended to plot1 — confirm.
    xdata = pd.concat([data['x'], pd.Series(plot1[0])], axis = 0) # left bottom hand
    ydata = pd.concat([data['y'], pd.Series(plot1[1])], axis = 0) # right bottom hand
    zdata = pd.concat([data['z'], pd.Series([0,0,0])], axis = 0) # right side
    # line graph: crack polyline once at the surface, once at crack depth.
    for i in range(2):
        xline = plot1[0]
        yline = plot1[1]
        if(i == 0):
            zline = [0,0,0]
        else:
            zline = data['z'].loc[data['sensor'] == 'crack']
        ax.plot3D(xline, yline, zline, 'gray')
    # xline = [10, 10]
    # yline = [5, 5]
    # zline = [0, 6.67]
    # Vertical segment from the surface down to each crack's depth.
    temp = plot1
    for i in range(0,3):
        z = data.loc[(data['sensor'] == 'crack') & (data['x'] == temp[0][i]) & (data['y'] == temp[1][i])]
        xline = [temp[0][i]]*2
        yline = [temp[1][i]]*2
        zline = [0, z['z']]
        ax.plot3D(xline, yline, zline, 'gray')
    ax.scatter3D(xdata, ydata, zdata, c=zdata);
def main():
    """Run the full pipeline: 2D plot, crack detection, depth lines, 3D view."""
    initializeGraph()
    checkNodesById()
    initializeGraphZ()
    threeDimensionGraph()
if __name__ == '__main__':
    # Velocity delta bounds (relative to the 1328 baseline) accepted as a crack.
    max_velocity = 559
    min_velocity = -559
    radius_bet_two_sensor = 5   # half the sensor spacing used for offsets
    frequency = 100             # excitation frequency; treated as kHz in calculateDepth
    crack_line_node_balance = 1328  # Crack line in middle (baseline velocity)
    location = pd.read_csv('C:/Users/User/Desktop/FYP 2021/data.csv')
    data = pd.read_csv('C:/Users/User/Desktop/FYP 2021/My PZT location.csv')
    plot1 = [[], []]  # crack line
    plot2 = [[], []]
    main()
| franklee809/3d-matplot | main.py | main.py | py | 7,130 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "matplotlib.pyplot.scatter",
"line_number": 14,
"usage_type": "call"
},
{
"api_name": "matplotlib.pyplot",
"line_number": 14,
"usage_type": "name"
},
{
"api_name": "matplotlib.pyplot.plot",
"line_number": 15,
"usage_type": "call"
},
{
"api_name": "ma... |
1538742126 | import numpy as np
import matplotlib.pylab as plt
from scipy.stats import norm
from src.MiniProjects.DeltaHedging.SingleVariableDeltaHedgingValuator import SingleVariableDeltaHedgingValuator
from src.SolverMC.SingleVariableSimulator import SingleVariableSimulator
__author__ = 'frank.ma'
class SingleVariableDeltaHedging(object):
    """Monte-Carlo study of threshold-triggered delta hedging of a short option.

    One option per scenario is sold at inception; the premium (less the cost
    of the initial delta hedge) seeds a cash account accruing interest at
    ``rf``. The hedge is rebalanced only when the spot moves by more than
    ``threshold`` (relative) since the previous step.
    """

    def __init__(self, simulator: SingleVariableSimulator, valuator: SingleVariableDeltaHedgingValuator,
                 n_steps: int, tau: float, rf: float, threshold: float):
        self.simulator = simulator
        self.valuator = valuator
        self.n_steps = n_steps
        self.tau = tau
        self.rf = rf
        self.threshold = threshold
        # Time-to-maturity grid from tau down to expiry, n_steps intervals.
        self.taus = np.linspace(tau, 0.0, num=n_steps + 1)
        # set up all required states
        self.s_curr = self.simulator.curr
        self.s_prev = self.s_curr.copy()
        self.opt_amount = np.full(simulator.n_scenarios, -1.0)  # sell one option on one share as always
        self.share_amount = valuator.delta(self.s_curr, tau)  # initial delta
        self.cash_acc = valuator.price(self.s_curr, tau)  # premium income upon set up
        self.cash_acc -= self.s_curr * self.share_amount  # self-financing
        self.reb_count = np.ones(simulator.n_scenarios)  # rebalance counter

    def evolve(self, dt: float):
        """Advance by ``dt``: accrue interest, roll tau down, simulate the spot.

        Raises AttributeError if the step would pass the option's expiry.
        """
        self.cash_acc *= np.exp(dt * self.rf)  # cash account accumulates interest
        self.tau -= dt
        if self.tau < 0.0:
            raise AttributeError('option is already expired after incremental time %r.' % dt)
        self.s_prev = self.s_curr.copy()
        self.s_curr = self.simulator.evolve(dt)

    def rebalance(self):
        """Re-hedge only the scenarios whose relative spot move exceeds threshold."""
        reb = np.abs(self.s_curr / self.s_prev - 1.0) > self.threshold
        delta = self.valuator.delta(self.s_curr, self.tau)
        reb_value = (self.share_amount - delta) * self.s_curr  # rebalance cash amount
        self.share_amount[reb] = delta[reb]
        self.cash_acc[reb] += reb_value[reb]
        self.reb_count[reb] += 1

    def evaluate(self):
        """Mark the portfolio (cash + short option + shares) to market per scenario."""
        option_acc = self.opt_amount * self.valuator.price(self.s_curr, self.tau)
        stock_acc = self.share_amount * self.s_curr
        return self.cash_acc + option_acc + stock_acc

    def graphical_analysis(self):
        """Histogram the hedged P&L across scenarios against a fitted normal pdf."""
        value = self.evaluate()
        mean = np.average(value)
        std = np.std(value)
        bins = np.linspace(-5.0, 5.0, num=101)
        norms = norm.pdf(bins, mean, std)
        # BUG FIX: `normed=True` was removed from matplotlib (3.1+);
        # `density=True` is the equivalent normalization flag.
        plt.hist(value, bins, density=True)
        plt.plot(bins, norms)
        plt.title('hedged value at ttm of %r' % self.tau)
        plt.xlim([bins[0], bins[-1]])
        plt.ylim([0.0, 1.0])
        plt.show()

    def simulate_to_terminal(self):
        """Walk the whole time grid, evolving and conditionally rebalancing."""
        for idx, ttm in enumerate(self.taus[1:]):
            dt = self.taus[idx] - ttm
            self.evolve(dt)
            self.rebalance()
| frankma/Finance | src/MiniProjects/DeltaHedging/SingleVariableDeltaHedging.py | SingleVariableDeltaHedging.py | py | 2,917 | python | en | code | 0 | github-code | 1 | [
{
"api_name": "src.SolverMC.SingleVariableSimulator.SingleVariableSimulator",
"line_number": 12,
"usage_type": "name"
},
{
"api_name": "src.MiniProjects.DeltaHedging.SingleVariableDeltaHedgingValuator.SingleVariableDeltaHedgingValuator",
"line_number": 12,
"usage_type": "name"
},
{
... |
import copy
import threading
from collections import OrderedDict
from functools import partial
from typing import Tuple, Union

import numpy as np
import torch
import torch.nn.functional as F
from fvcore.common.registry import Registry
from omegaconf.listconfig import ListConfig
from timm.models.vision_transformer import _cfg
from torch import nn

from .. import ModifiedResNet, VisualTransformer, DistilledVisionTransformer
# Global registry mapping config ``name`` strings to image-encoder classes.
IMAGE_HEADS_REGISTRY = Registry("IMAGE_HEADS")
IMAGE_HEADS_REGISTRY.__doc__ = """
Registry for image encoders.
"""
def build_image_head(cfg, **kwargs):
    """Look up ``cfg.name`` in the registry and instantiate that head."""
    head_cls = IMAGE_HEADS_REGISTRY.get(cfg.name)
    return head_cls(cfg, **kwargs)
@IMAGE_HEADS_REGISTRY.register()
class ImageHead(nn.Module):
    """CLIP-style image encoder: ResNet if ``cfg.layers`` is a sequence, ViT otherwise."""

    def __init__(self, cfg, **kwargs):
        super().__init__()
        resnet_like = isinstance(cfg.layers, (tuple, list, ListConfig))
        if resnet_like:
            # ResNet's attention pool scales heads off the final stage width.
            self.encoder = ModifiedResNet(
                input_resolution=cfg.resolution,
                output_dim=cfg.embed_dim,
                layers=cfg.layers,
                width=cfg.width,
                heads=cfg.width * 32 // 64,
            )
        else:
            self.encoder = VisualTransformer(
                input_resolution=cfg.resolution,
                output_dim=cfg.embed_dim,
                patch_size=cfg.patch_size,
                layers=cfg.layers,
                width=cfg.width,
                heads=cfg.width // 64,
            )

    def copy_state_dict(self, state_dict):
        """Load pretrained weights into the wrapped encoder."""
        self.encoder.load_state_dict(state_dict)

    def forward(self, images, *args, **kwargs):
        """Encode ``images``; L2-normalize when ``normalized=True`` is passed."""
        features = self.encoder(images)
        if kwargs.get("normalized", False):
            features = features / features.norm(dim=-1, keepdim=True)
        return features
@IMAGE_HEADS_REGISTRY.register()
class DeiTImageHead(nn.Module):
    """DeiT (distilled ViT) image encoder.

    Encodes images with a DistilledVisionTransformer and averages the class
    and distillation token embeddings into a single feature vector.
    """
    def __init__(self, cfg, **kwargs):
        super().__init__()
        heads = cfg.width // 64
        self.encoder = DistilledVisionTransformer(
            img_size=cfg.resolution,
            patch_size=cfg.patch_size,
            representation_size=cfg.embed_dim,
            embed_dim=cfg.width,
            depth=cfg.layers,
            num_heads=heads,
            mlp_ratio=4,
            qkv_bias=True,
            # BUG FIX: `partial` was used without ever being imported in this
            # module, so constructing this head raised NameError at runtime
            # (now imported from functools at the top of the file).
            norm_layer=partial(nn.LayerNorm, eps=1e-6),
            **kwargs
        )

    def copy_state_dict(self, state_dict):
        """Load pretrained weights into the wrapped encoder."""
        self.encoder.load_state_dict(state_dict)

    def forward(self, images, *args, **kwargs):
        """Encode ``images``; average the cls/distill tokens, optionally L2-normalize."""
        cls_z, distilled_z = self.encoder.forward_features(images)
        z = (cls_z + distilled_z) / 2
        if kwargs.get("normalized", False):
            z = z / z.norm(dim=-1, keepdim=True)
        return z
| zhaoyanpeng/vipant | cvap/module/encoder/image_head.py | image_head.py | py | 2,844 | python | en | code | 19 | github-code | 1 | [
{
"api_name": "fvcore.common.registry.Registry",
"line_number": 17,
"usage_type": "call"
},
{
"api_name": "torch.nn.Module",
"line_number": 26,
"usage_type": "attribute"
},
{
"api_name": "torch.nn",
"line_number": 26,
"usage_type": "name"
},
{
"api_name": "omegaco... |
12037898208 | from flask import jsonify
from sqlalchemy.exc import IntegrityError
from actor_libs.database.orm import db
from actor_libs.errors import ReferencedError
from actor_libs.utils import get_delete_ids
from app import auth
from app.models import Device, Group, GroupDevice, User, EndDevice, Gateway
from app.schemas import GroupSchema, GroupDeviceSchema
from . import bp
@bp.route('/groups')
@auth.login_required
def list_groups():
    """Return a paginated list of groups with per-group device counts."""
    # NOTE(review): EndDevice and Gateway are both outer-joined through the
    # same GroupDevice association column; a group matching rows in both
    # tables could fan out and inflate these counts — verify with real data.
    query = Group.query.outerjoin(GroupDevice) \
        .outerjoin(EndDevice, EndDevice.id == GroupDevice.c.deviceIntID) \
        .outerjoin(Gateway, Gateway.id == GroupDevice.c.deviceIntID) \
        .with_entities(Group,
                       db.func.count(EndDevice.id).label('endDeviceCount'),
                       db.func.count(Gateway.id).label('gatewayCount')) \
        .group_by(Group)
    records = query.pagination()
    return jsonify(records)
@bp.route('/groups/<int:group_id>')
@auth.login_required
def view_group(group_id):
    """Return one group (with its owner's username) as JSON, or 404."""
    record = Group.query \
        .join(User, User.id == Group.userIntID) \
        .with_entities(Group, User.username) \
        .filter(Group.id == group_id) \
        .to_dict()
    return jsonify(record)
@bp.route('/groups', methods=['POST'])
@auth.login_required
def create_group():
    """Validate the request payload, persist a new group, and return it (201)."""
    request_dict = GroupSchema.validate_request()
    created = Group().create(request_dict)
    return jsonify(created.to_dict()), 201
@bp.route('/groups/<int:group_id>', methods=['PUT'])
@auth.login_required
def update_group(group_id):
    """Apply a validated payload to an existing group and return it, or 404."""
    group = Group.query.filter(Group.id == group_id).first_or_404()
    payload = GroupSchema.validate_request(obj=group)
    updated = group.update(payload)
    return jsonify(updated.to_dict())
@bp.route('/groups', methods=['DELETE'])
@auth.login_required
def delete_group():
    """Delete the groups whose ids are supplied; refuse any that still has devices."""
    delete_ids = get_delete_ids()
    query_results = Group.query.filter(Group.id.in_(delete_ids)).many()
    try:
        for group in query_results:
            # Count association rows directly rather than loading the devices.
            device_count = db.session.query(db.func.count(GroupDevice.c.deviceIntID)) \
                .filter(GroupDevice.c.groupID == group.groupID).scalar()
            if device_count > 0:
                # Not an IntegrityError, so not caught below: aborts the whole
                # batch before commit, leaving earlier deletes uncommitted.
                raise ReferencedError(field='device')
            db.session.delete(group)
        db.session.commit()
    except IntegrityError:
        raise ReferencedError()
    return '', 204
@bp.route('/groups/<int:group_id>/devices')
@auth.login_required
def view_group_devices(group_id):
    """Paginate the devices attached to one group, or 404 if it doesn't exist."""
    group = Group.query.with_entities(Group.groupID) \
        .filter(Group.id == group_id).first_or_404()
    device_query = Device.query \
        .join(GroupDevice, GroupDevice.c.deviceIntID == Device.id) \
        .filter(GroupDevice.c.groupID == group.groupID)
    return jsonify(device_query.pagination(code_list=['typeLabel']))
@bp.route('/groups/<int:group_id>/devices', methods=['POST'])
@auth.login_required
def add_group_devices(group_id):
    """Attach the validated list of devices to the group (201), or 404."""
    group = Group.query.filter(Group.id == group_id).first_or_404()
    payload = GroupDeviceSchema.validate_request()
    for device in payload['devices']:
        group.devices.append(device)
    group.update()
    return '', 201
@bp.route('/groups/<int:group_id>/devices', methods=['DELETE'])
@auth.login_required
def delete_group_devices(group_id):
    """Detach the devices whose ids are supplied from the group (204), or 404."""
    group = Group.query.filter(Group.id == group_id).first_or_404()
    target_ids = get_delete_ids()
    members = Device.query \
        .join(GroupDevice, GroupDevice.c.deviceIntID == Device.id) \
        .filter(GroupDevice.c.groupID == group.groupID,
                Device.id.in_(target_ids)).all()
    for device in members:
        group.devices.remove(device)
    group.update()
    return '', 204
| actorcloud/ActorCloud | server/app/services/devices/views/groups.py | groups.py | py | 3,749 | python | en | code | 181 | github-code | 1 | [
{
"api_name": "app.models.Group",
"line_number": 22,
"usage_type": "argument"
},
{
"api_name": "app.models.Group",
"line_number": 19,
"usage_type": "argument"
},
{
"api_name": "app.models.Gateway",
"line_number": 18,
"usage_type": "argument"
},
{
"api_name": "app.... |
30217682087 | import requests
import json
import time
import re
import random
from user_agent_tool import *
class converse_post():
    """Purchase-flow client for m.converse.com.cn.

    Drives the mobile site's buy-now flow over a shared requests session:
    stock check (isEnoughStock) -> nowBuy -> checkout (fetch order id) ->
    createOrder, plus helpers to install browser cookies and read back the
    logged-in account's e-mail address.
    """

    def __init__(self):
        self._num = 1  # quantity per order
        self._user_agent_tool = user_agent_tool()  # rotating User-Agent source
        self._session = requests.session()  # shared session carries login cookies
        # Fields cached from the most recent isEnoughStock response.
        self._isEnoughStock_response = {
            'returnUrl':'',
            'isEnoughStock':'',
            'uuid':''
        }
        self._orderId = None
        self._user_email = None

    def __requests_get(self, url, headers):
        """GET ``url`` through the session; warn on HTTP 403."""
        response = self._session.get(url=url, headers=headers, verify=False)
        response.encoding = 'utf-8'
        # BUG FIX: status_code is an int; the original compared it to the
        # string '403', which could never match, so 403s went unreported.
        if response.status_code == 403:
            print('在get访问{}时,服务器拒绝访问...'.format(url))
        return response

    def __requests_post(self, url, headers, data):
        """POST ``data`` to ``url`` through the session; warn on HTTP 403."""
        response = self._session.post(url=url, headers=headers, data=data, verify=False)
        response.encoding = 'utf-8'
        # BUG FIX: int vs str comparison, same as __requests_get.
        if response.status_code == 403:
            print('在post访问{}时,服务器拒绝访问...'.format(url))
        return response

    def set_cookies(self, cookies_dict):
        """Replace the session's cookies with those exported from a browser.

        ``cookies_dict`` is an iterable of {'name': ..., 'value': ...} dicts.
        """
        self._session.cookies.clear_session_cookies()
        cookieJar = requests.cookies.RequestsCookieJar()
        for cookie in cookies_dict:
            cookieJar.set(cookie["name"], cookie["value"])
        self._session.cookies.update(cookieJar)

    def isEnoughStock(self, item_jmskuCode, item_url):
        """Ask the site whether ``item_jmskuCode`` has stock for the wanted quantity.

        Caches ``uuid``/``returnUrl`` from the response for the follow-up
        nowBuy call. Returns the site's isEnoughStock flag, or False on a
        malformed response.
        """
        url = 'https://m.converse.com.cn/isEnoughStock.json'
        headers = {
            'Host':'m.converse.com.cn',
            'User-Agent':self._user_agent_tool.get_user_agent(),
            'Accept':'*/*',
            'Accept-Language':'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Accept-Encoding':'gzip, deflate, br',
            'Referer':item_url,
            'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With':'XMLHttpRequest',
            'Content-Length':'59',
            'Connection':'keep-alive'
        }
        data = {
            'challenge':'',
            'validate':'',
            'seccode':'',
            'jmskuCode':item_jmskuCode,
            'num':self._num
        }
        response = self.__requests_post(url, headers, data)
        print('isEnoughStock:', response.text)
        try:
            json_data = json.loads(response.text)
        except json.JSONDecodeError:
            print('处理{}的isEnoughStock时发生:json.decoder.JSONDecodeError错误'.format(item_jmskuCode))
            return False
        # Cache the anti-bot token and redirect URL for nowBuy.
        self._isEnoughStock_response['uuid'] = json_data.get('uuid', '')
        self._isEnoughStock_response['returnUrl'] = json_data.get('returnUrl', '')
        try:
            isEnoughStock_result = json_data['isEnoughStock']
        except:
            print('处理{}的json_data["isEnoughStock"]时出错,默认返回False'.format(item_jmskuCode))
            isEnoughStock_result = False
        return isEnoughStock_result

    def nowBuy(self, item_jmskuCode, item_url):
        """Submit the buy-now request for the sku.

        Returns True on success, False on failure/expired login, and the
        string 'ERROR' when the site reports a purchase-limit violation.
        """
        # Prefer the redirect URL cached from isEnoughStock when present.
        if len(self._isEnoughStock_response['returnUrl']) < 1:
            url = 'https://m.converse.com.cn/nowBuy.htm?loxiaflag={}'.format(str(round(time.time() * 1000)))
        else:
            url = 'https://m.converse.com.cn' + str(self._isEnoughStock_response['returnUrl'])
        headers = {
            'Host':'m.converse.com.cn',
            'User-Agent':self._user_agent_tool.get_user_agent(),
            'Accept':'*/*',
            'Accept-Language':'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Accept-Encoding':'gzip, deflate, br',
            'Referer':item_url,
            'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With':'XMLHttpRequest',
            'Content-Length':'66',
            'Connection':'keep-alive'
        }
        # Include the anti-bot uid only when isEnoughStock supplied one.
        data = {
            'jmskuCode':item_jmskuCode,
            'num':self._num,
        }
        if len(self._isEnoughStock_response['uuid']) > 1:
            data['uid'] = self._isEnoughStock_response['uuid']
        response = self.__requests_post(url, headers, data)
        print(response.text)
        if '超过限制' in response.text:
            return 'ERROR'
        if '请先登录' in response.text:
            input('登录状态失效,请登录后回车继续...')
            return False
        json_data = json.loads(response.text)
        try:
            if json_data['returnCode'] == 'E':
                print('购买失败...')
                return False
        except Exception:
            pass
        return True

    def checkout(self, item_url):
        """Load the checkout page and scrape the UUID-format order id from it.

        Returns True and stores the id in ``self._orderId`` on success.
        """
        url = 'https://m.converse.com.cn/order/checkout.htm?isBuyNow=true'
        headers = {
            'Host':'m.converse.com.cn',
            'User-Agent':self._user_agent_tool.get_user_agent(),
            'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Language':'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Accept-Encoding':'gzip, deflate, br',
            'Referer':item_url,
            'Connection':'keep-alive',
            'Upgrade-Insecure-Requests':'1'
        }
        response = self.__requests_get(url, headers)
        # Order ids are standard UUIDs embedded in the checkout HTML.
        orderId_patton = '[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}'
        result = re.findall(orderId_patton, response.text)
        if len(result) > 0:
            self._orderId = result[0]
        else:
            self._orderId = ''
            print('获取订单ID失败...')
            return False
        print('订单ID:', self._orderId)
        return True

    def createOrder(self, item_name):
        """Finalize the order for ``self._orderId`` (Alipay) and log the outcome.

        Appends a tab-separated line to ./成功记录.txt and returns the raw
        response text, or None/False when no order id exists or the server
        refuses the request.
        """
        url = 'https://m.converse.com.cn/order/createOrder.json'
        headers = {
            'Host':'m.converse.com.cn',
            'User-Agent':self._user_agent_tool.get_user_agent(),
            'Accept':'*/*',
            'Accept-Language':'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Accept-Encoding':'gzip, deflate, br',
            'Referer':'https://m.converse.com.cn/order/checkout.htm?isBuyNow=true',
            'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With':'XMLHttpRequest',
            'Content-Length':'217',
            'Connection':'keep-alive'
        }
        data = {
            'invoiceType':'PERSON',
            'companyName':'',
            'companyTax':'',
            'paymentType':'601',
            'paymentBank':'zhifubao',
            'isBuyNow':'true',
            'orderId':self._orderId,
            'challenge':'',
            'validate':'',
            'seccode':'',
            'screenwidth':'360',
            'screenheight':'720',
            'track':[]
        }
        if len(self._orderId) == 0:
            print('订单编号不存在,订单提交失败...')
            return
        response = self.__requests_post(url=url, headers=headers, data=data)
        print('status_code : ', response.status_code)
        # Consistency fix: compare the int status directly (was str(...) == '403').
        if response.status_code == 403:
            print('在创建订单时,服务器拒绝访问...')
            return False
        response.encoding = 'utf-8'
        # Append the outcome to the success log; context manager guarantees close.
        with open('./成功记录.txt', 'a+', encoding='utf-8') as f:
            log = '{log_time}\t{item_name}\t{user_email}\t{result}\n'.format(log_time = str(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())), item_name = item_name, user_email = self._user_email, result = str(response.text))
            f.write(log)
        print(response.text)
        return str(response.text)

    def set_user_email(self):
        """Scrape the logged-in account's e-mail from the member-info page.

        Stores str(list-of-addresses) in ``self._user_email``; an empty list
        indicates the login session has been lost.
        """
        url = 'https://m.converse.com.cn/myshop/updatememberinfo.htm?loxiaflag=' + str(round(time.time() * 1000))
        headers = {
            'Host':'m.converse.com.cn',
            'User-Agent':self._user_agent_tool.get_user_agent(),
            'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Language':'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
            'Accept-Encoding':'gzip, deflate, br',
            'Referer':'https://m.converse.com.cn/member/addressList.htm?loxiaflag={}'.format(str(round(time.time()*1000)-random.randint(1000,3000))),
            'Connection':'keep-alive',
            'Upgrade-Insecure-Requests':'1'
        }
        response = self.__requests_get(url, headers)
        html_doc = response.text
        reg = "[\w!#$%&'*+/=?^_`{|}~-]+(?:\.[\w!#$%&'*+/=?^_`{|}~-]+)*@(?:[\w](?:[\w-]*[\w])?\.)+[\w](?:[\w-]*[\w])?"
        email_list = re.findall(reg, html_doc)
        # Drop the placeholder address baked into the page template.
        result_list = []
        for emailAdd in email_list:
            if (emailAdd != 'example@converse.com'):
                result_list.append(emailAdd)
        print('当前账号的邮箱地址为:{},若邮箱地址为空说明登录状态已经丢失'.format(result_list))
        # NOTE(review): stores the *repr* of the list, not a single address.
        self._user_email = str(result_list)
{
"api_name": "requests.session",
"line_number": 11,
"usage_type": "call"
},
{
"api_name": "requests.cookies.RequestsCookieJar",
"line_number": 36,
"usage_type": "call"
},
{
"api_name": "requests.cookies",
"line_number": 36,
"usage_type": "attribute"
},
{
"api_nam... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.