blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5f294a5e640f3b1056fb0d8d1e67096fdd54b08e | 87c8a7018736ad3c3ccaae20fd2e9efdfdb06150 | /calc_gap.py | d60704b5be0bc82f84771fca617f28d4f6551f7d | [
"Apache-2.0"
] | permissive | xiw9/youtube-8m | 81d03c51d4cc460103ac4a1e0d8fad0bf1781196 | 40854c4788639542ae88f041f950aa3bbc435895 | refs/heads/master | 2021-01-25T08:06:58.609145 | 2017-06-08T16:50:20 | 2017-06-08T16:50:20 | 93,714,639 | 0 | 0 | null | 2017-06-08T06:25:04 | 2017-06-08T06:25:04 | null | UTF-8 | Python | false | false | 662 | py | #/usr/bin/python
"""Compute the YouTube-8M GAP metric for a prediction CSV against ground truth.

Usage: calc_gap.py <predictions.csv> [<validate_labels.csv>]

Fixes/improvements over the original:
  * Python 2 ``print`` statement replaced with the function form.
  * Files are opened with ``with`` so handles are always closed.
  * The ground-truth path can be overridden by an optional second CLI
    argument (the old hard-coded path remains the default).
"""
import numpy as np
import sys
import eval_util

# Size of the YouTube-8M label vocabulary.
NUM_CLASSES = 4716

# --- Load predictions -------------------------------------------------------
# sys.argv[1]: CSV with a header row, then "VideoId,<label conf label conf ...>".
with open(sys.argv[1]) as f:
    predf = [x.strip() for x in f]
name = []
pred = np.zeros((len(predf) - 1, NUM_CLASSES), dtype='float32')
for k, x in enumerate(predf[1:]):
    fields = x.split(',')
    name.append(fields[0])
    p = fields[1].split()
    # Pairs are flattened: label_0 conf_0 label_1 conf_1 ...
    for i in range(0, len(p), 2):
        pred[k][int(p[i])] = float(p[i + 1])

# --- Load ground truth -------------------------------------------------------
# Optional second CLI argument overrides the default label file location.
gtf = sys.argv[2] if len(sys.argv) > 2 else '/DATACENTER/3/xiw/yt8m/validate_labels.csv'
gta = {}
with open(gtf) as f:
    for x in f:
        l = x.strip().split(',')
        gta[l[0]] = l[1]

# Binary ground-truth matrix aligned row-for-row with the prediction matrix.
gt = np.zeros((len(predf) - 1, NUM_CLASSES), dtype='float32')
for k, n in enumerate(name):
    for i in gta[n].split():
        gt[k][int(i)] = 1

# Python 3 compatible print (was a Python 2 print statement).
print(eval_util.calculate_gap(pred, gt, top_k=20))
| [
"i@xiw.im"
] | i@xiw.im |
7679bb26208128c5f4faecd9a41265ad09655262 | 8772775c64c242eb93d17de60d2c5fa599eb2b61 | /project1/project1-code/plot.py | 7016c712f0e86720cd2b2e25939b125d5757b5df | [] | no_license | aggrace/DataVisualization | 6995eaecfc0e5921c445a395775ef276677bf3b2 | fb9d758ce17211ee5d51b031a5b67f2ce74f334b | refs/heads/master | 2020-04-10T11:22:12.846412 | 2018-12-17T02:42:15 | 2018-12-17T02:42:15 | 160,991,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,312 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 18 11:52:48 2018
@author: jiawei
"""
# NOTE(review): this file looks like a pasted notebook; several cells reference
# names that are defined in later cells (rng, rnd, yrs, days-as-data), so it
# does not run top-to-bottom as a script.
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import matplotlib.dates as mdates
from dateutil import parser
import seaborn as sns
# Data for plotting
whourly = pd.read_csv("~/Desktop/weather_hourly_2017-2018.csv")
wdaily = pd.read_csv("~/Desktop/weather_daily_2017-2018.csv")
# Read the city weather data (leftover from a Milan tutorial example)
#df_milano = pd.read_csv('milano_270615.csv')
# Pull out the temperature and date columns we want to analyse
#y1 = wdaily1['temperatureHigh']
#x1 = wdaily1['time']
y1 = wdaily['precipIntensity']
x1 = wdaily['time']
# NOTE(review): the two assignments below immediately overwrite y1/x1 above.
y1 = whourly['visibility']
x1 = whourly['time']
#y2 = wdaily['temperatureHigh']
#x2 = wdaily['time']
# Parse the time strings into datetime objects for the x axis.
day_milano = [parser.parse(x) for x in x1]
fig, ax = plt.subplots()
plt.xticks(rotation=70)
days = mdates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_formatter(days)
ax.plot(day_milano ,y1, 'b')
#ax.plot(day_milano1 ,y2, 'r')
plt.title("PrecipIntensity Changes of Washingotn dc, 2017-2018")
plt.xlabel('Day')
plt.ylabel('PrecipIntensity')
fig
df3 = whourly[['temperature','precipIntensity','humidity','visibility']].copy()
plt.title("Area of temperature, precipIntensity, humidity, visibility in Washingotn dc, 2017-2018")
#plt.xlabel('PrecipIntensity')
#plt.ylabel('Distribution')
df3.plot.area()
fig, ax = plt.subplots(figsize=(5, 3))
# NOTE(review): rng and rnd are only defined a few lines below, and `days`
# here is a DateFormatter, not x data — this cell fails if run in file order.
ax.stackplot(days, rng + rnd, labels=['Eastasia', 'Eurasia', 'Oceania'])
ax.set_title('Combined debt growth over time')
ax.legend(loc='upper left')
ax.set_ylabel('Total debt')
# NOTE(review): `yrs` is never defined anywhere in this file.
ax.set_xlim(xmin=yrs[0], xmax=yrs[-1])
fig.tight_layout()
rng = np.arange(50)
rnd = np.random.randint(0, 10, size=(3, rng.size))
print(rnd)
######################################
import pandas as pd
import numpy as np
from sklearn import preprocessing
hourly = pd.read_csv('~/Desktop/hourly.csv')
scaler = preprocessing.MinMaxScaler()
df3 = hourly[['temperature','precipIntensity','humidity','visibility']].copy()
#dfTest = pd.DataFrame({'A':[14.00,90.20,90.95,96.27,91.21],'B':[103.02,107.26,110.35,114.23,114.68], 'C':['big','small','big','small','small']})
min_max_scaler = preprocessing.MinMaxScaler()
# Min-max scale the given columns in place.
# NOTE(review): the body reads the global df3 instead of the `df` parameter.
def scaleColumns(df, cols_to_scale):
    for col in cols_to_scale:
        df[col] = pd.DataFrame(min_max_scaler.fit_transform(pd.DataFrame(df3[col])),columns=[col])
    return df
# NOTE(review): missing the required cols_to_scale argument — raises TypeError.
df = scaleColumns(df3, )
# Scale each weather column into [0, 1] so the stacked areas are comparable.
df3['temperature'] = pd.DataFrame(min_max_scaler.fit_transform(pd.DataFrame(df3['temperature'])),columns=['temperature'])
df3['precipIntensity'] = pd.DataFrame(min_max_scaler.fit_transform(pd.DataFrame(df3['precipIntensity'])),columns=['precipIntensity'])
df3['humidity'] = pd.DataFrame(min_max_scaler.fit_transform(pd.DataFrame(df3['humidity'])),columns=['humidity'])
df3['visibility'] = pd.DataFrame(min_max_scaler.fit_transform(pd.DataFrame(df3['visibility'])),columns=['visibility'])
# NOTE(review): `days` is a DateFormatter here as well, not an x-axis series.
plt.stackplot(days, df3['temperature'],df3['precipIntensity'],df3['humidity'],df3['visibility'], colors=['m','c','r','k'])
df3 = df3.set_index(hourly['time'])
df3.plot.area()
plt.title("temperature, precipIntensity, humidity, visibility of Washingotn dc, 2017-2018")
plt.xlabel('Hours')
plt.ylabel('value')
ax.set_xticklabels(day_milano, rotation=0)
plt.xlim(day_milano)
df3['visibility']
ax.plot(day_milano ,df3['visibility'], 'b')
#ax.plot(day_milano1 ,y2, 'r')
plt.title("PrecipIntensity Changes of Washingotn dc, 2017-2018")
plt.xlabel('Day')
plt.ylabel('PrecipIntensity')
# Display the figure
fig
df3['visibility'].isnull().sum().sum()
# Dummy plots give the stackplot below a usable legend.
plt.plot([],[],color='m', label='temperature', linewidth=5)
plt.plot([],[],color='c', label='precipIntensity', linewidth=5)
plt.plot([],[],color='r', label='humidity', linewidth=5)
plt.plot([],[],color='k', label='visibility', linewidth=5)
plt.stackplot(whourly['time'], df3['temperature'],df3['precipIntensity'],df3['humidity'],df3['visibility'], colors=['m','c','r','k'])
plt.xlabel('x')
plt.ylabel('y')
plt.title('Interesting Graph\nCheck it out')
plt.legend()
plt.show()
labels = ["temperature ", "precipIntensity", "humidity",'visibility']
fig, ax = plt.subplots()
ax.stackplot(day_milano, df3['temperature'],df3['precipIntensity'],df3['humidity'],df3['visibility'], labels=labels)
ax.legend(loc='upper left')
plt.show()
| [
"noreply@github.com"
] | aggrace.noreply@github.com |
dcb9f44b576400f74540d716d1cbf17691e26d39 | 91051a42e16c3292f06324eb1ffb49a732aef24e | /orders/urls.py | de8573bb2896e9b2b9ce36b740f52ffb08ec5f35 | [] | no_license | FranciscoGumescindo/PizzeriaPinochios | 2ef54d89d4836bca50793438d35090d33ae57d7b | 49fec47546d1afb4bd2af279175230219a27e431 | refs/heads/master | 2020-05-03T07:57:33.160104 | 2019-03-30T04:59:33 | 2019-03-30T04:59:33 | 178,512,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,429 | py | from django.urls import include,path
from django.contrib import admin
from . import views
# URL routing table for the pizzeria app: every route maps to a view in views.py.
urlpatterns=[
    path("admin",admin.site.urls),
    # Landing page and authentication.
    path("",views.index,name="index"),
    path("login",views.login,name="login"),
    path("logout",views.logout,name = "logout"),
    path("register",views.register,name = "register"),
    # Pizza configuration flow.
    path("customize_pizza",views.customize_pizza,name = "customize_pizza"),
    path("add_pizza_to_cart",views.add_pizza_to_cart,name= "add_pizza_to_cart"),
    # Sub configuration flow.
    path("customize_sub",views.customize_sub,name="customize_sub"),
    path("sub_addons",views.sub_addons,name="sub_addons"),
    path("sub_sizes",views.sub_sizes,name="sub_sizes"),
    path("add_sub_to_cart",views.add_sub_to_cart,name="add_sub_to_cart"),
    # Shopping cart management.
    path("shopping_cart",views.shopping_cart,name="shopping_cart"),
    path("shopping_cart_items",views.shopping_cart_items,name="shopping_cart_items"),
    path("add_item_to_cart",views.add_item_to_cart,name="add_item_to_cart"),
    path("remove_item_from_cart",views.remove_item_from_cart,name="remove_item_from_cart"),
    # Ordering and informational pages.
    path("order",views.order,name="order"),
    path("submit_order",views.submit_order,name="submit_order"),
    path("view_orders",views.view_orders,name="view_orders"),
    path("order_history",views.order_history,name="order_history"),
    path("contact_view",views.contact_view,name="contact_view"),
    path("hours_view",views.hours_view,name="hours_view"),
]
"142t0583@itsm.edu.mx"
] | 142t0583@itsm.edu.mx |
31d645cbe541bc2ac6d23555845f13b8574d8761 | cbf6fb2a0d93ca13c84b418d33a43c0ef33e866b | /92.py | 2e5eb6c947ffd56b4e75ac6fc47e01b7d43569fe | [] | no_license | chejeanx/euler | 1fddcc964b7b48c24d8c656d5a515668726f10eb | f9e10897b84a05fa5e9ca8049131191e6bd0735f | refs/heads/master | 2021-01-20T15:07:21.243637 | 2017-06-22T15:19:00 | 2017-06-22T15:19:00 | 90,722,860 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | # Square digit chains
# https://projecteuler.net/problem=92
# Returns terminating number in square digit chain for startN
def squareDigitChain(startN, currentN, chainOutcomes):
    """Follow the square-digit chain from currentN until it hits 1 or 89.

    Records the terminal value for startN in chainOutcomes when the walk
    reaches a terminal itself; returns a cached terminal immediately when an
    intermediate value is already present in chainOutcomes.
    """
    n = currentN
    while True:
        # Every chain provably terminates at 1 or 89.
        if n == 1 or n == 89:
            chainOutcomes[startN] = n
            return n
        # Short-circuit through previously solved intermediate values.
        if n in chainOutcomes:
            return chainOutcomes[n]
        # Advance: replace n by the sum of the squares of its digits.
        n = sum(int(digit) ** 2 for digit in str(n))
# Returns number of numbers below maxNum that terminate their square digit chains at 89
def squareDigit89s(maxNum):
    """Count the integers in [1, maxNum) whose square-digit chain ends at 89.

    Fixes over the original: the memo dict is now shared across all starting
    numbers — the old code passed a fresh ``{}`` on every call, so nothing was
    ever cached and every chain was recomputed from scratch.  The per-10000
    progress print was dropped as debug noise.
    """
    cache = {}  # value -> terminal (1 or 89), shared across all chains

    def _chain_end(n):
        # Walk the chain, remembering visited values so that every one of
        # them can be cached once the terminal is known.
        seen = []
        while n != 1 and n != 89:
            if n in cache:
                n = cache[n]
                break
            seen.append(n)
            n = sum(int(d) ** 2 for d in str(n))
        for m in seen:
            cache[m] = n
        return n

    return sum(1 for num in range(1, maxNum) if _chain_end(num) == 89)
print(squareDigit89s(10000000)) | [
"neverwhere@Neverwheres-MacBook-Pro.local"
] | neverwhere@Neverwheres-MacBook-Pro.local |
32494599245aa677b21fc5cb674eefbdd423995d | bca4dd686666ec49de189a4a31de8a377c26b7ad | /maincode.py | 31b7365fcfa0ea737d495ac925769b42f2e1459e | [] | no_license | arunpallsingh/Bus-reservation-system | bb9c0e076666735e6b5bdfb0ed7e9a67fe5f0801 | e7c7adf090b5aaf855b5fc57dbded466cea94771 | refs/heads/main | 2023-02-07T06:48:01.487524 | 2020-12-24T16:33:38 | 2020-12-24T16:33:38 | 324,141,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,021 | py | # -*- coding: utf-8 -*-
"""Maincode.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1QVTs70DkThqYnihhYDbeE-lC2Ni0duFS
"""
from tkinter import *
from PIL import Image,ImageTk
from tkinter import messagebox
import pymysql
# Build the main window and all reservation-form widgets.
root=Tk()
root.title("BUS RESERVATION SYSTEM")
root.geometry("500x700")
# Form heading.
Label(root,text="BUS RESERVATION FORM",width=25,
      font=("airal",20,"bold"),
      relief="solid").pack()
# Banner image; "download.jpg" must sit next to the script.
image=Image.open("download.jpg")
photo=ImageTk.PhotoImage(image)
Label(root,image=photo).place(x=150,y=50)
# --- Traveller name ---
Label(root,text="TRAVELLER NAME",width=20,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=250)
name=StringVar()
Entry(root,textvariable=name).place(x=300,y=250)
# --- Traveller email ---
Label(root,text="TRAVELLER EMAIL",width=25,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=290)
eml=StringVar()
Entry(root,textvariable=eml).place(x=300,y=290)
# --- Origin city drop-down ---
Label(root,text="TRAVELLING FROM",width=25,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=330)
ec=StringVar(root)
ec.set("TRAVELLING FROM")
OptionMenu(root, ec,"DELHI","GURUGRAM","NOIDA","GHAZIABAD","ROHTAK","HARIDWAR").place(x=300,y=325)
# --- Destination city drop-down ---
Label(root,text="TRAVELLING TO",width=25,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=360)
wc=StringVar(root)
wc.set("TRAVELLING TO")
OptionMenu(root, wc,"DELHI","GURUGRAM","NOIDA","GHAZIABAD","ROHTAK","HARIDWAR").place(x=300,y=358)
# --- Travel date (free-form text, expected dd/mm/yyyy) ---
Label(root,text="DATE(dd/mm/yyyy)",width=20,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=398)
date=StringVar()
Entry(root,textvariable=date).place(x=300,y=398)
# --- Gender checkboxes (var1 = male, var2 = female; onvalue is 1) ---
Label(root,text="TRAVELLER GENDER",width=25,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=440)
var1=IntVar()
var2=IntVar()
Checkbutton(root, text = "male",
            variable = var1,
            onvalue = 1,
            offvalue = 0).place(x=300,y=440)
Checkbutton(root,text="female",
            variable = var2,
            onvalue = 1,
            offvalue = 0).place(x=380,y=440)
# --- Bus type radio buttons (var: 1 = AC, 2 = NON AC) ---
Label(root,text="TYPE OF BUS",width=25,
      font=("airal",10,"bold"),
      relief="solid").place(x=50,y=480)
var=IntVar()
Radiobutton(root,text="AC",
            variable=var,value=1).place(x=300,y=480)
Radiobutton(root,text="NON AC",
            variable=var,value=2).place(x=380,y=480)
def new():
    """Open an empty secondary window (bound to File > "new file")."""
    r1=Tk()
    r1.mainloop()
# Build the menu bar: a "file" and an "edit" cascade.  Most entries are
# placeholders (command=None); only "new file" and "exit" do anything.
menubar=Menu(root)
file=Menu(menubar,tearoff=0)
menubar.add_cascade(label="file",menu=file)
file.add_command(label="new file",command=new)
file.add_command(label="edit",command=None)
file.add_command(label="help",command=None)
file.add_separator()
file.add_command(label="exit",command=root.destroy)
edit=Menu(menubar,tearoff=0)
menubar.add_cascade(label="edit",menu=edit)
edit.add_command(label="edit file",command=None)
edit.add_command(label="edit info",command=None)
edit.add_command(label="help",command=None)
edit.add_separator()
edit.add_command(label="exit",command=root.destroy)
root.config(menu=menubar)
def submit():
    """Collect the form fields and insert the reservation into MySQL.

    Bug fixes: the gender checks referenced undefined names ``Checkvar1`` /
    ``Checkvar2`` (the checkbuttons are bound to ``var1`` / ``var2``), and the
    female branch compared against 2 even though the checkbutton's onvalue is
    1, so "FEMALE" could never be recorded.  The DB connection is now closed
    even when the insert fails, and an unselected bus type no longer raises
    a KeyError.
    """
    nm=name.get()
    email=eml.get()
    frm=ec.get()
    to=wc.get()
    dte=date.get()
    ch=var.get()
    mode={1:"AC",2:"NON-AC"}
    gen=""
    if var1.get()==1:
        gen=gen+"MALE"
    elif var2.get()==1:
        gen=gen+"FEMALE"
    messagebox.showinfo("APPLICATION STATUS",message="APPLICATION SUCCESSFUL")
    conn=pymysql.connect("localhost","root","","project")
    cur=conn.cursor()
    try:
        q="insert into backscreen1 values(%s,%s,%s,%s,%s,%s,%s)"
        # mode.get avoids a KeyError when no radio button is selected (ch == 0).
        val=(nm,email,frm,to,dte,mode.get(ch,""),gen)
        cur.execute(q,val)
        conn.commit()
    finally:
        conn.close()
def exit():
    # NOTE: shadows the builtin exit(); kept because the Button below binds it.
    root.destroy()
# Submit and exit buttons, then hand control to the Tk event loop.
Button(root,text="SUBMIT INFO",width=15,
       font=("airal",10,"bold"),
       relief="solid",command=submit).place(x=150,y=600)
Button(root,text="EXIT",width=15,
       font=("airal",10,"bold"),
       relief="solid",command=exit).place(x=300,y=600)
root.resizable(0,0)
root.mainloop()
"noreply@github.com"
] | arunpallsingh.noreply@github.com |
c4106edbd383d886c587152300a55c0a9d23ba64 | cc7660ce7883d227afe5769ec80f2d2923bd4eb5 | /app/migrations/0002_auto_20191024_0919.py | 6b8d08ac909acc2365e7fce8012ad899f58ed9d2 | [] | no_license | botanhuligan/sb_django | cada2f13eed0cb8992e0dfb1f04d2f0efa821fdd | eb2356422930d3e677fb5d30d6e975fb80117f03 | refs/heads/master | 2022-12-12T22:34:41.235419 | 2019-10-25T05:31:19 | 2019-10-25T05:31:19 | 217,235,102 | 0 | 0 | null | 2022-12-08T06:46:13 | 2019-10-24T07:11:39 | Python | UTF-8 | Python | false | false | 880 | py | # Generated by Django 2.2.6 on 2019-10-24 09:19
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django schema migration (2019-10-24); edit with care."""

    dependencies = [
        ('app', '0001_initial'),
    ]

    operations = [
        # Drop the unused WifiPoint model/table.
        migrations.DeleteModel(
            name='WifiPoint',
        ),
        # Optional free-text address on Place.
        migrations.AddField(
            model_name='place',
            name='address',
            field=models.TextField(blank=True, max_length=255, null=True),
        ),
        # Optional JSON payload of points on Ticket (PostgreSQL JSONB).
        migrations.AddField(
            model_name='ticket',
            name='points',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),
        ),
        # Ticket.title becomes a TextField labelled "Description".
        migrations.AlterField(
            model_name='ticket',
            name='title',
            field=models.TextField(max_length=255, verbose_name='Description'),
        ),
    ]
| [
"Voronov.A.D@sberbank.ru"
] | Voronov.A.D@sberbank.ru |
9685b85900c3d79f25474112f98dffe10dc358e7 | 2857b4803467716ea0a82e273266dc977c966150 | /RestAPI/bin/chardetect | 3009af493b84e6b043d05fd6a257ffc155cb511e | [
"Apache-2.0"
] | permissive | YogeshPatel951/GoogleAssistantRESTAPI | bbc1dd7407925f9fd2b5ba1b16d67e2682c74bb4 | fbaa59afc773baf6ad28631c35d3bf9d522b82cc | refs/heads/master | 2022-10-23T13:38:09.005470 | 2019-12-13T18:20:25 | 2019-12-13T18:20:25 | 219,119,121 | 0 | 0 | null | 2022-09-23T22:31:18 | 2019-11-02T07:24:32 | Python | UTF-8 | Python | false | false | 240 | #!/home/yogesh/GA-REST/env/bin/python
# -*- coding: utf-8 -*-
"""pip-generated console-script wrapper for chardet's ``chardetect`` command."""
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
    # Strip setuptools' "-script.pyw"/".exe" suffix so the prog name is clean.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"patelyogesh951@gmail.com"
] | patelyogesh951@gmail.com | |
4e62172ec457daed38cbe1fd15671099a103f370 | 6b81f79367e2a889db46a437b17e77f620508c49 | /main.py | 3d4c7867838d9b9d6883896305d811b2ce0b4512 | [] | no_license | profitrul/my_site | aff5009d0d36a53b5a52d3ad03b17f4126903c40 | 2e194f7c9dc46beaf6b0c21914d0c4cd3e46688d | refs/heads/master | 2021-02-08T09:38:01.266062 | 2020-03-07T07:57:20 | 2020-03-07T07:57:20 | 244,137,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | from flask import Flask , render_template,url_for,redirect
app = Flask(__name__)

@app.route("/")
@app.route("/<text>")
def hello_world(text = None):
    """Render the index page at "/"; any other path redirects back to "/".

    NOTE(review): ``text == None`` would be more idiomatic as ``text is None``.
    """
    if text == None:
        return render_template('index.html',name = 'world')
    else:
        # text = text.split(',')
        # return render_template('index.html', name=text)
        return redirect(url_for('hello_world'))

if __name__ == '__main__':
    app.run()
"Student@ukr.net"
] | Student@ukr.net |
f0d7131ecae8c4c754c7dd19a9a5c1ff2121cb3d | 95540a155c043dd84ea6c0fb7d59ba06dc78b875 | /python/算法和数据结构/queue.py | 5c7170aaa244a59b0964dfc397b87b675c7a5cb7 | [] | no_license | Lilenn/must | 41b95d8e80f48a6b82febb222936bbc3502cc01f | a510a8d0e58fde1bc97ab7ad9bd2738158dcba5e | refs/heads/master | 2020-04-09T23:09:20.116439 | 2018-12-06T09:02:09 | 2018-12-06T09:02:09 | 160,648,431 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | class Queue(object):
    '''A FIFO queue backed by a Python list.'''
    def __init__(self):
        # Backing list for the queued items.
        self.items = []
    def is_pmpty(self):
        '''Return True when the queue holds no items.

        NOTE: the name is a typo of ``is_empty``; kept for compatibility
        with any existing callers.
        '''
        return self.items == []
def enqueue(self,item):
'''建造队列'''
self.items.insert(1,item)
    def dequeue(self):
        '''Remove and return the item at the front of the queue (list tail).'''
        return self.items.pop()
    def size(self):
        '''Return the number of items currently in the queue.'''
        return len(self.items)
if __name__ == '__main__':
    # Ad-hoc smoke test: enqueue three values, dequeue one, report the size.
    q = Queue()
    q.enqueue(1)
    q.enqueue(3)
    q.enqueue(5)
    print(q.dequeue())
    print(q.size())
"846077763@qq.com"
] | 846077763@qq.com |
c366837c97fb326463ac57314027fdaaaa55c5a6 | ccd3a2c78ade7ac058a6cf5759824b5d03e835f3 | /quiz/admin.py | 0992a234b0cbba026875ca27fa4302e0da76e985 | [] | no_license | ess010203/crmef | fd0b5fced3f6021553b85bce6deb688f0a6ec049 | c697fa4901f78153c251cb54248f19bedd1c47f9 | refs/heads/master | 2023-06-10T21:48:58.819097 | 2021-07-11T00:21:06 | 2021-07-11T00:21:06 | 365,341,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from django.contrib import admin
import nested_admin
from .models import Quiz , Question , Answer
class AnswerInline(nested_admin.NestedTabularInline):
    """Tabular inline for editing a Question's Answers (fixed at 4 slots)."""
    model = Answer
    extra = 4      # blank answer forms shown by default
    max_num = 4    # hard cap on answers per question
class QuestionInline(nested_admin.NestedTabularInline):
    """Tabular inline for a Quiz's Questions; each question nests AnswerInline."""
    model = Question
    inlines = [AnswerInline,]
    extra = 5      # blank question forms shown by default
class QuizAdmin(nested_admin.NestedModelAdmin):
    """Admin page that edits a Quiz together with its nested questions/answers."""
    inlines = [QuestionInline,]
# Register the models: Quiz with the nested admin, the others with defaults.
admin.site.register(Quiz,QuizAdmin)
admin.site.register(Question)
admin.site.register(Answer)
| [
"ahmedEssaidiInf@gmail.com"
] | ahmedEssaidiInf@gmail.com |
f34f79c26e98c1d38d207de9b6cffc1f0ae6857e | 4503a3bfd940dce760b5f70e90e6fe2fe0cc4881 | /week10/health.py | 64a500edefc57c9f812bbfd48accca1bbc735e97 | [] | no_license | RicardoLima17/lecture | dba7de5c61507f51d51e3abc5c7c4c22ecda504f | b41f1201ab938fe0cab85566998390166c7fa7d8 | refs/heads/main | 2023-04-18T11:12:39.769760 | 2021-04-21T18:36:09 | 2021-04-21T18:36:09 | 334,456,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | # use person module
# Author: Andrew Beatty
from personmodule import *
import datetime as dt
person1 = {
'firstname': 'andrew',
'lastname': 'beatty',
'dob': dt.date(2010, 1, 1),
'height': 180,
'width': 100
}
# call the functions in the module
# I used import * so these have been imported
# so I can call them with out the module name
displayperson(person1)
gethealthdata(person1) | [
"limaricardooliveira@gmail.com"
] | limaricardooliveira@gmail.com |
17a7ac687a62150208d9b3fc7abab7a916ce08a5 | f5dd289bb6b333a7df9edfa105f19486a93f5be4 | /235/lowestCommonAncestor2.0.py | ccbf42257258b2ac3a74aae5616a4025d4bc914e | [
"Apache-2.0"
] | permissive | cccccccccccccc/Myleetcode | 0ab50ba5303ed08fac8402c87efedebc6bf67c16 | fb3fa6df7c77feb2d252feea7f3507569e057c70 | refs/heads/master | 2023-07-03T21:11:54.050252 | 2021-08-11T08:26:01 | 2021-08-11T08:26:01 | 115,370,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,388 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Mar 7 15:32:46 2018
@author: chen
"""
# Definition for a binary tree node.
"""
Time complexity: O(h), where h is the tree height (O(n) in the worst case).
Space complexity: O(h) for the recursion stack.
Because this is a BST, a node is the LCA of p and q exactly when
min(p, q) <= node's value <= max(p, q).  So build a recursive check:
  if node's value < min(p, q): continue the search in node.right
  if node's value > max(p, q): continue the search in node.left
  if node's value equals p or q, or lies strictly between them:
      node is the LCA
"""
class TreeNode(object):
    """Node of a binary (search) tree: a value plus left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    """Lowest common ancestor of two nodes in a BST (LeetCode 235)."""

    def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
        """Walk down from the root using BST ordering.

        The first node whose value falls inside [min(p, q), max(p, q)]
        (inclusive) separates the two targets, so it is their lowest
        common ancestor.  Iterative: O(h) time, O(1) extra space.
        """
        lo, hi = min(p.val, q.val), max(p.val, q.val)
        node = root
        while node:
            if node.val < lo:
                # Both targets are larger: descend right.
                node = node.right
            elif node.val > hi:
                # Both targets are smaller: descend left.
                node = node.left
            else:
                # lo <= node.val <= hi: the paths to p and q diverge here.
                return node
        return None
# Build the sample BST:      6
#                          /   \
#                         2     8
#                        / \
#                       0   4
#                          / \
#                         3   5
a = TreeNode(6)
b = TreeNode(2)
c = TreeNode(8)
d = TreeNode(0)
e = TreeNode(4)
f = TreeNode(3)
g = TreeNode(5)
a.left = b
a.right = c
b.left = d
b.right = e
e.left = f
e.right = g
A = Solution()
# p == q == node 4: the LCA of a node with itself is the node; prints its repr.
print(A.lowestCommonAncestor(a,e,e))
"chenzhang.1987@gmail.com"
] | chenzhang.1987@gmail.com |
d5784e85c294a3ceb940edb662c512c5cce077fd | b4bf2c9800f0cfc9560a261287ecf94d0b2b2f07 | /08_Admin/main.py | 3ab6a3d5dcbb3929d8465f2c4eedf7aa789de805 | [] | no_license | matrix11061991/formation-flask | 909ea9f251d73d520f149be3893044a48255df3d | 6264b8fa2fa2f042782b7f4117106128847e66ae | refs/heads/master | 2023-03-15T14:11:03.375862 | 2019-03-29T16:04:43 | 2019-03-29T16:04:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
app = Flask(__name__)
# NOTE(review): SQLAlchemy is bound before SQLALCHEMY_DATABASE_URI is set
# below — this relies on lazy engine creation; confirm for the installed
# Flask-SQLAlchemy version.
db = SQLAlchemy(app)
class User(db.Model):
    """Application user record exposed through the admin interface."""
    id = db.Column(db.Integer, primary_key=True)
    # Login handle; must be unique and present.
    username = db.Column(db.String(80), unique=True, nullable=False)
    # Contact address; must be unique and present.
    email = db.Column(db.String(120), unique=True, nullable=False)
    def __repr__(self):
        return '<User %r>' % self.username
app.config['FLASK_ADMIN_SWATCH'] = 'cerulean'
# Add administrative views here
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///database/ma_super_db.db'
admin = Admin(app, name='microblog', template_mode='bootstrap3')
admin.add_view(ModelView(User, db.session))
if __name__=='__main__':
app.run()
| [
"olivier.dulcy@gmail.com"
] | olivier.dulcy@gmail.com |
2deaacb5260e7014ecb172b7918accab1f797c61 | ccb878041c63a43c8c6192a8739676ab79ec5a22 | /Proj2/models.py | 948823303d32060cb4b1f246a60fc345bb4f3f47 | [] | no_license | DanyuWang/EE559-miniprojects | f5c1cc9bbfba0c5828b17f35289a05be30298b10 | 87346b1fe9799801215c6a29a3f81e105a85ee6c | refs/heads/master | 2022-07-05T21:59:47.396847 | 2020-05-19T10:10:57 | 2020-05-19T10:10:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | """This file declares the models to be used for testing."""
from modules import Sequential, Linear, ReLU, Tanh, Sigmoid
MODEL1 = Sequential("ReLu",
Linear(2, 25), ReLU(),
Linear(25, 25), ReLU(),
Linear(25, 25), ReLU(),
Linear(25, 2), Sigmoid()
)
MODEL2 = Sequential("Tanh",
Linear(2, 25), Tanh(),
Linear(25, 25), Tanh(),
Linear(25, 25), Tanh(),
Linear(25, 2), Sigmoid()
)
MODEL3 = Sequential("ReLu + He",
Linear(2, 25, "He"), ReLU(),
Linear(25, 25, "He"), ReLU(),
Linear(25, 25, "He"), ReLU(),
Linear(25, 2, "He"), Sigmoid()
)
MODEL4 = Sequential("Tanh + Xavier",
Linear(2, 25, "Xavier"), Tanh(),
Linear(25, 25, "Xavier"), Tanh(),
Linear(25, 25, "Xavier"), Tanh(),
Linear(25, 2, "Xavier"), Sigmoid()
)
# Best model is actually almost model 2
MODEL_BEST = Sequential("Best",
Linear(2, 25), Tanh(),
Linear(25, 25), Tanh(),
Linear(25, 25), Tanh(),
Linear(25, 2, "He"), Sigmoid()
)
| [
"battleman@pop-os.localdomain"
] | battleman@pop-os.localdomain |
232ab722943cbc50ab67ebc8eec65ab8941b91a2 | 6f7f9bb3df6815ea7500d6219d7485728d4c2e54 | /mainapp/views/categories.py | a467629acd7e4a0f890b6afde2d6cc2f49dcc343 | [] | no_license | zakamaldin/geekshop | 5fd2fd6d6d6366bf30f4f51ec2d76fefa8d9ef76 | c13c309af7762df9d12f82b76a447c222593447e | refs/heads/master | 2021-07-06T08:41:53.026982 | 2019-02-05T18:57:22 | 2019-02-05T18:57:22 | 149,351,460 | 0 | 0 | null | 2019-05-03T16:12:42 | 2018-09-18T20:58:57 | Python | UTF-8 | Python | false | false | 3,668 | py | from django.http import HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import (
ListView, DetailView, CreateView, UpdateView, DeleteView
)
from authapp.mixins import AdminGroupRequired
from django.views.generic.edit import FormMixin
from django.urls import reverse_lazy
from mainapp.forms import ProductCategoryFormModel
from mainapp.models import ProductCategory
class ProductCategoryListView(ListView):
    """Paginated list of active product categories."""
    queryset = ProductCategory.objects.filter(is_active=True)
    template_name = 'mainapp/category_list.html'
    context_object_name = 'results'
    paginate_by = 3
class ProductCategoryCreateView(LoginRequiredMixin, AdminGroupRequired, CreateView):
    """Admin-only form for creating a new product category."""
    model = ProductCategory
    template_name = 'mainapp/category_detail.html'
    form_class = ProductCategoryFormModel
    success_url = reverse_lazy('categories:category_list')
    login_url = reverse_lazy('auth:login')
    redirect_url = reverse_lazy('categories:category_list')
    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super().get_context_data(**kwargs)
        # Tell the shared template which button label and page mode to render.
        context['button'] = 'Add'
        context['type'] = 'create'
        return context
class ProductCategoryDetailView(FormMixin, DetailView):
    """Read-only page for a single active category, rendered with its form."""

    queryset = ProductCategory.objects.filter(is_active=True)
    template_name = 'mainapp/category_detail.html'
    form_class = ProductCategoryFormModel

    def get_context_data(self, **kwargs):
        """Extend the template context with a bound form and page controls."""
        ctx = super().get_context_data(**kwargs)
        ctx['button'] = 'Edit'
        ctx['type'] = 'detail'
        ctx['form'] = ProductCategoryFormModel(instance=self.object)
        return ctx
class ProductCategoryUpdateView(LoginRequiredMixin, AdminGroupRequired, UpdateView):
    """Admin-only edit form for an existing product category."""

    model = ProductCategory
    template_name = 'mainapp/category_detail.html'
    form_class = ProductCategoryFormModel
    success_url = reverse_lazy('categories:category_list')
    login_url = reverse_lazy('auth:login')
    redirect_url = reverse_lazy('categories:category_list')

    def get_context_data(self, **kwargs):
        """Extend the template context with a bound form and page controls."""
        ctx = super().get_context_data(**kwargs)
        ctx['button'] = 'Save'
        ctx['type'] = 'edit'
        ctx['form'] = ProductCategoryFormModel(instance=self.object)
        return ctx
class ProductCategoryDeleteView(LoginRequiredMixin, AdminGroupRequired, DeleteView):
    """Admin-only confirmation page; performs a soft delete (is_active=False)."""
    queryset = ProductCategory.objects.filter(is_active=True)
    template_name = 'mainapp/category_detail.html'
    form_class = ProductCategoryFormModel
    success_url = reverse_lazy('categories:category_list')
    redirect_url = reverse_lazy('categories:category_list')
    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super(ProductCategoryDeleteView, self).get_context_data(**kwargs)
        # Tell the shared template which button label and page mode to render.
        context['form'] = ProductCategoryFormModel(instance=self.object)
        context['button'] = 'Yes, delete'
        context['type'] = 'delete'
        return context
    def delete(self, request, *args, **kwargs):
        # Soft delete: flag the category inactive instead of removing the row.
        self.object = self.get_object()
        self.object.is_active = False
        self.object.save()
        return HttpResponseRedirect(self.get_success_url())
| [
"zakamaldin.andrey@gmail.com"
] | zakamaldin.andrey@gmail.com |
f9eea8146725159dab91cb85be007a736b9ea8f7 | a1dc93f026a931dc29d8c5d411201ed276460198 | /blog/models.py | 752d78e80fe9988155497b72b2a98a25e212974d | [] | no_license | EricLULU/blog | 44116497e6bba6be3bd7c558403c0a2c6867e880 | 99ad323ba661787518983416cc30565749cab1d2 | refs/heads/master | 2020-03-27T06:57:46.441258 | 2018-08-26T04:42:15 | 2018-08-26T04:42:15 | 146,151,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | from __future__ import unicode_literals
from django.db import models
from DjangoUeditor.models import UEditorField
# Create your models here.
# Create your models here.
class Article(models.Model):
    """Blog post with a rich-text body edited through UEditor."""
    title = models.CharField(u"博客标题",max_length = 100) # post title
    category = models.CharField(u"博客标签",max_length = 50,blank = True) # post tag/category
    pub_date = models.DateTimeField(u"发布日期",auto_now_add = True,editable=True) # publish time, set on create
    update_time = models.DateTimeField(u'更新时间',auto_now=True,null=True) # last-modified time, set on save
    #content = models.TextField(blank=True, null=True) # old plain-text body field
    content = UEditorField(u"文章正文",height=300,width=1000,default=u'',blank=True,imagePath="uploads/blog/images/",
                           toolbars='besttome',filePath='uploads/blog/files/')
    def __unicode__(self):
        # NOTE(review): Python 2 style; __str__ would be needed under Python 3.
        return self.title
    class Meta: # newest posts first
        ordering = ['-pub_date']
        verbose_name = "文章"
        verbose_name_plural = "文章"
"2899325627@qq.com"
] | 2899325627@qq.com |
3488315b430b258804d4cecc4627b302cb4d17c7 | f8e71d381159a51f192be9ade66d0ae20ddbe0f1 | /game.py | 3acf9cf0b1e5580a1b3e2e64f10f5c927401fd0b | [] | no_license | jdonnal/lab2 | 7266a5b8d5432d96a01244bcde77775eedd98a1c | 9d95cdb56804d17d4db0ba46fe6a652a58c990e2 | refs/heads/master | 2021-03-16T08:42:43.662338 | 2018-02-20T18:39:41 | 2018-02-20T18:39:41 | 119,406,873 | 0 | 0 | null | 2018-02-25T21:17:46 | 2018-01-29T16:13:25 | Python | UTF-8 | Python | false | false | 3,566 | py | #!/usr/bin/python
# import system modules
import pygame,sys
from pygame.locals import *
import random
import time
import serial
# import custom modules
import launcher
import target
import rock
from colors import *
# tunable constants
HEIGHT=400
WIDTH=500
FPS = 30
TARGET_WIDTH = 50
def main():
    """Run the Launchr game loop: poll the Arduino, fire rocks, draw frames."""
    # start up pygame and build a game window
    pygame.init()
    fpsClock=pygame.time.Clock()
    window = pygame.display.set_mode((WIDTH,HEIGHT),0,32)
    pygame.display.set_caption('Launchr')
    # Arduino on a hard-coded serial port; 0.5 s read timeout.
    # NOTE(review): writing str (not bytes) suggests Python 2 / pyserial 2.x.
    s = serial.Serial("/dev/ttyACM1",timeout=0.5)
    # create custom objects
    my_launcher = launcher.Launcher(0,HEIGHT-20)
    my_rock = rock.Rock(0,HEIGHT-20)
    my_target = target.Target((random.random()*280)+50, HEIGHT-20,
                              TARGET_WIDTH)
    objs = [my_launcher, my_rock, my_target]
    # Main game loop
    while(True):
        # 1 Process Events
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                """arduino controls launcher mag/angle
                if event.key == pygame.K_UP:
                    my_launcher.changeAngle(3)
                if event.key == pygame.K_DOWN:
                    my_launcher.changeAngle(-3)
                if event.key == pygame.K_RIGHT:
                    my_launcher.changeMag(5)
                if event.key == pygame.K_LEFT:
                    my_launcher.changeMag(-5)
                """
                # Space fires the rock, but only when one isn't already in flight.
                if ((event.key == pygame.K_SPACE) and
                    not my_rock.isMoving()):
                    my_launcher.fire(my_rock)
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
        # 2 Update Game State
        # read from arduino
        # 'p' requests a reading; reply is "angleRaw,magRaw" with 10-bit values.
        s.write('p')
        str_data = s.readline()
        if(len(str_data)>0):
            # Scale the 0..1023 ADC readings to 0..90 degrees and 0..100 magnitude.
            data = [int(x) for x in str_data.split(',')]
            my_launcher.setAngle((data[0]/1024.0)*90)
            my_launcher.setMag((data[1]/1024.0)*100)
        my_rock.move(1.0/FPS) # force floating point division
        if(my_rock.y>HEIGHT):
            # rock is below the screen
            my_rock.moveTo(0,HEIGHT-20)
            s.write('r')  # presumably lights the "miss" LED — confirm firmware
            displayMessage(window,"Miss!")
        if(my_target.hitBy(my_rock.getRect())):
            # rock hit the target!
            my_rock.moveTo(0,HEIGHT-20)
            s.write('g')  # presumably lights the "hit" LED — confirm firmware
            displayMessage(window,"Hit!")
            my_target.moveTo((random.random()*280)+50)
        # 3 Update Display
        drawWorld(window)
        for obj in objs:
            obj.draw(window)
        pygame.display.update()
        fpsClock.tick(FPS)
def drawWorld(surf):
    """Paint the static scene onto *surf*: sky background, grass strip along
    the bottom edge, and the game title centered near the top."""
    # Sky fills the whole surface; the grass is a 20px strip at the bottom.
    surf.fill(SKY_BLUE)
    pygame.draw.rect(surf, GRASS_GREEN, (0, HEIGHT - 20, WIDTH, 20))
    # Render and blit the title text.
    title_font = pygame.font.Font('freesansbold.ttf', 32)
    title_surface = title_font.render('Launchr 1.0',
                                      True, BLACK)
    title_rect = title_surface.get_rect()
    title_rect.center = (WIDTH / 2, 20)
    surf.blit(title_surface, title_rect)
def displayMessage(surf, msg):
    """Flash *msg* centered on *surf* for one second.

    Blocking: the whole game (including event processing) freezes while the
    message is shown.
    """
    message_font = pygame.font.Font('freesansbold.ttf', 40)
    message_surface = message_font.render(msg,
                                          True, ORANGE)
    message_rect = message_surface.get_rect()
    message_rect.center = (WIDTH / 2, HEIGHT / 2)
    surf.blit(message_surface, message_rect)
    pygame.display.update()
    time.sleep(1)
# Script entry point.
if __name__=="__main__":
    main()
| [
"donnal@usna.edu"
] | donnal@usna.edu |
7b4eec2745a3598c581f6fe69e36c47233072b3d | a297d72afd7b3a31afa0ec3e841b33e6116cb4ee | /timefixer.py | 47e455f921fca246bd72563a2c62ee6b64011fa5 | [] | no_license | s0ph0s-2/mp3-chapter-scripts | 0165641a76173bf2e0637049ed95a5ec6d930ac7 | c51d76555be44d3d961cd211c680c2ec1f592825 | refs/heads/master | 2021-04-28T08:03:16.662085 | 2018-07-14T20:59:54 | 2018-07-14T20:59:54 | 122,237,964 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,952 | py | #!/usr/local/bin/python3
# Convert output from log2json.py into the format required by chaptagger.py
import json
import sys
import os
import datetime
INTRO_LENGTH = 105670
# Do the actual conversion
# Do the actual conversion
def convert_to_deltas(json_object, time_mode):
    """Convert [title, timestamp-string] pairs into [title, offset-ms] pairs.

    The episode start is the *last* track whose title starts with "fnt-"
    (the intro).  If no such track exists, the first track's time minus
    INTRO_LENGTH is used instead and every track is kept.

    json_object -- list of [title, timestamp] pairs, oldest first
    time_mode   -- "weechat", "znc" or "textual"; selects the timestamp format
    Returns a list of [title, milliseconds-since-intro] pairs (floats).
    Raises IndexError if json_object is empty.
    """
    # Fix: the original "initialized" intro_time with the datetime.timedelta
    # *class* (not an instance) and carried an unused found_intro flag; both
    # were dead code since intro_time is always assigned below before use.
    deltas = []
    # Set time format string based on time_mode
    time_fmt_str = ""
    if time_mode == "weechat":
        time_fmt_str = "%Y-%m-%d %H:%M:%S"
    elif time_mode == "znc":
        time_fmt_str = "%H:%M:%S"
    elif time_mode == "textual":
        time_fmt_str = "%Y-%m-%dT%H:%M:%S%z"
    # Find the last intro ("fnt-") track, if any.
    intro_index = -1
    for i in range(0, len(json_object)):
        if json_object[i][0].startswith("fnt-"):
            intro_index = i
    # If we have an intro track, use it as the first time.
    if intro_index >= 0:
        intro_time = datetime.datetime.strptime(
            json_object[intro_index][1],
            time_fmt_str
        )
    # Otherwise, assume the user has removed erroneous tracks from the start of
    # the tracklist and use the first track (minus the length of the intro) as
    # the starting time.
    else:
        intro_time = datetime.datetime.strptime(
            json_object[0][1],
            time_fmt_str
        ) - datetime.timedelta(milliseconds=INTRO_LENGTH)
    # For each datapoint from the intro onwards, compute its offset.
    start = intro_index if intro_index > 0 else 0
    for datapoint in json_object[start:]:
        timeobj = datetime.datetime.strptime(datapoint[1], time_fmt_str)
        # Handle next-day times: a leading "00" hour means the log rolled
        # past midnight (only meaningful for znc's %H:%M:%S format).
        if datapoint[1].startswith("00"):
            timeobj = timeobj + datetime.timedelta(days=1)
        offset = timeobj - intro_time
        deltas.append([datapoint[0], offset.total_seconds() * 1000])
    # Return the list of titles with times
    return deltas
# Main method
# Main method
def main(directory, time_mode):
    """Process every .json tracklist in *directory* into a "<name>.fix.json"
    file of [title, offset-ms] pairs; non-directories are silently ignored."""
    if not os.path.isdir(directory):
        return
    for entry in os.listdir(directory):
        path = os.path.join(directory, entry)
        # Only regular files ending in ".json" are processed.
        if not os.path.isfile(path) or not entry.endswith(".json"):
            continue
        with open(path, "r") as source:
            tracklist = json.load(source)
        converted = []
        try:
            converted = convert_to_deltas(tracklist, time_mode)
        except IndexError:
            print("Starting title not found in", entry)
        # Write the result (empty on failure) alongside the input.
        fixed_path = os.path.join(directory, entry[:-5] + ".fix.json")
        with open(fixed_path, "w") as sink:
            json.dump(converted, sink)
# If it's the main method, read sys.argv and pass it to main
# Script entry point: timefixer.py <directory> [--weechat|--znc|--textual]
if __name__ == "__main__":
    directory = sys.argv[1]
    # Fix: default to "znc" up front.  Previously time_mode stayed "" when no
    # flag was given at all, so convert_to_deltas parsed with an empty
    # strptime format and crashed on the first timestamp.
    time_mode = "znc"
    if len(sys.argv) > 2:
        if sys.argv[2] == "--weechat":
            print("Setting time mode to weechat.")
            time_mode = "weechat"
        elif sys.argv[2] == "--znc":
            print("Setting time mode to znc.")
            time_mode = "znc"
        elif sys.argv[2] == "--textual":
            print("Setting the time mode to textual.")
            time_mode = "textual"
        else:
            # Unknown flag: keep the znc default (same message as before).
            print("Setting time mode to znc.")
    main(directory, time_mode)
| [
"alexander.g.langley@gmail.com"
] | alexander.g.langley@gmail.com |
de41c1cbfa2002f38b52782617d66e63d0be9ce9 | b00bd51714e3aafedd7201034404044117164649 | /planet.py | b4f911e92ffc48a452e7833048ae93e4f3817373 | [] | no_license | jjdelc/yt-aggregator | 8a0415f0fe8851414af9bcb3d4b1f0800c199a5a | 2954f160174f74d74d7928bfdd858d24a13a8ac0 | refs/heads/master | 2021-07-12T03:50:22.962196 | 2020-12-28T06:15:42 | 2020-12-28T06:15:42 | 225,235,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,659 | py | import sys
from os.path import basename
from datetime import datetime
from urllib.parse import urlparse
from urllib.request import urlopen
import xml.etree.ElementTree as ET
CSS = """
body {
padding: 0;
margin: 0;
font-family: sans-serif;
}
.content {
max-width: 40rem;
margin: 0 auto;
padding: 0;
}
.main-feed {
list-style: None;
}
.main-feed li {
margin-bottom: 0.5rem;
border: 1px solid #EEE;
}
.main-feed a {
display: grid;
grid-template-columns: 40% auto;
text-decoration: none;
color: #444;
}
.preview {
display: block;
max-width: 100%;
height: auto;
}
.video-info {
padding: 0 0.5rem;
box-sizing: border-box;
}
.video-info h1 {
font-size: 1rem;
}
.author {
font-size: 0.9rem;
}
.video-info datetime {
font-size: 0.75rem;
font-style: italic;
color: #AAA;
}
#doFilter {
display: grid;
grid-template-columns: 80% auto;
margin: 5px auto;
}
#filterField {
line-height: 1.4rem;
}
header {
text-align: right;
margin: 0;
}
header a {
text-decoration: none;
margin: 0;
font-size: 0.75rem;
}
header h1 {
font-size: 1.2rem;
line-height: 190%;
text-align: left;
color: #555;
}
"""
JS = """
const doFilter = document.getElementById("doFilter");
const filterField = document.getElementById("filterField");
const videoList = document.getElementById("videoList");
doFilter.onsubmit = () => {
const searchText = filterField.value.toLowerCase();
const emptyCb = el => {
el.style.display = "block";
}
const searchCb = el => {
if (el.dataset.search.toLowerCase().includes(searchText)) {
el.style.display = "block";
} else {
el.style.display = "none";
}
}
const callback = !!searchText?searchCb:emptyCb;
[...videoList.children].forEach(callback);
return false;
};
"""
TITLE = "Transformers toy reviews"
DESCRIPTION = "Feed of Youtube Transformers reviewers"
CHANNELS = [
"UCTitdGNU65UUwEG75sWoLEA", # Cybertron 21
"UC90avc9FvlyP1PTH0QXluKg", # peolesdru
"UCt6y380FcB6E4rdLae4o2sA", # Peaugh
"UCVEDzEYSNi5FwEKmLY6LT0Q", # Emgo316
"UCAPhTcmL-a69jL1KWOPe7GA", # Chris McFeely
"UCTawVEKc2B7x23jwy_95oJg", # Optibotimus
"UCUChzcoFWnmErS2U-kUOu4w", # Tonton
"UCJVOioJ7oEFRq7pzVMZKa3A", # Chefatron
"UCvTEl-XHO9BY3_I1Zssfmjg", # Patriot Prime
"UClOs-_ohni8SfWRS6glsGIQ", # Lazy Eyebrow
"UCP3AVTw_73U_8jMzLw5TbEg", # Thew adams
"UCSavbCuXmMyRzN0mjwB1uEQ", # Vangelus
"UCx7b66RpQRLuOXvItxy0Wxg", # Transformers official
"UCbQhXIBPC_1DbUDn0IdhRrQ", # Cybertronian Beast
"UCmXbBLj_Iv1ElpJ7LIABdJQ", # Ballmatrix
"UC8tA9aqVj587UzMmW7YTM4w", # Knerdout
"UCzSYHWdPoWrA3BqI5JoHZ5g", # Chosen Prime
"UCKl_L5vnewbZNMp8YutEE9Q", # TM Reviews
"UCEOvklHZd3m5qNxtMis4zRQ", # RodimusPrimal
"UCi7KR0GzS7veFC6IGKuYHKg", # Transformers ENI
"UCoAIXvNjN5bYzMYNce0M3uw", # Bens Collectables
"UC1Dt3QcVWHBIMYSjuZWzJNA", # Toyhax
"UCnWxXRUOv5zvqbNiXzUNN8A", # A3u
"UCzo0rWyQdCznorrvd-OzEBA", # Masterpiece Reanimated
"UCA5e8vqo-aJpZtd7EEDEuDg", # Transform My Transformers
"UCvU4TcenqcDTrkxaqGEedfw", # Starscreamer
"UC70F5cxWj0AeZfbbjJg-eGg", # JobbytheHong
"UCVRX-xxa69loL7-Ac2vAF_w", # PrimeVsPrime
"UCF3H90k_0pxZfPIo6Dv99tg", # UltraPrimal
"UCY8cdfayYVQc4hqrSWXu4yQ", # RobsteinOne
"UCbhS8EgR7Z5RVe-an1jRLiQ", # PandaJack Productions
"UCxQyiTDqGlKysClRade7opA", # Ome9asupreme
"UC64sMcUNRWAUlKeEceZOlRA", # Gavin Spence
"UC8ahRx5Pg4sXvSYS6wgaU8g", # Sixo
"UCRpcYdbTlk3bZsa1EyCFQzg", # Larkin's Lair
]
RSS_FEED = "https://www.youtube.com/feeds/videos.xml?channel_id={}"
RSS_URL = "https://www.youtube.com/channel/{}/"
ENTRY_TAG = "{http://www.w3.org/2005/Atom}entry"
AUTHOR_TAG = "{http://www.w3.org/2005/Atom}author"
PREVIEW_IMG = "https://i.ytimg.com/vi/{}/hqdefault.jpg"
WATCH_URL = "https://www.youtube.com/watch?v={}"
MAX_ENTRIES = 100
FEED_ITEM = """<li data-search="{search}">
<a href="{link}">
<img src="{image}" loading="lazy" class="preview"/>
<div class="video-info">
<h1>{title}</h1>
<p class="author">{author}</p>
<datetime>{published}</datetime>
</div>
</a>
</li>"""
BASE_HTML = """<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, user-scalable=yes, initial-scale=1" />
<meta name="description" content="{description}"/>
<title>{title}</title>
<style>{css}</style>
</head>
<body>
<header>
<a href="https://www.lostlight.net/" target="_blank">Lostlight - Personal Transformers inventory</a>
<h1 class="content">Transformers reviews</h1>
</header>
<form class="content" id="doFilter">
<input id="filterField" type="search" placeholder="Search videos"/> <input type="submit" value="Search"/>
</form>
<ol class="main-feed content" id="videoList">{feed}</ol>
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-86526390-2"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){{dataLayer.push(arguments);}}
gtag('js', new Date());
gtag('config', 'UA-86526390-2');
</script>
<script>
{js}
</script>
</body>
</html>
"""
def parse_tagname(tag):
    """Strip the leading "{namespace}" prefix from an ElementTree tag name."""
    namespace_and_local = tag.split("}", 1)
    return namespace_and_local[1]
def read_entry(entry, author):
    """Flatten an Atom <entry> element into a dict of {localname: text},
    attach *author*, and parse "published" into a naive datetime."""
    result = {}
    for child in entry:
        result[parse_tagname(child.tag)] = child.text
    result["author"] = author
    # Keep only "YYYY-MM-DDTHH:MM:SS"; the trailing UTC offset is dropped.
    published_text = result["published"][:19]
    result["published"] = datetime.strptime(published_text, "%Y-%m-%dT%H:%M:%S")
    return result
def process_entry(entry):
    """Shape a parsed feed entry into the fields FEED_ITEM.format expects."""
    video_id = entry["videoId"]
    author_name = entry["author"]["name"]
    title = entry["title"]
    return {
        "title": title,
        "image": PREVIEW_IMG.format(video_id),
        "link": WATCH_URL.format(video_id),
        "published": entry["published"].strftime("%b %d"),
        "author": author_name,
        "search": "{} {}".format(title, author_name),
    }
def feed_author(root):
    """Extract the feed-level <author> element as a {localname: text} dict."""
    author_elements = [child for child in root if child.tag == AUTHOR_TAG]
    # IndexError here means the feed carried no <author> element at all.
    author = author_elements[0]
    return {parse_tagname(field.tag): field.text for field in author}
def entry_2_html(entry):
    """Render one feed entry as an <li> using the FEED_ITEM template."""
    fields = process_entry(entry)
    return FEED_ITEM.format(**fields)
def main():
    """Fetch each channel's RSS feed, merge and sort all videos newest-first,
    and write the rendered HTML page to the path given as the first CLI arg."""
    output = sys.argv[1]
    all_entries = []
    for channel_id in CHANNELS:
        # NOTE(review): no error handling here -- one unreachable channel
        # aborts the whole build.
        rss = urlopen(RSS_FEED.format(channel_id)).read()
        root = ET.fromstring(rss)
        author = feed_author(root)
        entries = [read_entry(e, author) for e in root if e.tag == ENTRY_TAG]
        all_entries.extend(entries)
        print("Processing {} - {} videos".format(author["name"], len(entries)))
    # Newest first.  reversed(sorted(...)) is kept deliberately: it reverses
    # tie order too, which sorted(reverse=True) would not.
    all_entries = reversed(sorted(all_entries, key=lambda e: e["published"]))
    # Render only the newest MAX_ENTRIES items.
    html_entries = [entry_2_html(e) for e in list(all_entries)[:MAX_ENTRIES]]
    html = BASE_HTML.format(
        title=TITLE,
        feed="\n".join(html_entries),
        description=DESCRIPTION,
        css=CSS,
        js=JS
    )
    with open(output, "w") as fh:
        fh.write(html)
# Script entry point: planet.py <output-html-path>
if __name__ == "__main__":
    main()
| [
"jjdelc@gmail.com"
] | jjdelc@gmail.com |
8ecbc20cd751993e396cb818b5101fcf02f432c0 | 21189f46ab7d28e196541547e54da3cd5ccb5e02 | /home/bin/i3-rename-workspace | 6dc685a1dc28cb0e6a0fe0118eb1cce17a47acff | [] | no_license | tscolari/dotfiles-1 | 7e0c97cc25b5704625b7f5c6922c200269cf906b | f6dfe6c656e535fcb71cde23c1bf8699548fc05a | refs/heads/master | 2021-02-28T08:57:16.896577 | 2020-02-02T02:44:04 | 2020-02-02T02:49:35 | 69,583,636 | 0 | 0 | null | 2016-09-29T15:58:00 | 2016-09-29T15:57:59 | null | UTF-8 | Python | false | false | 8,651 | #!/usr/bin/env python3
#
# github.com/justbuchanan/i3scripts
#
# This script listens for i3 events and updates workspace names to show icons
# for running programs. It contains icons for a few programs, but more can
# easily be added by editing the WINDOW_ICONS list below.
#
# It also re-numbers workspaces in ascending order with one skipped number
# between monitors (leaving a gap for a new workspace to be created). By
# default, i3 workspace numbers are sticky, so they quickly get out of order.
#
# Dependencies
# * xorg-xprop - install through system package manager
# * i3ipc - install with pip
# * fontawesome - install with pip
#
# Installation:
# * Download this repo and place it in ~/.config/i3/ (or anywhere you want)
# * Add "exec_always ~/.config/i3/i3scripts/autoname_workspaces.py &" to your i3 config
# * Restart i3: $ i3-msg restart
#
# Configuration:
# The default i3 config's keybindings reference workspaces by name, which is an
# issue when using this script because the "names" are constantly changing to
# include window icons. Instead, you'll need to change the keybindings to
# reference workspaces by number. Change lines like:
# bindsym $mod+1 workspace 1
# To:
# bindsym $mod+1 workspace number 1
import argparse
import i3ipc
import logging
import signal
import sys
import fontawesome as fa
from util import *
# Add icons here for common programs you use. The keys are the X window class
# (WM_CLASS) names (lower-cased) and the icons can be any text you want to
# display.
#
# Most of these are character codes for font awesome:
# http://fortawesome.github.io/Font-Awesome/icons/
#
# If you're not sure what the WM_CLASS is for your application, you can use
# xprop (https://linux.die.net/man/1/xprop). Run `xprop | grep WM_CLASS`
# then click on the application you want to inspect.
WINDOW_ICONS = {
'alacritty': fa.icons['terminal'],
'atom': fa.icons['atom'],
'authy': fa.icons['shield-alt'],
'banshee': fa.icons['play'],
'blender': fa.icons['cube'],
'chromium': fa.icons['chrome'],
'cura': fa.icons['cube'],
'darktable': fa.icons['image'],
'discord': fa.icons['discord'],
'eclipse': fa.icons['code'],
'emacs': fa.icons['code'],
'eog': fa.icons['image'],
'evince': fa.icons['file-pdf'],
'evolution': fa.icons['envelope'],
'feh': fa.icons['image'],
'file-roller': fa.icons['compress'],
'firefox': fa.icons['firefox'],
'firefox-esr': fa.icons['firefox'],
'gimp-2.8': fa.icons['image'],
'gnome-control-center': fa.icons['toggle-on'],
'gnome-terminal-server': fa.icons['terminal'],
'google-chrome': fa.icons['chrome'],
'gpick': fa.icons['eye-dropper'],
'imv': fa.icons['image'],
'java': fa.icons['code'],
'jetbrains-studio': fa.icons['code'],
'keybase': fa.icons['key'],
'kicad': fa.icons['microchip'],
'kitty': fa.icons['terminal'],
'libreoffice': fa.icons['file-alt'],
'lua5.1': fa.icons['moon'],
'mpv': fa.icons['tv'],
'mupdf': fa.icons['file-pdf'],
'mysql-workbench-bin': fa.icons['database'],
'nautilus': fa.icons['copy'],
'nemo': fa.icons['copy'],
'openscad': fa.icons['cube'],
'parsecd': fa.icons['gamepad'],
'pavucontrol': fa.icons['volume-up'],
'postman': fa.icons['space-shuttle'],
'rhythmbox': fa.icons['play'],
'signal': fa.icons['comment-dots'],
'slack': fa.icons['slack'],
'slic3r.pl': fa.icons['cube'],
'spotify': fa.icons['spotify'], # could also use the 'spotify' icon
'station': fa.icons['comments'],
'steam': fa.icons['steam'],
'subl': fa.icons['file-alt'],
'subl3': fa.icons['file-alt'],
'sublime_text': fa.icons['file-alt'],
'thunar': fa.icons['copy'],
'thunderbird': fa.icons['envelope'],
'totem': fa.icons['play'],
'urxvt': fa.icons['terminal'],
'xfce4-terminal': fa.icons['terminal'],
'xournal': fa.icons['file-alt'],
'yelp': fa.icons['code'],
'zenity': fa.icons['window-maximize'],
'zoom': fa.icons['video'],
}
# This icon is used for any application not in the list above
DEFAULT_ICON = fa.icons['window-maximize']
SEPARATOR = "|"
# Global setting that determines whether workspaces will be automatically
# re-numbered in ascending order with a "gap" left on each monitor. This is
# overridden via command-line flag.
RENUMBER_WORKSPACES = False
def ensure_window_icons_lowercase():
    """Re-key WINDOW_ICONS so every WM_CLASS key is lower-case.

    icon_for_window() lower-cases the window's class before looking it up,
    so a mixed-case key added to the table would otherwise never match.
    """
    # Iterate over a snapshot of the keys: the original iterated the dict
    # directly while inserting and deleting entries, which raises
    # "RuntimeError: dictionary changed size during iteration" as soon as
    # any key actually needs re-casing.
    for cls in list(WINDOW_ICONS):
        if cls != cls.lower():
            WINDOW_ICONS[cls.lower()] = WINDOW_ICONS[cls]
            del WINDOW_ICONS[cls]
def _icon_for_values(values):
    """Return the icon for the first (lower-cased) value found in
    WINDOW_ICONS, or None when *values* is empty/None or nothing matches."""
    if values:
        for value in values:
            value = value.lower()  # case-insensitive matching
            if value in WINDOW_ICONS:
                return WINDOW_ICONS[value]
    return None


def icon_for_window(window):
    """Pick a display icon for *window*.

    Tries every WM_NAME value first, then every WM_CLASS value, and falls
    back to DEFAULT_ICON (logging the unmatched classes).  The two identical
    lookup loops of the original are factored into _icon_for_values().
    """
    names = xprop(window.window, 'WM_NAME')
    icon = _icon_for_values(names)
    if icon is not None:
        return icon
    classes = xprop(window.window, 'WM_CLASS')
    icon = _icon_for_values(classes)
    if icon is not None:
        return icon
    logging.info(
        'No icon available for window with classes: %s' % str(classes))
    return DEFAULT_ICON
def icon_for_window_with_pango_markup(window):
    """Return *window*'s icon wrapped in pango <span> markup and, as a side
    effect, set the window's i3 title_format to "<icon> | <title>".

    Relies on the module-level `i3` connection created in __main__.
    """
    icon = icon_for_window(window)
    if window.focused:
        # Focused window: bright, fully opaque glyph.
        icon = "<span face='Font Awesome 5 Free' color='#ffffff'>%s</span>" % icon
    else:
        # Unfocused window: same glyph at 50% opacity ("%%" escapes the %).
        icon = "<span face='Font Awesome 5 Free' alpha='50%%'>%s</span>" % icon
    # "%%title" renders as the literal i3 placeholder "%title".
    i3.command('[id=%s] title_format %s %s <small>%%title</small>' % (window.window, icon, SEPARATOR))
    return icon
def rename_workspace(old, new):
    """Rename an i3 workspace via the module-level `i3` connection."""
    i3.command('rename workspace "%s" to "%s"' % (old, new))
def rename_workspaces(i3):
    """Rewrite every workspace name to "<num>:<shortname> <icons>".

    Icons come from the windows present on each workspace.  When
    RENUMBER_WORKSPACES is set, workspaces are renumbered in ascending order
    with one number skipped between monitors, leaving a gap where a new
    workspace can be inserted later (e.g. [1, 2, 3] and [5, 6]).
    """
    ws_infos = i3.get_workspaces()
    prev_output = None
    n = 1
    for ws_index, workspace in enumerate(i3.get_tree().workspaces()):
        # Assumes get_workspaces() and get_tree().workspaces() list
        # workspaces in the same order -- TODO confirm.
        ws_info = ws_infos[ws_index]
        name_parts = parse_workspace_name(workspace.name)
        new_icons = ' '.join([icon_for_window(w) for w in workspace.leaves()])
        # As we enumerate, leave one gap in workspace numbers between each monitor.
        # This leaves a space to insert a new one later.
        if ws_info.output != prev_output and prev_output != None:
            n += 1
        prev_output = ws_info.output
        # optionally renumber workspace
        new_num = n if RENUMBER_WORKSPACES else name_parts.num
        n += 1
        new_name = construct_workspace_name(
            NameParts(
                num=new_num,
                shortname=name_parts.shortname, icons=new_icons))
        # Skip the IPC round-trip when nothing changed.
        if workspace.name == new_name:
            continue
        rename_workspace(workspace.name, new_name)
# Rename workspaces to just numbers and shortnames, removing the icons.
def on_exit(i3):
    """SIGINT/SIGTERM handler: strip the icons from every workspace name,
    then shut down the i3 event loop and exit the process."""
    for workspace in i3.get_tree().workspaces():
        name_parts = parse_workspace_name(workspace.name)
        # icons=None drops the icon segment from the reconstructed name.
        new_name = construct_workspace_name(
            NameParts(
                num=name_parts.num, shortname=name_parts.shortname,
                icons=None))
        if workspace.name == new_name:
            continue
        rename_workspace(workspace.name, new_name)
    i3.main_quit()
    sys.exit(0)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=
        "Rename workspaces dynamically to show icons for running programs.")
    # Fix: this store_true flag previously had default=True, so
    # `not args.norenumber_workspaces` was always False and renumbering
    # could never be enabled, flag or no flag.  store_true flags must
    # default to False for the flag to mean anything.
    parser.add_argument(
        '--norenumber_workspaces',
        action='store_true',
        default=False,
        help=
        "Disable automatic workspace re-numbering. By default, workspaces are automatically re-numbered in ascending order."
    )
    args = parser.parse_args()
    RENUMBER_WORKSPACES = not args.norenumber_workspaces

    logging.basicConfig(level=logging.INFO)

    ensure_window_icons_lowercase()

    i3 = i3ipc.Connection()

    # Exit gracefully when ctrl+c is pressed
    for sig in [signal.SIGINT, signal.SIGTERM]:
        signal.signal(sig, lambda signal, frame: on_exit(i3))

    # Initial pass over the workspaces that already exist.
    rename_workspaces(i3)

    # Call rename_workspaces() for relevant window events
    def event_handler(i3, e):
        if e.change in ['new', 'close', 'move', 'focus']:
            rename_workspaces(i3)

    i3.on('window', event_handler)
    i3.on('workspace::move', event_handler)
    i3.main()
| [
"cfcluan@gmail.com"
] | cfcluan@gmail.com | |
80d7750f7f977b876f0ce61427fcd1932f7c6f2f | 2fd6c260b8db490ed9dc594f2a6578bb736cb9ad | /src/test-apps/happy/tests/standalone/wdmNext/test_weave_wdm_next_one_way_subscribe_16.py | a4184ef2423b5b02d1874dbcd0e40ca97546c89f | [
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] | permissive | pornin/openweave-core | 6891a89b493566e24c4e413f6425ecbf59663a43 | b6ac50aad6eb69c7a81c9916707f3c7ef098ec63 | refs/heads/master | 2020-04-02T00:55:05.328569 | 2018-10-19T17:28:34 | 2018-10-19T17:28:34 | 153,828,148 | 1 | 0 | Apache-2.0 | 2018-10-19T18:52:53 | 2018-10-19T18:52:53 | null | UTF-8 | Python | false | false | 3,051 | py | #!/usr/bin/env python
#
# Copyright (c) 2016-2017 Nest Labs, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# @file
# Calls Weave WDM one way subscribe between nodes.
# H03: One way Subscribe: Client Continuous Events. Client cancels
# L09: Stress One way Subscribe: Client Continuous Events. Client cancels
#
import unittest
import set_test_path
from weave_wdm_next_test_base import weave_wdm_next_test_base
import WeaveUtilities
class test_weave_wdm_next_one_way_subscribe_16(weave_wdm_next_test_base):
    """H03/L09: one-way subscribe where the publisher emits continuous
    Security events and the client ends by cancelling the subscription.

    Note: this file is Python 2 (print statements below).
    """

    def test_weave_wdm_next_one_way_subscribe_16(self):
        wdm_next_args = {}

        # Client side: 4 traits, final status 0 = client cancels.
        wdm_next_args['wdm_option'] = "one_way_subscribe"
        wdm_next_args['total_client_count'] = 4
        wdm_next_args['final_client_status'] = 0
        wdm_next_args['timer_client_period'] = 16000
        wdm_next_args['test_client_iterations'] = 5
        wdm_next_args['test_client_delay'] = 2000
        wdm_next_args['enable_client_flip'] = 0

        # Publisher side: generates a Security event every 2 s.
        wdm_next_args['total_server_count'] = 4
        wdm_next_args['final_server_status'] = 4
        wdm_next_args['timer_server_period'] = 15000
        wdm_next_args['test_server_delay'] = 0
        wdm_next_args['enable_server_flip'] = 0
        wdm_next_args['server_event_generator'] = 'Security'
        wdm_next_args['server_inter_event_period'] = 2000

        # Every iteration must show the client ending/cancelling the
        # subscription and the publisher's handler being freed.
        wdm_next_args['client_log_check'] = [('Client\[0\] \[(ALIVE|CONFM)\] EndSubscription Ref\(\d+\)', wdm_next_args['test_client_iterations']),
                                             ('Client\[0\] \[CANCL\] _AbortSubscription Ref\(\d+\)', wdm_next_args['test_client_iterations'])]
        wdm_next_args['server_log_check'] = [('Handler\[0\] \[(ALIVE|CONFM)\] CancelRequestHandler', wdm_next_args['test_client_iterations']),
                                             ('Handler\[0\] Moving to \[ FREE\] Ref\(0\)', wdm_next_args['test_client_iterations'])]

        # Tag derived from the class name, minus its common prefix.
        wdm_next_args['test_tag'] = self.__class__.__name__[19:].upper()
        wdm_next_args['test_case_name'] = ['H03: One way Subscribe: Publisher Continuous Events. Client cancels',
                                           'L09: Stress One way Subscribe: Publisher Continuous Events. Client cancels']
        print 'test file: ' + self.__class__.__name__
        print "weave-wdm-next test B03 and L09"

        super(test_weave_wdm_next_one_way_subscribe_16, self).weave_wdm_next_test_base(wdm_next_args)
super(test_weave_wdm_next_one_way_subscribe_16, self).weave_wdm_next_test_base(wdm_next_args)
# Test-runner entry point.
if __name__ == "__main__":
    WeaveUtilities.run_unittest()
| [
"rszewczyk@nestlabs.com"
] | rszewczyk@nestlabs.com |
8ed0afb59ea8f91c6ed1dade308446d49b35f9f7 | 1edcd7f8dbca3de00e7cccb190ee1f4c78ca7cd6 | /stockphoto/views.py | f72147e9745aa7540c8b1a539472b91b2b70fff0 | [] | no_license | opendream/openkala | 24c54fae76bb12d10e41a90b977c60a0c920654e | c890748c7a7804e8eaf63d5ecb258c601a5bc292 | refs/heads/master | 2020-04-21T21:54:17.455375 | 2011-07-19T04:21:43 | 2011-07-19T04:21:43 | 1,648,780 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,604 | py | """
views.py --- non-generic views used by stockphoto
This file is a part of stockphoto, a simple photogallery app for
Django sites.
Copyright (C) 2006 Jason F. McBrayer <jmcbray-django@carcosa.net>
Copyright (C) 2006 William McVey <wamcvey@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
"""
# Create your views here.
# django imports
from django.conf import settings
from django import forms, http, template
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import get_object_or_404, render_to_response
from django.http import HttpResponse
from django.template.context import RequestContext
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.views.generic.list_detail import object_list, object_detail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.files import File
# other imports
import zipfile
import os
import stat
import shutil
from datetime import datetime
from tempfile import NamedTemporaryFile, mkdtemp
from PIL import Image
from PIL.ExifTags import TAGS
from datetime import datetime
from quarter.views import project_overview, project_getmode_helper
from quarter.models import Project
import utility
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Handling settings here
try:
STOCKPHOTO_BASE = settings.STOCKPHOTO_BASE.strip('/')
except AttributeError:
STOCKPHOTO_BASE = 'stockphoto'
def get_exif(fn):
    """Return the EXIF data of image file *fn* as a {tag_name: value} dict.

    Returns {} for images without EXIF data and for formats where PIL's
    private _getexif() is unavailable or fails (e.g. PNG).
    """
    ret = {}
    i = Image.open(fn)
    try:
        info = i._getexif()
    except Exception:
        # _getexif() is PIL-private and may be missing or raise on many
        # formats.  The original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; Exception is the widest we want.
        return {}
    if not info:
        return {}
    for tag, value in info.items():
        # Map the numeric EXIF tag to its name where known.
        decoded = TAGS.get(tag, tag)
        ret[decoded] = value
    return ret
# models
from models import Gallery, Photo
# views
def photo_add(request, project_id, gallery_id):
    """AJAX upload handler: store the raw request body as a Photo.

    The file's name/size/type arrive in X-File-* request headers.  When
    *gallery_id* is falsy, a new Gallery (titled from X-Gallery-Title) is
    created in the project first.  Returns the gallery's URL as plain text.
    """
    filename = request.META['HTTP_X_FILE_NAME']
    filesize = request.META['HTTP_X_FILE_SIZE']  # NOTE(review): unused
    filetype = request.META['HTTP_X_FILE_TYPE']
    # Only accept MIME types of the form "image/*".
    if filetype.split('/')[0] == 'image':
        im = SimpleUploadedFile(filename, request._raw_post_data, filetype)
        if gallery_id:
            gallery = Gallery.objects.get(id=gallery_id)
        else:
            project = Project.objects.get(id=project_id)
            gallery = Gallery(project=project, title=request.META['HTTP_X_GALLERY_TITLE'], created=request.user)
            gallery.save()
        photo = Photo(image=File(im), desc=filename, gallery=gallery)
        photo.save()
        return HttpResponse('/projects/%s/stockphoto/%s' % (str(project_id), str(gallery.id)))
    # NOTE(review): non-image uploads fall through and return None, which
    # Django rejects -- this should probably return an error HttpResponse.
def gallery_list(request, project_id):
    """List a project's galleries (paginated, newest first).

    Doubles as an upload endpoint: requests carrying an X-File-Name header
    are routed to photo_add() (which creates a new gallery).  Non-AJAX
    requests get the full project overview page instead.
    """
    if 'HTTP_X_FILE_NAME' in request.META:
        return photo_add(request, project_id, None)
    elif not request.GET.get('ajax'):
        return project_overview(request, project_id)

    # Mode: viewers get a read-only rendering of the list.
    mode = project_getmode_helper(request, project_id)
    is_view_mode = mode == 'view'

    return object_list(
        request,
        Gallery.objects.filter(project__id=project_id).order_by('-id'),
        template_object_name='gallery',
        template_name='stockphoto/gallery_list.html',
        paginate_by=15,
        extra_context={'project_id': project_id, 'is_view_mode': is_view_mode}
    )
def gallery_add(request, project_id):
    """Create an empty gallery in the project and return its URL as text.

    Requests flagged with ?ignore=... are acknowledged with an empty body.
    """
    if request.GET.get('ignore'):
        return HttpResponse('')
    owner_project = Project.objects.get(id=project_id)
    new_gallery = Gallery(
        project=owner_project,
        title=request.META['HTTP_X_GALLERY_TITLE'],
        created=request.user,
    )
    new_gallery.save()
    gallery_url = '/projects/%s/stockphoto/%s' % (str(project_id), str(new_gallery.id))
    return HttpResponse(gallery_url)
def gallery_detail(request, project_id, gallery_id):
    """Show one gallery's photos; also accepts uploads into that gallery.

    Requests carrying an X-File-Name header are routed to photo_add() with
    this gallery; non-AJAX requests get the project overview page instead.
    """
    if 'HTTP_X_FILE_NAME' in request.META:
        return photo_add(request, project_id, gallery_id)
    elif not request.GET.get('ajax'):
        return project_overview(request, project_id)

    # Mode: viewers get a read-only rendering.
    mode = project_getmode_helper(request, project_id)
    is_view_mode = mode == 'view'

    # Form for tagging this gallery against the project plan.
    # NOTE(review): the meaning of the literal 10 comes from
    # utility.plan_tags_form -- confirm (looks like a tag limit).
    plan_tags_form = utility.plan_tags_form(project_id, Gallery.objects.get(id=gallery_id), 10)

    return object_detail(
        request,
        Gallery.objects.all(),
        object_id=gallery_id,
        template_object_name='gallery',
        template_name='stockphoto/gallery_detail.html',
        extra_context={'project_id': project_id, 'is_view_mode': is_view_mode, 'plan_tags_form':plan_tags_form}
    )
def gallery_delete(request, project_id, gallery_id):
    """Delete a gallery (AJAX only) and return the refreshed gallery list.

    Deleting an already-missing gallery is a no-op instead of an error.
    """
    if not request.GET.get('ajax'):
        return project_overview(request, project_id)
    try:
        gallery = Gallery.objects.get(id=gallery_id)
    except Exception:
        # Fix: the original `except: pass` left `gallery` unbound and then
        # crashed with NameError on gallery.delete(); a missing gallery is
        # simply treated as already deleted.
        pass
    else:
        gallery.delete()
    return gallery_list(request, project_id)
def photo_detail(request, project_id, photo_id):
    """Show a single photo with its EXIF capture time (when available)."""
    if not request.GET.get('ajax'):
        return project_overview(request, project_id)

    photo = Photo.objects.get(id=photo_id)
    # NOTE(review): fullurl()[1:] assumes a root-relative URL that maps to a
    # local file path -- confirm.
    info = get_exif(photo.fullurl()[1:])
    # Pull a capture timestamp from EXIF, trying the common tags in order.
    if info:
        dt = info.get('DateTime') or info.get('DateTimeOriginal') or info.get('DateTimeDigitized')
        if dt:
            # EXIF stores timestamps as "YYYY:MM:DD HH:MM:SS".
            dt = datetime.strptime(dt, '%Y:%m:%d %H:%M:%S')
        else:
            dt = None
    else:
        dt = None

    # Mode: viewers get a read-only rendering.
    mode = project_getmode_helper(request, project_id)
    is_view_mode = mode == 'view'

    return object_detail(
        request,
        Photo.objects.all(),
        object_id=photo_id,
        template_object_name='photo',
        template_name='stockphoto/photo_detail.html',
        extra_context={'project_id': project_id, 'datetime': dt, 'is_view_mode': is_view_mode}
    )
def photo_delete(request, project_id, photo_id):
    """Delete a photo (AJAX only) and show a sensible next page.

    After deleting, display a neighbouring photo if one exists; if the
    deleted photo was the gallery's last, delete the now-empty gallery and
    show the gallery list.
    """
    if not request.GET.get('ajax'):
        return project_overview(request, project_id)
    try:
        photo = Photo.objects.get(id=photo_id)
    except Exception:
        # Fix: the original `except: pass` left `photo` unbound and the code
        # below crashed with NameError.  Treat a missing photo as already
        # deleted and fall back to the gallery list.
        return gallery_list(request, project_id)

    gallery_id = photo.gallery.id
    # Prefer showing the next photo, then the previous one.
    photo_id = None
    if photo.next():
        photo_id = photo.next().id
    elif photo.prev():
        photo_id = photo.prev().id
    photo.delete()

    if photo_id:
        return photo_detail(request, project_id, photo_id)
    else:
        # That was the only photo: remove the empty gallery too.
        gallery = Gallery.objects.get(id=gallery_id)
        gallery.delete()
        return gallery_list(request, project_id)
class ZipFileField(forms.FileField):
    """FileField that additionally validates the upload is a sound zip."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(forms.FileField, ...) deliberately skips
        # FileField.__init__ and calls Field.__init__ directly -- confirm
        # this is intentional.
        super(forms.FileField, self).__init__(*args, **kwargs)

    def clean(self, data, initial=None):
        # Run the base Field validation first (again bypassing FileField).
        super(forms.FileField, self).clean(initial or data)
        # testzip() returns the first corrupt member's name, or None when
        # the archive checks out.
        zip_file = zipfile.ZipFile(data)
        if zip_file.testzip():
            raise forms.ValidationError(self.error_messages['invalid'])
        return data
class ImportForm(forms.Form):
    """Batch-import form: a zip of images plus metadata shared by them all."""
    zip_file = ZipFileField(required=True)
    photographer = forms.CharField()
    date = forms.DateField(required=False)  # defaults to "now" when omitted
#@permission_required('gallery.add_photo')
def import_photos(request, thegallery):
"""Import a batch of photographs uploaded by the user.
Import a batch of photographs uploaded by the user, all with
the same information for gallery, photographer and date. The
title will be set from the filename, and the description will be
blank. Self-respecting photographers will edit the fields for
each photograph; this is just a way to get a bunch of photographs
uploaded quickly.
The photographs should be wrapped up in a zip archive. The
archive will be unpacked (and flattened) in a temporary directory,
and all image files will be identified and imported into the
gallery. Other files in the archive will be silently ignored.
After importing the images, the view will display a page which may
contain the number of images imported, and a link to the gallery
into which the images were imported.
"""
# Check if the gallery is valid
gallery = get_object_or_404(Gallery, pk=thegallery)
# And that the user has permission to add photos
if not request.user.has_perm('gallery.add_photo'):
return http.HttpResponseForbidden("No permission to add photos")
if request.method == 'POST':
form = ImportForm(request.POST, request.FILES)
if form.is_valid():
# So now everything is okay
zf = zipfile.ZipFile(request.FILES['zip_file'])
default_date = form.cleaned_data['date']
if not default_date:
default_date = datetime.now()
destdir= os.path.join(settings.MEDIA_ROOT, STOCKPHOTO_BASE,
datetime.now().strftime("%Y/%m/%d/"))
if not os.path.isdir(destdir):
os.makedirs(destdir, 0775)
for filename in zf.namelist():
if filename.endswith('/'):
continue
photopath = os.path.join(destdir, os.path.basename(filename))
data = zf.read(filename)
info = zf.getinfo(filename)
try:
date = datetime(info.date_time[0],
info.date_time[1],
info.date_time[2])
except:
date = default_date
file_data = StringIO(data)
try:
Image.open(file_data)
except:
# don't save and process non Image files
continue
photo = file(photopath, "wb")
photo.write(data)
# Create the object
if photopath.startswith(os.path.sep):
photopath = photopath[len(settings.MEDIA_ROOT):]
photo = Photo(image=photopath, date=date,
photographer=form.cleaned_data['photographer'],
title = os.path.basename(filename),
gallery_id = thegallery)
# Save it -- the thumbnails etc. get created.
photo.save()
# And jump to the directory for this gallery
response = http.HttpResponseRedirect(reverse('stockphoto_gallery_detail',
kwargs={'object_id':
str(thegallery),}))
response['Pragma'] = 'no cache'
response['Cache-Control'] = 'no-cache'
return response
else:
return render_to_response('stockphoto/import_form.html',
dict(form=form, gallery=gallery),
context_instance=RequestContext(request))
else:
form = ImportForm()
return render_to_response('stockphoto/import_form.html',
dict(form=form, gallery=gallery),
context_instance=RequestContext(request))
#@login_required
def export(request, thegallery):
    """Export a gallery to a zip file and send it to the user.

    Copies every photo in the gallery into a temporary directory, zips the
    copies, and returns the archive as an HTTP attachment named photos.zip.
    """
    # Check if the gallery is valid
    gallery = get_object_or_404(Gallery, pk=thegallery)
    # gather up the photos into a new directory
    tmpdir = mkdtemp()
    for photo in gallery.photo_set.all():
        # NOTE(review): photos sharing a basename overwrite each other in
        # tmpdir -- confirm image filenames are unique within a gallery.
        shutil.copy(photo.get_image_filename(),
                    tmpdir)
    files = [ os.path.join(tmpdir, ff) for ff in os.listdir(tmpdir) ]
    # Build the zip in a *named* temp file so its size can be stat()ed below.
    outfile = NamedTemporaryFile()
    zf = zipfile.ZipFile(outfile, "w",
                         compression=zipfile.ZIP_DEFLATED)
    for filename in files:
        # arcname strips the tmpdir prefix so archive entries are flat.
        zf.write(filename, arcname=os.path.basename(filename))
    zf.close()
    outfile.flush()
    # Rewind so the response body streams from the beginning of the archive.
    outfile.seek(0)
    shutil.rmtree(tmpdir)
    response = HttpResponse(outfile)
    response['Content-Type'] = "application/zip"
    response['Content-Length'] = str(os.stat(outfile.name)[stat.ST_SIZE])
    response['Content-Disposition'] = "attachment; filename=photos.zip"
    return response
| [
"crosalot@gmail.com"
] | crosalot@gmail.com |
f3e465d8abf97925aafc78f1129a2bbb9ec13c39 | 71877e3f343e3899da77878937362191fdc02a0f | /topo_management/make_topos.py | 4b1690d7c294a97ec079d7d60e92469cc7f79f95 | [] | no_license | micahjohnson150/scripts | 2a9007ae6d2ad3eec3596aff6e016f6d13fb0652 | 32a8322cab7463dbcc4d6042e7d53a03c2ee2654 | refs/heads/master | 2023-05-26T04:48:27.005338 | 2023-01-21T14:08:51 | 2023-01-21T14:08:51 | 144,737,605 | 0 | 0 | null | 2023-05-22T21:36:55 | 2018-08-14T15:17:16 | Python | UTF-8 | Python | false | false | 4,085 | py | #!/usr/bin/env python3
from os import listdir, walk, system
from os.path import isfile, isdir, basename, abspath, expanduser, split
from subprocess import check_output, Popen
import argparse
from basin_setup.basin_setup import Messages
"""
Every basin in my basin folder has a make file and each is constructed similarly.
Thie script will go through all the basin topos with a make file and execute
make < arg >
The following executes all the topos makefiles via make topo in every basin folder
e.g.
python make_topos.py topo
The following only runs the make topo command on tuolumne
e.g.
python make_topos.py topo -kw tuolumne
"""
# Module-level console logger/printer shared by the script body below.
out = Messages()
def has_hidden_dirs(p):
    """
    Return True if any component of the '/'-separated path string ``p``
    is hidden (starts with a '.'), False otherwise.
    """
    return any(d.startswith('.') for d in p.split('/') if d)


def find_basin_paths(directory, indicator_folder="model_setup", indicator_file="Makefile"):
    """
    Walk through all the folders under ``directory`` and collect every
    non-hidden subdirectory that matches the requested indicators.

    Args:
        directory: root folder to search ('~' is expanded).
        indicator_folder: required basename of a matching directory, or None
            to disable the folder check.
        indicator_file: name of a file that must be present in a matching
            directory, or None to disable the file check.

    Returns:
        List of absolute paths satisfying every enabled indicator.

    Bug fix: previously the "folder only" / "file only" flags were swapped
    relative to the checks they guarded, so calling with exactly one
    indicator set to None could never match any path.
    """
    paths = []
    directory = abspath(expanduser(directory))

    # None disables the corresponding check entirely.
    check_folder = indicator_folder is not None
    check_file = indicator_file is not None

    for r, d, f in walk(directory):
        # Ignore hidden folders and the top-level folder itself.
        if has_hidden_dirs(r) or r == directory:
            continue
        folder_ok = (not check_folder) or basename(r) == indicator_folder
        file_ok = (not check_file) or indicator_file in f
        if folder_ok and file_ok:
            paths.append(r)
    return paths
if __name__ == "__main__":
    # Director of interest
    basins_dir = "~/projects/basins"
    parser = argparse.ArgumentParser(description='Utilize makefiles to make '
                                                 'mass operations on basins.')
    parser.add_argument('command', metavar='cmd',
                        help='Pass a makefile command to execute on every basin')
    parser.add_argument('--keyword','-kw', dest='kw',
                        help='Filter basin_ops paths for kw e.g. tuolumne will'
                        'find only one topo to process')
    args = parser.parse_args()
    # Grab a command passed in
    make_cmd = args.command
    count = 0  # NOTE(review): unused
    basins_attempted = 0
    out.msg("Looking in {} for basins with makefiles...".format(basins_dir))
    basin_paths = find_basin_paths(basins_dir, indicator_folder="model_setup",
                                   indicator_file="Makefile")
    if args.kw != None:
        out.msg("Filtering basin paths using keyword: {}".format(args.kw))
        basin_paths = [p for p in basin_paths if args.kw in p]
        # Warn user if no matches found
        if len(basin_paths) == 0:
            out.error('{} not found in any ops paths'.format(args.kw))
    # Run `make <cmd>` inside every matching basin directory.
    for r in basin_paths:
        topo_attempt = False
        try:
            # NOTE(review): shell=True with an interpolated path is fine for a
            # personal script, but unsafe if basin paths were untrusted.
            cmd = "cd {} && make {}".format(r, make_cmd)
            out.dbg(cmd)
            s = Popen(cmd, shell=True)
            s.wait()
            topo_attempt = True
        except Exception as e:
            # NOTE(review): catching only to re-raise adds nothing; kept as-is.
            raise e
        if topo_attempt:
            basins_attempted += 1
        #input("press enter to continue")
    out.msg("Attempted to build {} topos".format(basins_attempted))
| [
"micah.johnson150@gmail.com"
] | micah.johnson150@gmail.com |
2e8c14b62dd14176853069323a21f7c111fded9d | a0dc10a222031ebace35908e6e451a93ee48a198 | /take_photo_store_in_db.py | 9f0adc6651c336a6dd231987df9dab3387ba8812 | [] | no_license | Eshan-K-Kaushal/Login-Authentication-With-Facial-Gesture-Recognition | e69c90f1ee3b123a9193933e28f0e5eaafc72dad | 85528ab8569719818fa2bc4d641b49abb0127132 | refs/heads/master | 2020-09-25T18:26:53.844641 | 2019-12-05T09:43:09 | 2019-12-05T09:43:09 | 226,063,293 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,838 | py | import cv2
import os
import buttonpython.faces_train as train_new_model
def takephoto():
    """Capture one webcam frame for a (prompted) user name, save it into that
    user's image folder, and retrain the face-recognition model.

    Interactive: SPACE captures a photo, ESC aborts the preview loop.
    """
    uid =0
    uid = input("Please enter user name")
    cam = cv2.VideoCapture(0)  # default webcam
    #cv2.namedWindow("Say Cheese")
    img_counter = 0
    num = 5  # NOTE(review): unused
    val = 0
    # Hard-coded, machine-specific image root -- only valid on this Windows box.
    newpath = "C:\\Users\\Admin\\buttonpython\\buttonpython\\images\\"+str(uid)
    if not os.path.exists(newpath):
        os.mkdir("C:\\Users\\Admin\\buttonpython\\buttonpython\\images\\"+str(uid))
    path = 'C:\\Users\\Admin\\buttonpython\\buttonpython\\images\\'+str(uid)
    while True:
        #uid = input("Please enter user id")
        ret, frame = cam.read()
        cv2.imshow("Say Cheese-Normal-Smile", frame)
        if not ret:
            break
        k = cv2.waitKey(1)
        if k%256 == 27:
            # ESC pressed
            print("Escape hit, closing...")
            break
        elif k%256 == 32:
            # SPACE pressed: save exactly one frame.
            while(img_counter<1):
                img_name = "User_{}.png".format(uid)
                # If an image for this user already exists anywhere under the
                # folder, fall back to a generic name (the counter suffix is
                # commented out below) -- presumably to avoid clobbering.
                for root, dirs, files in os.walk(path):
                    for file in files:
                        if file.endswith("png") or file.endswith("jpg") or file.endswith("jpeg"):
                            if os.path.isfile(img_name):
                                val += 1
                                img_name = "User.png"#+str(val)+str(uid)
                #else:
                #img_name = "User_{}.png".format(uid)
                cv2.imwrite(os.path.join(path , img_name), frame)
                print("{} written!".format(img_name))
                img_counter += 1
                break
            #val += 1
            break
    # NOTE(review): if the loop exits via ESC or camera failure before a
    # capture, img_name is unbound and this print raises NameError -- confirm.
    print(img_name)
    cam.release()
    cv2.destroyAllWindows()
    train_new_model.trainit()  # retrain on the updated image set
| [
"noreply@github.com"
] | Eshan-K-Kaushal.noreply@github.com |
2e4f46a41395098ef6db3d13a48e0814d0edd127 | 7e274c0b78a9643082bb17ba3be577270afaeba6 | /codes/Python/file_system.py | 8b56e9e12be458b70e5e86c8a4af5a5561b12bc6 | [] | no_license | ithink20/Algorithms | 048492287b8222c819a48783506e55bb799d9a5b | 7dc06298b91ce88e22f74b4430051dd302102b68 | refs/heads/master | 2023-01-01T09:43:24.285326 | 2019-12-21T14:26:13 | 2019-12-21T14:26:13 | 122,069,734 | 3 | 3 | null | 2022-12-10T00:15:31 | 2018-02-19T13:55:49 | C++ | UTF-8 | Python | false | false | 527 | py | import os
def list_files(path_to_file):
    """Print a tree-style listing of every directory (suffixed with '/')
    and every file under ``path_to_file``, indented four spaces per level.
    """
    for current_dir, _subdirs, files in os.walk(path_to_file):
        # Depth = separators remaining once the root prefix is removed.
        depth = current_dir.replace(path_to_file, '').count(os.sep)
        print(f"{' ' * 4 * depth}{os.path.basename(current_dir)}/")
        file_prefix = ' ' * 4 * (depth + 1)
        for entry in files:
            print(file_prefix + entry)
def main():
    """Entry point: print the tree of a hard-coded desktop directory."""
    # Hard-coded, machine-specific path.
    path_to_file = '/Users/vikaschaurasiya/Desktop'
    list_files(path_to_file)
if __name__ == '__main__':
    main()
| [
"suicidesquade21@gmail.com"
] | suicidesquade21@gmail.com |
160cd1cfdb5940fe29a73cf6358bf598df79e693 | 8c774738a90da3feb18ae8744cbe46bdda9901c6 | /lib/model/nms/nms_gpu.py | 64fd3258b38cef44dbd2b3980af7ea7f90bc5730 | [
"MIT"
] | permissive | SWHL/faster-rcnn.pytorch | 7476765b129896aa85e2c9e7186c3e8044b61339 | 348678bb1cad0fe21a1be3881e81ec59af02aa19 | refs/heads/master | 2020-09-22T00:16:22.226668 | 2019-12-15T11:35:13 | 2019-12-15T11:35:13 | 224,983,111 | 3 | 0 | MIT | 2019-11-30T08:36:57 | 2019-11-30T08:36:56 | null | UTF-8 | Python | false | false | 300 | py | from __future__ import absolute_import
import torch
import numpy as np
from ._ext import nms
import pdb
def nms_gpu(dets, thresh):
    """GPU non-maximum suppression over detection boxes.

    Args:
        dets: 2-D detection tensor on the GPU; each row is one detection
            (presumably [x1, y1, x2, y2, score] -- confirm against caller).
        thresh: overlap threshold above which boxes are suppressed.

    Returns:
        Int tensor of indices of the detections kept by the CUDA kernel.
    """
    # Allocate output buffers with the same device/type as `dets`
    # (`Tensor.new` keeps them alongside the input).
    keep = dets.new(dets.size(0), 1).zero_().int()
    num_out = dets.new(1).zero_().int()
    # The CUDA extension fills `keep` and writes the kept count into num_out.
    nms.nms_cuda(keep, dets, num_out, thresh)
    # Trim to the number of surviving detections.
    keep = keep[:num_out[0]]
    return keep
| [
"1226778264@qq.com"
] | 1226778264@qq.com |
3e8728b1cd31d7507eded1ece1bafcde106c51ba | 1ee5fb62710706df9381d7134b84a8ee3c6a433c | /prime_generator.py | 10d0ded24c4b5625933db59f4eb79f3360260c2d | [] | no_license | piradata/URI_codes | 7b6bf819a29f69f2cfd60d767f7aa319bfa7757d | ef50d8acfee26e5e1ff263b3a7d9c787bb9ec2a8 | refs/heads/master | 2020-04-24T13:07:49.007219 | 2019-06-20T03:49:07 | 2019-06-20T03:49:07 | 171,976,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | #A é matriz q guarda numeros primos
# Build A, the list of the first 15000 primes, by trial division against the
# primes found so far. Only odd candidates are tested, and divisor scanning
# stops as soon as the divisor's square exceeds the candidate (no divisor
# above sqrt(candidate) can be the smallest prime factor).
A = [2, 3]
candidate = 5
while len(A) < 15000:
    is_composite = False
    for divisor in A:
        if divisor * divisor > candidate:
            break
        if candidate % divisor == 0:
            is_composite = True
            break
    if not is_composite:
        A.append(candidate)
    candidate += 2
print(A); | [
"noreply@github.com"
] | piradata.noreply@github.com |
6b5c09ffd56d692ba043c5d8a8401b54afbd1796 | fb7d3e2b146210a97346a72273c4a03faba4beb7 | /venv/Scripts/easy_install-3.7-script.py | af4817693569ff675aea319f4fae0d700cd4c52b | [] | no_license | eaxcggdiwmw/- | af287ddbe1c2d00853107eb1751e426d7851689a | 8fc5f549b88d5c2aa978a4fa811d54405dda07e6 | refs/heads/master | 2020-04-16T02:35:31.582588 | 2019-01-11T09:18:15 | 2019-01-11T09:18:15 | 165,201,408 | 0 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 455 | py | #!D:\Êä³ö¼þ\ÅÀ³æ¿ò¼Ü\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
# Auto-generated setuptools console-script wrapper -- do not edit by hand.
# It resolves and invokes the 'easy_install-3.7' entry point at run time.
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py(w)"/".exe" suffix so argv[0] matches the tool name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
    )
| [
"379864478@qq.com"
] | 379864478@qq.com |
05bce0dc7b284baba067637aeeb6efa9d33144b0 | 405d49101193df51af81a020a991897deebe661b | /src/shift_reads.py | 6a54fcacfaff24cd40589dbc42095703664c22e9 | [] | no_license | HuGKevin/nfkb_seq | bd0319e7a854ddeb1b725aaba253031659ff7e3d | 58ce67a4a9b910d48bb2bbc2d1e52a6f17c966aa | refs/heads/master | 2022-11-05T01:00:50.286412 | 2020-07-07T17:05:16 | 2020-07-07T17:05:16 | 262,151,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,625 | py | #
# shift_reads.py
#
# This script shifts aligned ATAC-seq reads for transcription factor footprinting.
# Reads aligning to the positive strand are shifted by +4 nt; reads aligning to the
# negative strand are shifted by -5 nt.
#
# This is required for the ATAC-seq protocol in Buenrostro et al. (2013) Nat Methods
# 10(12):1213-1218.
import os.path
import argparse
import re
import pysam
parser = argparse.ArgumentParser(description = "This script shifts aligned ATAC-seq reads for transcription factor footprinting. Reads aligning to the positive strand are shifted by +4 nt; reads aligning to the negative strand are shifted by -5 nt. This is required for the ATAC-seq protocol in Buenrostro et al. (2013) Nat Methods 10(12):1213-1218.")
parser.add_argument("infile", metavar = "<input file>", help = "This is assumed to be in BAM format, and must end with a .bam extension.")
parser.add_argument("-o", dest = "outfile", metavar = "<output file>", required = False, help = "If this is not specified, output will be written to a file where the .bam extension of <input file> is replaced with .shifted.bam")
parser.add_argument("-mf", dest = "maxfragment", metavar = "[max frag size]", type = int, required = False, help = "Only include fragments <= a maximum size, e.g. sub-nucleosomal fragments <= 100 nt per Mo et al. (2015) Neuron 86(6):1369-1384")
args = parser.parse_args()
infile = ""
outfile = ""
if (not re.search(r"\.bam$", args.infile, re.IGNORECASE)):
# If input file does not end with .bam (case insensitive), then exit with error
parser.print_help()
parser.exit(status = 1, message = "\nERROR: Input file does not end with .bam\n")
elif (not args.outfile):
# Construct output file name from input file name
regex = re.compile(r"\.bam$", re.IGNORECASE)
infile = args.infile
outfile = regex.sub(r".shifted.bam", args.infile)
else:
# User has specified both input and output file names, go with those
infile = args.infile
outfile = args.outfile
# Check to see that input file exists before proceeding with processing
if(os.path.isfile(infile)):
in_file = pysam.AlignmentFile(infile, "rb")
out_file = pysam.AlignmentFile(outfile, "wb", template=in_file)
print "Processing %s." % infile
if(args.maxfragment):
# Exclude fragments above a specified size maxfragment
# Loop over all reads in input file
for read in in_file.fetch(until_eof = True):
if(abs(read.template_length) <= args.maxfragment):
# The fragment is <= maximum size specified by user
if read.is_reverse:
# Shift read by -5 bp if on negative strand
read.reference_start -= 5
else:
# Shift read by +4 bp if on positive strand
read.reference_start += 4
out_file.write(read)
else:
# Include all fragments
# Loop over all reads in input file
for read in in_file.fetch(until_eof = True):
if read.is_reverse:
# Shift read by -5 bp if on negative strand
read.reference_start -= 5
else:
# Shift read by +4 bp if on positive strand
read.reference_start += 4
out_file.write(read)
else:
# Input file does not exist
parser.print_help()
parser.exit(status = 1, message = "\nERROR: Input file %s does not exist\n" % infile)
print "Writing output to %s." % outfile
in_file.close
out_file.close
| [
"kevin.hu@yale.edu"
] | kevin.hu@yale.edu |
692f6b88d3ccf72cfc834898c9afebb57e40c37a | eb7e5d8f738385095902702fb33142289ee1bb64 | /Climbing the Leaderboard/solution.py | 72cefb244c4c9512abe1a1bce896fc1d55371045 | [] | no_license | Raamkishore/30-Days-of-Coding | f7ecfe40b526ff3c2c968f788a321c1448b8e609 | c79294fb212b76f3ed9aef67c2cf8ce6ae99dc33 | refs/heads/master | 2022-11-12T12:42:20.603061 | 2020-07-09T05:03:26 | 2020-07-09T05:03:26 | 271,331,989 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the climbingLeaderboard function below.
def climbingLeaderboard(scores, alice):
    """Return Alice's dense-ranked leaderboard position (as strings) after
    each of her games.

    Args:
        scores: existing leaderboard scores, highest first (may repeat).
        alice: Alice's game scores in non-decreasing order.
    """
    # Dense rank for every distinct leaderboard score.
    rank_of = {}
    for s in scores:
        if s not in rank_of:
            rank_of[s] = len(rank_of) + 1
    result = []
    # Scan pointer into `scores`; Alice's scores only grow, so it only ever
    # moves toward the top of the board (amortized linear overall).
    scan = len(scores) - 1
    for score in alice:
        if score in rank_of:
            result.append(str(rank_of[score]))
            continue
        k = scan
        while k >= 0:
            if score < scores[k]:
                # scores[k] is the lowest score strictly above Alice's.
                result.append(str(rank_of[scores[k]] + 1))
                scan = k
                break
            if score > scores[0]:
                # Alice beats the current leader.
                result.append("1")
                break
            k -= 1
    return result
if __name__ == '__main__':
    # HackerRank harness: read the leaderboard and Alice's scores from stdin
    # and write one rank per line to the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    scores_count = int(input())  # declared count; not used beyond parsing
    scores = list(map(int, input().rstrip().split()))
    alice_count = int(input())  # declared count; not used beyond parsing
    alice = list(map(int, input().rstrip().split()))
    result = climbingLeaderboard(scores, alice)
    fptr.write('\n'.join(map(str, result)))
    fptr.write('\n')
    fptr.close()
| [
"noreply@github.com"
] | Raamkishore.noreply@github.com |
13ce13acc0ee3b59068a41e4508f062418b18a4d | 124d231d5d03546b724ea998460428f8f0c66948 | /ARRDAN001_Prac1.py | c7080424c8be9ca782a41a3371a691fa8324bf83 | [] | no_license | danaarries/Prac1 | 7fc87fb4465026a946234738f43d031c3627e00a | 4e610d17c930e96c7af3eb9c81e44dd9db6d9350 | refs/heads/master | 2020-06-25T12:53:30.639579 | 2019-07-28T16:41:42 | 2019-07-28T16:41:42 | 199,313,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,306 | py | #!/usr/bin/python3
"""
Python Practical Template
Keegan Crankshaw
Readjust this Docstring as follows:
Names: <Dana Arries>
Student Number: <ARRDAN001>
Prac: <1>
Date: <22/07/2019>
"""
# import Relevant Librares
import RPi.GPIO as GPIO
from itertools import product #to access product function used create state LED values
GPIO.setmode(GPIO.BCM)  # use Broadcom (BCM) pin numbering
GPIO.setup(17, GPIO.IN, pull_up_down=GPIO.PUD_UP) #setting the button to increment LEDs
GPIO.setup(27, GPIO.IN, pull_up_down=GPIO.PUD_UP) #setting the button to decrement LEDs
GPIO.setup(22,GPIO.OUT) #setting the lowest bit LED
GPIO.setup(5,GPIO.OUT) #setting the middle bit LED
GPIO.setup(6,GPIO.OUT) #setting the highest bit LED
GPIO.output(22,0) #ensure all LEDs are off when the programme starts
GPIO.output(5,0)
GPIO.output(6,0)
global state #make an array of each state needed for binary counter
# NOTE(review): `global` at module scope is a no-op; the statements are harmless.
state=list(product([0,1], repeat=3))  # all 3-bit states, (0,0,0) .. (1,1,1)
global count #make a variabl counter to step through each state in array
count=0 #initialise the counter to 0 so all LEDs are off when programme starts
# Logic that you write
def main():
    """Per-iteration loop body: nothing to do here -- all the work happens in
    the GPIO edge-interrupt callbacks registered below."""
    GPIO.setwarnings(False) #set the warnings off and ensures main function has something in it so it runs
def callback_increment(channel):
    """Button ISR: advance the 3-bit LED counter by one (wrapping 7 -> 0)
    and drive the three LEDs with the bits of the new state."""
    global count
    count = (count + 1) % 8  # same as incrementing and resetting at 8
    GPIO.output(22, state[count][2])  # lowest bit LED
    GPIO.output(5, state[count][1])   # middle bit LED
    GPIO.output(6, state[count][0])   # highest bit LED
def callback_decrement(channel):
    """Button ISR: step the 3-bit LED counter down by one (wrapping 0 -> 7)
    and drive the three LEDs with the bits of the new state."""
    global count
    count = (count - 1) % 8  # same as decrementing and resetting -1 to 7
    GPIO.output(22, state[count][2])  # lowest bit LED
    GPIO.output(5, state[count][1])   # middle bit LED
    GPIO.output(6, state[count][0])   # highest bit LED
# Register edge-triggered interrupts: falling edge = button press (inputs are
# pulled up), debounced at 150 ms.
GPIO.add_event_detect(17, GPIO.FALLING, callback=callback_increment,bouncetime=150) #call interrupt when increment button pressed (falling edge)
GPIO.add_event_detect(27, GPIO.FALLING, callback=callback_decrement,bouncetime=150) #call interrupt when decrement button pressed (falling edge)
# Only run the functions if
if __name__ == "__main__":
    # Make sure the GPIO is stopped correctly
    try:
        while True:
            main() #runs main function until programme is terminated
            #
        # NOTE(review): the four lines below are unreachable -- the
        # `while True` loop above only exits via an exception.
        GPIO.output(22,0)
        GPIO.output(5,0)
        GPIO.output(6,0)
        GPIO.cleanup() #turns off GPIOs when main function ends
    except KeyboardInterrupt:
        # Ctrl-C: release the GPIO pins before exiting.
        print("Exiting gracefully")
        # Turn off your GPIOs here
        GPIO.cleanup()
    # except e:
    #     print("Some other error occurred")
    #     print(e.message) ##the programme wont run unless this except statement is commented out
| [
"arrdan001@myuct.ac.za"
] | arrdan001@myuct.ac.za |
0fbe76b6b25a22fd362a69283881ff1185437b93 | 47dec5404e8869f55c8eb8ff2b543bbc2df6bd9a | /kipoi_utils/external/flatten_json.py | d4db4e2a70f34a299a0fb159c9711038d842cb1c | [
"MIT"
] | permissive | bfclarke/kipoi-utils | cd283ecbbc13afad5d0af7cbdabd8e55b24a9922 | 2f5f114f70aa80303478aeadb15862dadad378e1 | refs/heads/master | 2022-03-10T06:20:19.998426 | 2019-10-07T15:15:02 | 2019-10-07T15:15:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,631 | py | """
Modified from https://github.com/amirziai/flatten
MIT License
Copyright (c) 2016 Amir Ziai
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
import json
import collections
from collections import Iterable, OrderedDict
import six
def check_if_numbers_are_consecutive(list_):
    """
    Return True when every adjacent pair in ``list_`` differs by exactly one
    (vacuously True for empty or single-element lists).

    :param list_: list of integers
    :return: Boolean
    """
    return all(second - first == 1
               for first, second in zip(list_, list_[1:]))
def _construct_key(previous_key, separator, new_key):
"""
Returns the new_key if no previous key exists, otherwise concatenates
previous key, separator, and new_key
:param previous_key:
:param separator:
:param new_key:
:return: a string if previous_key exists and simply passes through the
new_key otherwise
"""
if previous_key:
return u"{}{}{}".format(previous_key, separator, new_key)
else:
return new_key
# Overrides flatten (replaces the upstream implementation below, kept
# commented out for reference).
def flatten(dd, separator='_', prefix='', is_list_fn=lambda x: isinstance(x, list)):
    """Flatten a nested dictionary/list into a single flat dict.

    Args:
        separator: string separating the hierarchical levels in flat keys.
        prefix: key prefix accumulated from the enclosing levels.
        is_list_fn: predicate deciding whether a value is exploded into
            per-index entries ("0", "1", ...) or kept whole as a leaf value.
    """
    # NOTE(review): `collections.Mapping` was removed in Python 3.10; this
    # needs `collections.abc.Mapping` to run on modern interpreters.
    if isinstance(dd, collections.Mapping):
        # Recurse into each value; child keys get this level's key prefixed.
        return {prefix + separator + k if prefix else k: v
                for kk, vv in six.iteritems(dd)
                for k, v in six.iteritems(flatten(vv, separator, kk, is_list_fn))
                }
    elif is_list_fn(dd):
        # Treat list positions as string keys and recurse per element.
        return {prefix + separator + k if prefix else k: v
                for kk, vv in enumerate(dd)
                for k, v in six.iteritems(flatten(vv, separator, str(kk), is_list_fn))
                }
    else:
        # Leaf value: keyed by the accumulated prefix.
        return {prefix: dd}
def flatten_ordered(dd, separator='_', prefix='', is_list_fn=lambda x: isinstance(x, list)):
    """Flatten a nested dictionary/list into an OrderedDict, preserving the
    encounter order of keys.

    Args:
        separator: string separating the hierarchical levels in flat keys.
        prefix: key prefix accumulated from the enclosing levels.
        is_list_fn: predicate deciding whether a value is exploded into
            per-index entries or kept whole as a leaf value.
    """
    # NOTE(review): `collections.Mapping` was removed in Python 3.10; this
    # needs `collections.abc.Mapping` to run on modern interpreters.
    if isinstance(dd, collections.Mapping):
        if not dd:
            # Empty mapping is returned as-is (treated as a leaf).
            return dd
        return OrderedDict([(prefix + separator + k if prefix else k, v)
                            for kk, vv in six.iteritems(dd)
                            for k, v in six.iteritems(flatten_ordered(vv, separator, kk, is_list_fn))
                            ])
    elif is_list_fn(dd):
        if not dd:
            # Empty list is returned as-is (treated as a leaf).
            return dd
        return OrderedDict([(prefix + separator + k if prefix else k, v)
                            for kk, vv in enumerate(dd)
                            for k, v in six.iteritems(flatten_ordered(vv, separator, str(kk), is_list_fn))
                            ])
    else:
        return OrderedDict([(prefix, dd)])
# def flatten(nested_dict, separator="_", root_keys_to_ignore=set()):
# """
# Flattens a dictionary with nested structure to a dictionary with no
# hierarchy
# Consider ignoring keys that you are not interested in to prevent
# unnecessary processing
# This is specially true for very deep objects
# :param nested_dict: dictionary we want to flatten
# :param separator: string to separate dictionary keys by
# :param root_keys_to_ignore: set of root keys to ignore from flattening
# :return: flattened dictionary
# """
# assert isinstance(nested_dict, dict), "flatten requires a dictionary input"
# assert isinstance(separator, six.string_types), "separator must be string"
# # This global dictionary stores the flattened keys and values and is
# # ultimately returned
# flattened_dict = dict()
# def _flatten(object_, key):
# """
# For dict, list and set objects_ calls itself on the elements and for
# other types assigns the object_ to
# the corresponding key in the global flattened_dict
# :param object_: object to flatten
# :param key: carries the concatenated key for the object_
# :return: None
# """
# # Empty object can't be iterated, take as is
# if not object_:
# flattened_dict[key] = object_
# # These object types support iteration
# elif isinstance(object_, dict):
# for object_key in object_:
# if not (not key and object_key in root_keys_to_ignore):
# _flatten(object_[object_key], _construct_key(key,
# separator,
# object_key))
# elif isinstance(object_, list) or isinstance(object_, set):
# for index, item in enumerate(object_):
# _flatten(item, _construct_key(key, separator, index))
# # Anything left take as is
# else:
# flattened_dict[key] = object_
# _flatten(nested_dict, None)
# return flattened_dict
# Backwards-compatible alias for the upstream library's public name.
flatten_json = flatten
def _unflatten_asserts(flat_dict, separator):
    """Validate the arguments shared by the unflatten* functions: the input
    must be a dict and the separator a string."""
    assert isinstance(flat_dict, dict), "un_flatten requires dictionary input"
    assert isinstance(separator, six.string_types), "separator must be string"
    # assert all((not value or not isinstance(value, Iterable) or
    #             isinstance(value, six.string_types)
    #             for value in flat_dict.values())), "provided dict is not flat"
def unflatten(flat_dict, separator='_'):
    """
    Rebuild a hierarchical dictionary from a flattened one.
    Assumes no lists are present.

    :param flat_dict: a dictionary with no hierarchy
    :param separator: a string that separates keys
    :return: a dictionary with hierarchy
    """
    _unflatten_asserts(flat_dict, separator)

    result = dict()

    def _insert(target, key_path, value):
        # Create/descend the intermediate dicts, then attach the leaf value.
        for part in key_path[:-1]:
            target = target.setdefault(part, {})
        target[key_path[-1]] = value

    for flat_key in flat_dict:
        _insert(result, flat_key.strip(separator).split(separator),
                flat_dict[flat_key])
    return result
def unflatten_list(flat_dict, separator='_'):
    """
    Unflatten a dictionary, then detect sub-dictionaries whose keys are the
    consecutive integers 0..n-1 and replace them with real lists.

    This is probably not very efficient and has not been tested extensively;
    sorting every key set and checking consecutiveness are each O(n) per
    nested dictionary.

    :param flat_dict: dictionary with no hierarchy
    :param separator: a string that separates keys
    :return: a dictionary with hierarchy
    """
    _unflatten_asserts(flat_dict, separator)
    # First unflatten the dictionary assuming no lists exist
    unflattened_dict = unflatten(flat_dict, separator)
    def _convert_dict_to_list(object_, parent_object, parent_object_key):
        if isinstance(object_, dict):
            try:
                keys = [int(key) for key in object_]
                keys.sort()
            except (ValueError, TypeError):
                # A non-numeric key is present: this dict cannot be a list.
                keys = []
            keys_len = len(keys)
            # List-like iff the int keys are exactly 0..len-1 (the sum check
            # is a cheap pre-filter before the consecutiveness scan).
            if (keys_len > 0 and sum(keys) ==
                    int(((keys_len - 1) * keys_len) / 2) and keys[0] == 0 and
                    keys[-1] == keys_len - 1 and
                    check_if_numbers_are_consecutive(keys)):
                # The dictionary looks like a list so we're going to replace it
                parent_object[parent_object_key] = []
                for key_index, key in enumerate(keys):
                    parent_object[parent_object_key].append(object_[str(key)])
                    # The list item we just added might be a list itself
                    # https://github.com/amirziai/flatten/issues/15
                    _convert_dict_to_list(parent_object[parent_object_key][-1],
                                          parent_object[parent_object_key],
                                          key_index)
            for key in object_:
                if isinstance(object_[key], dict):
                    _convert_dict_to_list(object_[key], object_, key)
    _convert_dict_to_list(unflattened_dict, None, None)
    return unflattened_dict
| [
"derthorstenbeier@gmail.com"
] | derthorstenbeier@gmail.com |
727e3d3e9f75003187b98333d2bb8945ad4971c7 | 7c32abb718368f9c235c63c00dee478c061e2846 | /projects/Insurance Premium Prediction/TrainingDataIngestion/dbOperation.py | 552d374a380389d595f5355687cb6236628f2028 | [] | no_license | motheesh/DataScience | 13de80f624d95f9ca8978798e3d35b4b44526603 | ad7cdaef6bc608768393e00ac430c20db6e65236 | refs/heads/main | 2023-07-12T20:52:10.053112 | 2021-08-22T13:40:51 | 2021-08-22T13:40:51 | 372,164,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,076 | py | from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider
from cassandra.query import SimpleStatement, BatchStatement
from cassandra.policies import RetryPolicy
import numpy as np
import config
from ApplicationLogger.logger import logger
from TrainingDataValidation.ValidateTrainingData import TrainValidation
import pandas as pd
class dboperations:
    def __init__(self):
        """Set up the data-ingestion logger and open a Cassandra session."""
        self.TrainingLogPath="./TrainingLog/DataIngestionLog"
        self.TrainDbLogger=logger(self.TrainingLogPath)
        self.session=self.CreateSession()
    def CreateSession(self):
        """Connect to the Cassandra/Astra cluster described in `config`.

        Returns the live session; the cluster handle is kept on
        `self.cluster` so `close()` can shut it down. Logs and re-raises on
        any connection failure.
        """
        try:
            self.TrainDbLogger.log("info",f"creating DB connection starts ")
            cloud_config= {
                'secure_connect_bundle': config.BUNDLE_PATH
            }
            auth_provider = PlainTextAuthProvider(config.CLIENT_ID, config.SECRET_KEY)
            cluster = Cluster(cloud=cloud_config, auth_provider=auth_provider,protocol_version=4)
            self.cluster=cluster
            session = cluster.connect()
            self.TrainDbLogger.log("info",f"created DB connection")
            return session
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during db connection {e}")
            raise Exception(e)
    def close(self):
        """Shut down the cluster connection; errors are logged, not raised."""
        try:
            self.TrainDbLogger.log("info",f"started close DB connection operation")
            self.cluster.shutdown()
            self.TrainDbLogger.log("info",f"closed DB connection successfully")
        except Exception as e:
            self.TrainDbLogger.log("error",f"error while closing db connection {e}")
    def executeQuery(self,query,values=[]):
        """Execute `query`; when `values` is non-empty the query is prepared
        and bound first. Returns the driver result set; re-raises on error.

        NOTE(review): the mutable default `values=[]` is only read, never
        mutated, so it is harmless here -- but `None` would be safer.
        """
        try:
            self.TrainDbLogger.log("info",f"executing query starts")
            session=self.session
            if len(values)>0:
                query=self.prepareQuery(session,query,values)
            result=session.execute(query)
            self.TrainDbLogger.log("info",f"Ending query execution")
            return result
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during query execution {e}")
            raise Exception(e)
    def executeQueryOne(self,query,values=[]):
        """Like `executeQuery` but returns only the first row (`.one()`)."""
        try:
            self.TrainDbLogger.log("info",f"starting one query execution")
            session=self.session
            if len(values)>0:
                query=self.prepareQuery(session,query,values)
            result=session.execute(query).one()
            self.TrainDbLogger.log("info",f"Ending one query execution")
            return result
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during query execution {e}")
            raise Exception(e)
    def truncateTable(self,query):
        """Run a TRUNCATE statement (passed in as `query`); re-raises on error."""
        try:
            self.TrainDbLogger.log("info",f"starting Truncate table Operation")
            result=self.executeQueryOne(query)
            self.TrainDbLogger.log("info",f"Ending Truncate table Operation")
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during Truncate table {e}")
            raise Exception(e)
    def createTable(self,query):
        """Run a CREATE TABLE statement (passed in as `query`); re-raises on error."""
        try:
            self.TrainDbLogger.log("info",f"starting create table Operation")
            result=self.executeQueryOne(query)
            self.TrainDbLogger.log("info",f"Ending create table Operation")
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during creating table {e}")
            raise Exception(e)
    def dropTable(self,query):
        """Run a DROP TABLE statement (passed in as `query`); re-raises on error."""
        try:
            self.TrainDbLogger.log("info",f"starting drop table Operation")
            result=self.executeQuery(query)
            self.TrainDbLogger.log("info",f"Ending drop table Operation")
        except Exception as e:
            self.TrainDbLogger.log("error",f"error during dropping table {e}")
            raise Exception(e)
def getTrainData(self,query):
try:
self.TrainDbLogger.log("info",f"starting get train data Operation")
result=self.executeQuery(query)
except Exception as e:
self.TrainDbLogger.log("error",f"error during getting data from table using select query {e}")
self.TrainDbLogger.log("info",f"Ending get train data Operation")
return result
    def prepareBatchData(self,query,batchList,columns):
        """Build a BatchStatement with one INSERT per row of `batchList`,
        binding `columns` in order ("id" is passed through unchanged, every
        other column value is stringified). Re-raises on any error.

        NOTE(review): presumably `batchList` is a pandas DataFrame with a
        default RangeIndex -- `.loc[i]` relies on that; verify at call sites.
        """
        batch=BatchStatement()
        try:
            self.TrainDbLogger.log("info",f"starting Batch Data query preperation")
            for i in range(0,len(batchList)):
                values=tuple([batchList.loc[i][j] if j=="id" else str(batchList.loc[i][j]) for j in columns])
                #print(values)
                #print(query)
                batch.add(SimpleStatement(query),values )
            self.TrainDbLogger.log("info","Ending Batch Data query preperation")
        except Exception as e:
            self.TrainDbLogger.log("error",f"Error while preparing batch insert query {e}")
            raise Exception(e)
        return batch
def insertBatchData(self,query,batchList,columns):
try:
self.TrainDbLogger.log("info",f"starting Batch data insertion")
session=self.session
batch=self.prepareBatchData(query,batchList,columns)
result=session.execute(batch)
self.TrainDbLogger.log("info",f"Ending Batch data insertion")
return 1
except Exception as e:
self.TrainDbLogger.log("error",f"error during batch data insertion {e}")
#raise Exception(e)
def prepareQuery(self,session,query,values):
try:
self.TrainDbLogger.log("info",f"starting query preperation for execution")
stmt=session.prepare(query)
qry=stmt.bind(values)
self.TrainDbLogger.log("info",f"Ending query preperation for execution")
return qry
except Exception as e:
self.TrainDbLogger.log("error",f"error during query preparation {e}")
raise Exception(e) | [
"noreply@github.com"
] | motheesh.noreply@github.com |
2a8953a9839de6581e4caa79cda9cb3036c84a36 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03834/s172653587.py | 5c52cb64a75d2d12e1141735e2cd2b29c9007926 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44 | py | a = input()
# Replace every comma in the input string `a` (read above) with a space,
# then print the transformed line.
b = a.replace(",", " ")
print(b)
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
578c5a1a6ff22e80aa35320182614bae82dfd05a | c51b70a06a7bef9bd96f06bd91a0ec289b68c7c4 | /src/Snakemake/rules/Imbalance/Imbalance.smk | 3d777e988559e4db91c171cc36dc8db59f4b607b | [] | no_license | clinical-genomics-uppsala/TSO500 | 3227a65931c17dd2799dbce93fe8a47f56a8c337 | b0de1d2496b6c650434116494cef721bdc295528 | refs/heads/master | 2023-01-10T01:41:51.764849 | 2020-11-05T14:11:25 | 2020-11-05T14:11:25 | 218,708,783 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 390 | smk |
# Run src/Imbalance.py over every RNA BAM listed in the config and write the
# two result tables (all genes, and called genes only).
rule imbalance :
    input:
        # One BAM per sample name under config["RNA_Samples"].
        bams = ["RNA_TST170/bam_files/" + s + ".bam" for s in config["RNA_Samples"]]
    output:
        imbalance_all = "Results/RNA/Imbalance/imbalance_all_gene.txt",
        imbalance = "Results/RNA/Imbalance/imbalance_called_gene.txt"
    run:
        import subprocess
        # NOTE(review): shell=True with string concatenation — fine for trusted
        # sample names, but names containing spaces would break the command.
        subprocess.call("python src/Imbalance.py " + " ".join(input.bams), shell=True)
| [
"jonas.almlof@igp.uu.se"
] | jonas.almlof@igp.uu.se |
d01833d153c6d7f16c878d087755444fc30b886e | 713197a9519d72610804e1389e57d7c738a3d90e | /tienda/migrations/0001_initial.py | e4c0ac5980d6a8b34b1f42dc58ca9ffe4a55c7e6 | [] | no_license | lucianocanales/DjangoProject | 08cb8bbb8f630f48b447913f8a72ad7e5383db68 | 8491b0c1d1b8d4fe45429e978b67b08abd9600bd | refs/heads/master | 2023-02-27T12:10:23.470759 | 2021-02-13T00:09:58 | 2021-02-13T00:09:58 | 335,425,577 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,414 | py | # Generated by Django 3.1.3 on 2021-01-15 21:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated initial migration for the "tienda" app: creates the
# caracteristica, categoria, imagen, Producto and Imagen_producto tables.
# NOTE(review): several verbose_name / verbose_name_plural pairs look swapped
# (singular under "plural") and 'Cateegorias' is a typo — correcting them
# would need a follow-up migration, so they are left as generated here.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Simple named characteristic lookup table with audit timestamps.
        migrations.CreateModel(
            name='caracteristica',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=150)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'verbose_name': 'Caracteristicas',
                'verbose_name_plural': 'Caracteristica',
            },
        ),
        # Category tree: 'padre' is a nullable self-referencing parent link.
        migrations.CreateModel(
            name='categoria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=50)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now_add=True)),
                ('padre', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tienda.categoria')),
            ],
            options={
                'verbose_name': 'Cateegorias',
                'verbose_name_plural': 'Categoria',
            },
        ),
        # Uploaded image with an active/inactive state flag.
        migrations.CreateModel(
            name='imagen',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('imagen', models.ImageField(blank=True, null=True, upload_to='productos')),
                ('estado', models.CharField(choices=[('active', 'Activa'), ('Inactiva', 'Inactiva')], default='Inactiva', max_length=50)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'verbose_name': 'Imagenes',
                'verbose_name_plural': 'Imagen',
            },
        ),
        # Product: owned by a user, in one category, with M2M characteristics.
        migrations.CreateModel(
            name='Producto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=50)),
                ('descripcion', models.TextField()),
                ('precio', models.FloatField()),
                ('ventas', models.IntegerField(default=0)),
                ('estado', models.CharField(choices=[('nuevo', 'Nuevo'), ('usado', 'Usado'), ('virtual', 'Virtual')], default='Nuevo', max_length=50)),
                ('stock', models.IntegerField(default=0)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now_add=True)),
                ('caracteristica', models.ManyToManyField(to='tienda.caracteristica')),
                ('categorias', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='tienda.categoria')),
                ('usuario', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Productos',
                'verbose_name_plural': 'Producto',
            },
        ),
        # Join table linking a Producto to an imagen.
        migrations.CreateModel(
            name='Imagen_producto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now_add=True)),
                ('caracteristica', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tienda.producto', verbose_name='Producto')),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tienda.imagen', verbose_name='Imagen')),
            ],
            options={
                'verbose_name': 'Imagenes de productos',
                'verbose_name_plural': 'Imagen de producto',
            },
        ),
    ]
| [
"lucianocanales@gmail.com"
] | lucianocanales@gmail.com |
ecf912345fbadbd8ec8aaf0e1299ca4407c95bdd | bc3fb3e8a659375a6af9f1dc958a2db5e1e7b87a | /python/hrpsys-plot/datalogger-plotter-with-pyqtgraph.py | 6f4a16254f35495f7331d2b1fb744a4f0d8f2d05 | [] | no_license | kindsenior/kuroiwa_demos | 0bfa973b0e16202c79420bab4d376ee9a85c6f25 | 2f380c0729cb4fa87c3b76a723576cb45977ccec | refs/heads/master | 2021-01-14T10:48:13.065969 | 2015-12-22T14:49:38 | 2015-12-22T14:49:38 | 48,623,977 | 0 | 0 | null | 2015-12-26T21:23:58 | 2015-12-26T21:23:57 | null | UTF-8 | Python | false | false | 10,378 | py | #!/usr/bin/env python
import csv, argparse, numpy, math, time, struct, yaml, sys
try:
import pyqtgraph
except:
print "please install pyqtgraph. see http://www.pyqtgraph.org/"
sys.exit(1)
class DataloggerLogParserController:
    """Load hrpsys DataLogger text logs described by a YAML plot configuration
    and render them into a grid of linked pyqtgraph plots.

    NOTE(review): readData/plotData print progress using the module-global
    ``start_time`` that is only set inside the ``__main__`` block below, so
    this class is not usable standalone without defining it first.
    """
    def __init__(self, fname, yname, title):
        """Parse the YAML plot config and build the (row x col) plot grid.

        fname -- base path of the log files (one file per topic: fname.<topic>)
        yname -- YAML config mapping plot names to {'field': ..., 'log': ...}
        title -- window title; empty string falls back to the log file name
        """
        self.fname = fname
        with open(yname, "r") as f:
            # NOTE(review): yaml.load without an explicit Loader (legacy API).
            self.plot_dic = yaml.load(f)
        # self.dataListDict = {'time':[]}
        self.dataListDict = {}
        self.app = pyqtgraph.Qt.QtGui.QApplication([])
        self.view = pyqtgraph.GraphicsLayoutWidget()
        self.view.setBackground('w')
        if title == '':
            self.view.setWindowTitle(fname.split('/')[-1])
        else:
            self.view.setWindowTitle(title)
        self.items = []
        # Grid size is derived from the config: one row per field group,
        # columns sized to the widest field group.
        self.row_num = sum([len(x[1]["field"]) for x in self.plot_dic.items()])
        self.col_num = max([max([len(fld) for fld in x[1]["field"]]) for x in self.plot_dic.items()])
        for r in range(self.row_num):
            self.items.append([])
            for c in range(self.col_num):
                if c == 0:
                    p = self.view.addPlot(row=r, col=c, name='r'+str(r)+'c'+str(c))
                else:
                    p = self.view.addPlot(row=r, col=c)
                    # All non-first columns share the x axis of plot r0c0.
                    p.setXLink('r0c0')
                self.items[r].append(p)
    def readData(self, xmin, xmax):
        """Read every referenced topic file into self.dataListDict.

        Each topic maps to [times, row0_values, row1_values, ...]; times are
        shifted so the earliest timestamp across topics becomes 0.
        NOTE(review): xmin/xmax are accepted but currently unused.
        """
        print '[%f] : start readData' % (time.time() - start_time)
        # store data
        topic_list = list(set(reduce(lambda x, y : x + y, [x[1]["log"] for x in self.plot_dic.items()])))
        for topic in topic_list:
            self.dataListDict[topic] = [[]] # first list is for time
            with open(self.fname + '.' + topic, 'r') as f:
                reader = csv.reader(f, delimiter=' ')
                for row in reader:
                    self.dataListDict[topic][0].append(float(row[0]))
                    dl = row[1:]
                    dl = filter(lambda x: x != '', dl)
                    self.dataListDict[topic].append([float(x) for x in dl])
        # set the fastest time as 0
        min_time = min([self.dataListDict[topic][0][0] for topic in topic_list])
        for topic in topic_list:
            self.dataListDict[topic][0] = [x - min_time for x in self.dataListDict[topic][0]]
        # fix servoState
        # servoState values are logged as ints whose bit pattern encodes
        # packed floats; reinterpret each int's bytes as a float.
        if 'RobotHardware0_servoState' in topic_list:
            ss_tmp = self.dataListDict['RobotHardware0_servoState'][1:]
            for i, ssl in enumerate(ss_tmp):
                ss_tmp[i] = [struct.unpack('f', struct.pack('i', int(ss)))[0] for ss in ssl]
            self.dataListDict['RobotHardware0_servoState'][1:] = ss_tmp
        print '[%f] : finish readData' % (time.time() - start_time)
    def plotData(self, mabiki):
        """Draw every configured curve; `mabiki` is the decimation step
        (every mabiki-th sample is plotted via ``[::mabiki]`` slicing)."""
        print '[%f] : start plotData' % (time.time() - start_time)
        # tm = self.dataListDict['time'][::mabiki]
        color_list = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
        cur_row = 0
        for plot in self.plot_dic.items(): # plot : ('joint_velocity', {'field':[[0,1],[2,3]], 'log':['rh_q', 'st_q']})
            cur_fields = plot[1]['field']
            cur_logs = plot[1]['log']
            for cf in cur_fields: # cf : [0,1] -> [2,3]
                for i, cl in enumerate(cur_logs): # cl : 'rh_q' -> 'st_q'
                    cur_data = numpy.array(self.dataListDict[cl][1:])
                    cur_tm = numpy.array(self.dataListDict[cl][0])
                    for cur_col in cf:
                        cur_plot_item = self.items[cur_row][cur_col-cf[0]]
                        cur_plot_item.setTitle(plot[0]+" "+str(cur_col))
                        cur_plot_item.showGrid(x=True, y=True)
                        cur_plot_item.addLegend(offset=(0, 0))
                        if cur_row == self.row_num -1:
                            cur_plot_item.setLabel("bottom", text="time", units="s")
                        if cur_col-cf[0] == 0:
                            # Left-axis unit depends on the plot name.
                            tmp_units = None
                            if plot[0] == "12V" or plot[0] == "80V":
                                tmp_units = "V"
                            elif plot[0] == "current":
                                tmp_units = "A"
                            elif plot[0] == "temperature" or plot[0] == "joint_angle" or plot[0] == "attitude" or plot[0] == "tracking":
                                tmp_units = "deg"
                            elif plot[0] == "joint_velocity":
                                tmp_units = "deg/s"
                            elif plot[0] == "watt":
                                tmp_units = "W"
                            cur_plot_item.setLabel("left", text="", units=tmp_units)
                            # cur_plot_item.enableAutoSIPrefix(False)
                        if cl == 'RobotHardware0_servoState':
                            # servoState packs urata_len+1 values per joint;
                            # fixed offsets below select specific channels.
                            urata_len = 13
                            if plot[0] == "12V":
                                cur_plot_item.plot(cur_tm, cur_data[:, (urata_len+1) * cur_col + (9+1)][::mabiki], pen=pyqtgraph.mkPen('r', width=2), name='12V')
                            elif plot[0] == "80V":
                                cur_plot_item.plot(cur_tm, cur_data[:, (urata_len+1) * cur_col + (2+1)][::mabiki], pen=pyqtgraph.mkPen('g', width=2), name='80V')
                            elif plot[0] == "current":
                                cur_plot_item.plot(cur_tm, cur_data[:, (urata_len+1) * cur_col + (1+1)][::mabiki], pen=pyqtgraph.mkPen('b', width=2), name='current')
                            elif plot[0] == "temperature":
                                cur_plot_item.plot(cur_tm, cur_data[:, (urata_len+1) * cur_col + (0+1)][::mabiki], pen=pyqtgraph.mkPen('r', width=2), name='motor_temp')
                                cur_plot_item.plot(cur_tm, cur_data[:, (urata_len+1) * cur_col + (7+1)][::mabiki], pen=pyqtgraph.mkPen('g', width=1), name='motor_outer_temp')
                            elif plot[0] == "tracking":
                                cur_plot_item.plot(cur_tm, [math.degrees(x) for x in cur_data[:, (urata_len+1) * cur_col + (6+1)][::mabiki]], pen=pyqtgraph.mkPen('g', width=2), name='abs - enc')
                        elif plot[0] == "tracking":
                            if cl == "RobotHardware0_q":
                                cur_plot_item.plot(cur_tm, [math.degrees(x) for x in numpy.array(self.dataListDict['st_q'][1:])[:, cur_col][::mabiki] - cur_data[:, cur_col][::mabiki]], pen=pyqtgraph.mkPen('r', width=2), name="st_q - rh_q")
                            else:
                                pass
                        elif plot[0] == "joint_angle" or plot[0] == "joint_velocity" or plot[0] == "attitude":
                            cur_plot_item.plot(cur_tm, [math.degrees(x) for x in cur_data[:, cur_col][::mabiki]], pen=pyqtgraph.mkPen(color_list[i], width=len(cur_logs)-i), name=cl)
                        elif plot[0] == "watt":
                            if cl == "RobotHardware0_dq":
                                cur_plot_item.plot(cur_tm, [math.degrees(x) for x in numpy.array(self.dataListDict['RobotHardware0_tau'][1:])[:, cur_col][::mabiki] * cur_data[:, cur_col][::mabiki]], pen=pyqtgraph.mkPen(color_list[i], width=len(cur_logs)-i), name=cl, fillLevel=0, fillBrush=color_list[i])
                            else:
                                pass
                        elif plot[0] == "imu":
                            if cl == 'RobotHardware0_gsensor':
                                self.items[cur_row][0].plot(cur_tm, cur_data[:, cur_col][::mabiki], pen=pyqtgraph.mkPen(color_list[cur_col%3], width=3-cur_col%3), name=['x', 'y', 'z'][cur_col%3])
                            elif cl == 'RobotHardware0_gyrometer':
                                self.items[cur_row][1].plot(cur_tm, cur_data[:, cur_col][::mabiki], pen=pyqtgraph.mkPen(color_list[cur_col%3], width=3-cur_col%3), name=['x', 'y', 'z'][cur_col%3])
                        elif plot[0] == "comp":
                            cur_plot_item.plot(cur_tm, cur_data[:, cur_col][::mabiki], pen=pyqtgraph.mkPen(color_list[i], width=len(cur_logs)-i), name=cl)
                            if cur_col % 6 < 3: # position
                                cur_plot_item.setYRange(-0.025, +0.025) # compensation limit
                            else: # rotation
                                cur_plot_item.setYRange(math.radians(-10), math.radians(+10)) # compensation limit
                        else:
                            cur_plot_item.plot(cur_tm, cur_data[:, cur_col][::mabiki], pen=pyqtgraph.mkPen(color_list[i], width=len(cur_logs)-i), name=cl)
            # calculate y range of each rows using autofit function and then link y range each row
            y_min = min([p.viewRange()[1][0] for p in self.items[cur_row]])
            y_max = max([p.viewRange()[1][1] for p in self.items[cur_row]])
            if plot[0] != "joint_angle" and plot[0].find("_force") == -1 and plot[0] != "imu" and plot[0] != "comp":
                self.items[cur_row][0].setYRange(y_min, y_max)
                for p in self.items[cur_row]:
                    p.setYLink('r'+str(cur_row)+'c0')
            # increase current row
            cur_row = cur_row + 1
        self.view.showMaximized()
        print '[%f] : finish plotData' % (time.time() - start_time)
if __name__ == '__main__':
    # time
    # Module-level start_time: also read by the class methods' progress prints.
    start_time = time.time()
    print '[%f] : start !!!' % (time.time() - start_time)
    # args
    parser = argparse.ArgumentParser(description='plot data from hrpsys log')
    parser.add_argument('-f', type=str, help='input file', metavar='file', required=True)
    parser.add_argument('--conf', type=str, help='configure file', metavar='file', required=True)
    parser.add_argument('--min_time', type=float, help='xmin for graph : not implemented yet', default=0.0)
    parser.add_argument('--max_time', type=float, help='xmax for graph : not implemented yet', default=0.0)
    parser.add_argument('-t', type=str, help='title', default="")
    parser.add_argument('--mabiki', type=int, help='mabiki step', default=1)
    parser.set_defaults(feature=False)
    args = parser.parse_args()
    # main
    # Build the plot grid, load the logs, draw them, and enter the Qt loop.
    a = DataloggerLogParserController(args.f, args.conf, args.t)
    a.readData(args.min_time, args.max_time)
    a.plotData(args.mabiki)
    pyqtgraph.Qt.QtGui.QApplication.instance().exec_()
| [
"kuroiwa@jsk.imi.i.u-tokyo.ac.jp"
] | kuroiwa@jsk.imi.i.u-tokyo.ac.jp |
3bd820c2b47affa60cbbb287a97dcac4b21db76d | bf78b940cf87a9c33c494ac90f5c20fa2b35240f | /04_Null_Models/calc_dist_iter.py | a0f335858d711cdc1c67e514cad2e9c4a46a6649 | [] | no_license | Jiamingglyy/mouse_gradients | b03c9f8cee8020473656985414e0625f88e8ac36 | c2a42400e8dd81c68de0f9bf79183cd616192e66 | refs/heads/master | 2023-03-27T14:58:43.085936 | 2021-03-26T14:49:47 | 2021-03-26T14:49:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 901 | py | import numpy as np
import gdist
import argparse
if __name__=='__main__':
    # Compute geodesic distances on a cortical surface mesh from one source
    # vertex (selected by -idx) to all later cortex vertices, and save them.
    parser = argparse.ArgumentParser(description='')
    parser.add_argument("-idx", dest="idx",required=True)
    args = parser.parse_args()
    idx = int(args.idx)
    # Hard-coded data location; precomputed mesh arrays saved as .npy files.
    data_dir = '/home/julia/data/gradients/'
    cortex = np.load(data_dir+'results/null_models/surface/cortex_mask.npy')
    points = np.load(data_dir+'results/null_models/surface/points.npy')
    faces = np.load(data_dir+'results/null_models/surface/faces.npy')
    # Distances only to vertices after idx (upper-triangular scheme), so the
    # full pairwise matrix can be assembled from all per-idx runs.
    dist = gdist.compute_gdist(np.array(points, dtype=np.float64),
                               np.array(faces, dtype=np.int32),
                               source_indices=np.array([cortex[idx]], dtype=np.int32),
                               target_indices=np.array(cortex[idx+1:], dtype=np.int32))
    np.save(data_dir+'results/null_models/surface/iter/idx_{}.npy'.format(idx), dist)
| [
"juhuntenburg"
] | juhuntenburg |
478c74baf9d087d205467e131408a15aa52e7eb7 | 5b51cc97595f5b246c6788608774f9f5839210ea | /setup.py | a56035ce72db24d2f0aac9e39edfd3471afa2a80 | [] | no_license | m-mix/djangocms-bootstrap3-grid | 3147b8e8a502d864c515d251ddeef0a504dcde0d | 38b900a42174d5be1a14f591f4486456ebb409b1 | refs/heads/master | 2021-01-23T03:48:28.831542 | 2014-02-27T13:20:42 | 2014-02-27T13:20:42 | 17,201,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,149 | py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from djangocms_bootstrap3 import __version__
# Packaging metadata for the djangocms-bootstrap3-grid plugin.
INSTALL_REQUIRES = []
# Trove classifiers published to PyPI; targets Python 2.6/2.7 era Django.
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Environment :: Web Environment',
    'Framework :: Django',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Communications',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
]
setup(
    name='djangocms-bootstrap3-grid',
    version=__version__,
    description='Bootstrap3 grid system plugin for django CMS',
    author='Maidakov Mikhail',
    author_email='m-email@inbox.com',
    url='https://github.com/m-mix/djangocms-bootstrap3-grid',
    packages=find_packages(exclude=[]),
    install_requires=INSTALL_REQUIRES,
    license='LICENSE.txt',
    platforms=['OS Independent'],
    classifiers=CLASSIFIERS,
    # README is read at build time for the PyPI long description.
    long_description=open('README.rst').read(),
    include_package_data=True,
    zip_safe=False
)
| [
"m-email@inbox.com"
] | m-email@inbox.com |
15cea4f928a57a80bc4a8c891bbc166135746b2c | 4201d4aff2f2d877fa75d6d971f7826d5d1369e3 | /product_onepage/settings.py | 91b09b1db1c82f31bfb8318f86917bf8e21a21ab | [
"MIT"
] | permissive | emencia/emencia-product-onepage | 4f5fb72cc47ca8725bc01c9c69a583126e7b8514 | 09cff26e97641412b297f977ca8c8045983bbf97 | refs/heads/master | 2020-04-13T09:31:04.787009 | 2015-01-13T01:14:00 | 2015-01-13T01:14:00 | 28,994,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,778 | py | # Dummy gettext
# Identity stand-in for gettext so choice labels are marked for translation
# without importing Django at settings-definition time.
gettext = lambda s: s
# Plugins template choices
ONEPAGE_TAB_TEMPLATE_CHOICES = (
    ("product_onepage/tab.html", gettext("Default")),
)
ONEPAGE_SPEC_TEMPLATE_CHOICES = (
    ("product_onepage/spec.html", gettext("Default")),
)
ONEPAGE_BLURB_TEMPLATE_CHOICES = (
    ("product_onepage/blurb.html", gettext("Default")),
)
ONEPAGE_OVERVIEW_TEMPLATE_CHOICES = (
    ("product_onepage/overview.html", gettext("Default")),
)
ONEPAGE_PACK_TEMPLATE_CHOICES = (
    ("product_onepage/pack.html", gettext("Default")),
)
ONEPAGE_SUBSCRIBE_TEMPLATE_CHOICES = (
    ("product_onepage/subscribe.html", gettext("Default")),
)
ONEPAGE_VIDEO_TEMPLATE_CHOICES = (
    ("product_onepage/video.html", gettext("Default")),
)
ONEPAGE_TWENTYTWENTY_TEMPLATE_CHOICES = (
    ("product_onepage/twentytwenty.html", gettext("Default")),
)
# Plugins templates default choice
# Each default is the template path of the first choice above.
ONEPAGE_TAB_DEFAULT_TEMPLATE = ONEPAGE_TAB_TEMPLATE_CHOICES[0][0]
ONEPAGE_SPEC_DEFAULT_TEMPLATE = ONEPAGE_SPEC_TEMPLATE_CHOICES[0][0]
ONEPAGE_BLURB_DEFAULT_TEMPLATE = ONEPAGE_BLURB_TEMPLATE_CHOICES[0][0]
ONEPAGE_OVERVIEW_DEFAULT_TEMPLATE = ONEPAGE_OVERVIEW_TEMPLATE_CHOICES[0][0]
ONEPAGE_PACK_DEFAULT_TEMPLATE = ONEPAGE_PACK_TEMPLATE_CHOICES[0][0]
ONEPAGE_SUBSCRIBE_DEFAULT_TEMPLATE = ONEPAGE_SUBSCRIBE_TEMPLATE_CHOICES[0][0]
ONEPAGE_VIDEO_DEFAULT_TEMPLATE = ONEPAGE_VIDEO_TEMPLATE_CHOICES[0][0]
ONEPAGE_TWENTYTWENTY_DEFAULT_TEMPLATE = ONEPAGE_TWENTYTWENTY_TEMPLATE_CHOICES[0][0]
# Alignement options
ONEPAGE_BLURB_ALIGNMENT_CHOICES = (
    ('1', gettext(u'Extreme Left')),
    ('3', gettext(u'Left')),
    ('centered', gettext(u'Center')),
    ('6', gettext(u'Right')),
    ('8', gettext(u'Extreme Right')),
)
# Bug fix: this tuple used `_()` which is never defined in this module (only
# the dummy `gettext` above is), so importing the settings raised NameError.
ONEPAGE_SPEC_ALIGNMENT_CHOICES = (
    ('left', gettext(u'Left')),
    ('right', gettext(u'Right')),
)
| [
"sveetch@gmail.com"
] | sveetch@gmail.com |
d2434930a13bebdaf26a631a418001629e7bb9b0 | c7c6ba345e3551621a18a730b2ea1cd59aa15401 | /scripts/vagharchakian2012temporal.py | 76f2d49949556f326487a4b1ecfe220d54084d6d | [] | no_license | jbpoline/pypreprocess | 3b68ffb059f6cbdeb5c7383c5fa9168515dc9cdf | ec459b1963ed5e881b662ac663a23d37169a3fa2 | refs/heads/master | 2021-01-21T17:16:49.384426 | 2014-08-15T14:50:14 | 2014-08-15T14:50:14 | 23,086,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,067 | py | import os
import sys
import glob
import multiprocessing
# Print a loud banner warning: the relative paths below assume the script is
# launched from its parent directory.
warning = ("%s: THIS SCRIPT MUST BE RUN FROM ITS PARENT "
           "DIRECTORY!") % sys.argv[0]
banner = "#" * len(warning)
separator = "\r\n\t"
print separator + separator.join(["", banner, warning, banner, ""])
# spm_loader path
# Make the sibling spm_loader package importable by appending it to sys.path.
SPM_LOADER_DIR = os.path.join(os.path.dirname(os.path.split(
    os.path.abspath(__file__))[0]),
                              "spm_loader")
sys.path.append(SPM_LOADER_DIR)
from spm_loader.spm import load_intra as load_spm
from spm_loader.utils import fix_docs, execute_glms
# Study identifiers and hard-coded input/output locations.
study = 'vagharchakian2012temporal'
root = ('/neurospin/unicog/protocols/IRMf'
        '/Compression_Vagharchakian_new_2009/Subjects')
output_dir = '/volatile/brainpedia/protocols'
# Map raw SPM contrast labels (A/V = auditory/visual, number = % of original
# sentence duration) to human-readable names.
contrast_names = {
    'A100': 'auditory sentences 100% duration',
    'A100-A80': 'auditory sentences 100% - 80% duration',
    'A20': 'auditory sentences 20% duration',
    'A20-A40': 'auditory sentences 20% - 40% duration',
    'A40': 'auditory sentences 40% duration',
    'A40-A20': 'auditory sentences 40% - 20% duration',
    'A40-A60': 'auditory sentences 40% - 60% duration',
    'A60': 'auditory sentences 60% duration',
    'A60-A40': 'auditory sentences 60% - 40% duration',
    'A60-A80': 'auditory sentences 60% - 80% duration',
    'A80': 'auditory sentences 80% duration',
    'A80-A100': 'auditory sentences 80% - 100% duration',
    'A80-A60': 'auditory sentences 80% - 60% duration',
    'V100': 'visual sentences 100% duration',
    'V100-V80': 'visual sentences 100% - 80% duration',
    'V20': 'visual sentences 20% duration',
    'V20-V40': 'visual sentences 20% - 40% duration',
    'V40': 'visual sentences 40% duration',
    'V40-V20': 'visual sentences 40% - 20% duration',
    'V40-V60': 'visual sentences 40% - 60% duration',
    'V60': 'visual sentences 60% duration',
    'V60-V40': 'visual sentences 60% - 40% duration',
    'V60-V80': 'visual sentences 60% - 80% duration',
    'V80': 'visual sentences 80% duration',
    'V80-V100': 'visual sentences 80% - 100% duration',
    'V80-V60': 'visual sentences 80% - 60% duration',
    'tt Audio': 'auditory sentences all compressions',
    'tt Visuel': 'visual sentences all compressions',
}
# GLM contrast definitions: weighted combinations (+1/-1) of the readable
# contrast names above, keyed by the name of the derived contrast.
definitions = {
    'auditory vs visual sentences': {
        'auditory sentences all compressions': 1,
        'visual sentences all compressions': -1,
    },
    'visual vs auditory sentences': {
        'auditory sentences all compressions': -1,
        'visual sentences all compressions': 1,
    },
    # 'auditory 100 - 80 sentences': {
    #     'auditory sentences 100% duration': 1,
    #     'auditory sentences 80% duration': -1,
    #     },
    # 'auditory 80 - 100 sentences': {
    #     'auditory sentences 100% duration': -1,
    #     'auditory sentences 80% duration': 1,
    #     },
    'auditory normal speed vs bottleneck': {
        'auditory sentences 100% duration': 1,
        'auditory sentences 20% duration': -1,
    },
    'auditory bottleneck vs normal speed': {
        'auditory sentences 100% duration': -1,
        'auditory sentences 20% duration': 1,
    },
    # 'visual 100 - 80 sentences': {
    #     'visual sentences 100% duration': 1,
    #     'visual sentences 80% duration': -1,
    #     },
    # 'visual 80 - 100 sentences': {
    #     'visual sentences 100% duration': -1,
    #     'visual sentences 80% duration': 1,
    #     },
    'visual normal speed vs bottleneck': {
        'visual sentences 100% duration': 1,
        'visual sentences 20% duration': -1,
    },
    'visual bottleneck vs normal speed': {
        'visual sentences 100% duration': -1,
        'visual sentences 20% duration': 1,
    },
    'visual language bottleneck vs rest': {
        'visual sentences 20% duration': 1,
    },
    'visual language normal speed vs rest': {
        'visual sentences 100% duration': 1,
    },
    'auditory language normal speed vs rest': {
        'auditory sentences 100% duration': 1,
    },
    'auditory language bottleneck vs rest': {
        'auditory sentences 20% duration': 1,
    },
}
def get_docs(inputs=False):
    """Load every subject's SPM.mat in parallel and return fixed-up docs.

    Each 8-character directory under ``root`` is treated as a subject;
    loading is fanned out over a multiprocessing pool of 24 workers, then
    the collected docs are normalised via fix_docs with contrast_names.
    ``inputs`` is forwarded to the spm_loader (whether to keep input files).
    """
    n_jobs = 24
    docs = []
    pool = multiprocessing.Pool(processes=n_jobs)
    for subj_dir in glob.glob('%s/????????' % root):
        mat = ('%s/fMRI/acquisition1/analysis'
               '/model7_HRF_comp_FINAL/SPM.mat' % subj_dir)
        ar = pool.apply_async(load_spm,
                              args=(mat, ),
                              kwds=dict(label=study,
                                        inputs=inputs,
                                        subject=-5,
                                        study=study))
        docs.append(ar)
    pool.close()
    pool.join()
    # Resolve the async results only after the whole pool has finished.
    docs = [doc.get() for doc in docs]
    return fix_docs(docs, contrast_names)
# def get_infos():
# infos = grr.get_infos()
# mapping = {}
# for subject_dir in glob.glob(os.path.join(root, '[A-Z][A-Z]??????')):
# if os.path.isdir(subject_dir):
# label = subject_id = os.path.split(subject_dir)[1].lower()
# mapping[label] = infos.get(
# subject_id, {'subject_id': subject_id})
# return mapping
if __name__ == '__main__':
    # sanitize command-line
    # Optional first argument overrides the default output directory.
    if len(sys.argv) > 1:
        output_dir = sys.argv[1]
    # Load all subject docs (keeping inputs) and run the GLM contrasts.
    docs = get_docs(inputs=True)
    execute_glms(docs, output_dir, definitions,
                 dataset_id="vagharchakian2012temporal",
                 )
# need to resample...
# import nibabel as nb
# import numpy as np
# from nisl import resampling
# target_affine = np.array([[-3., 0., 0., 78.],
# [0., 3., 0., -111.],
# [0., 0., 3., -51.],
# [0., 0., 0., 1., ]])
# target_shape = (53, 63, 46)
# for niimg in glob.glob(os.path.join(
# output_dir, study, 'subjects', '*', '*_maps', '*.nii.gz')):
# print niimg
# img = resampling.resample_img(niimg, target_affine, target_shape)
# nb.save(img, niimg)
| [
"elvis.dohmatob@inria.fr"
] | elvis.dohmatob@inria.fr |
ad6a77825ba005305871456b1523c3a12198e415 | 2487c6a41e8f00b24e7f97b4251263c607f57e5a | /Python/2020-06-12-기본문법/unicode.py | 59c40660f4bc01919d9e069c0f720fecba2afe44 | [] | no_license | 0x000613/Outsourcing | 1dacf34431a70950153f699f99780d59ee42e2fa | d2d9cb7111c3b12e16fe42e73150b2f6e4f7e7bc | refs/heads/master | 2023-03-09T11:49:41.648198 | 2021-02-21T09:49:14 | 2021-02-21T09:49:14 | 329,673,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | # 대문자를 쉽게 가져올수 있는 string 모듈을 import한다.
# Import the string module helper so the uppercase alphabet is easy to obtain.
from string import ascii_uppercase
# Build a list of the uppercase letters.
alphaList = list(ascii_uppercase)
# Initialise nextLineCounter, which controls where line breaks happen.
nextLineCounter = 0
for alpha in alphaList:
    # When nextLineCounter is a multiple of 4...
    if nextLineCounter % 4 == 0:
        # ...start a new output line.
        print()
    # Print "letter : ASCII code" followed by two tabs, with no newline.
    print(alpha + " : " + str(ord(alpha)) + "\t\t", end='')
    # Advance nextLineCounter by one.
    nextLineCounter += 1
"xeros.log@gmail.com"
] | xeros.log@gmail.com |
5ae5bed3931a773bc8c36609fb86ce98761182b9 | 646ce7f6bf8abda078a3c9ac6d408b20c0da181a | /ex4.py | 4581057424d96618236d4c3e8ebbff39a7ac8e94 | [] | no_license | NishaUSK/pythontraining | 1fcefe8dd58b845a2e89de134f611da39bff1905 | c964e12c30a18aa4773cebc1ba201524b13cf9a6 | refs/heads/master | 2020-04-09T22:23:39.839249 | 2018-12-11T14:31:44 | 2018-12-11T14:31:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | #conditional statement program
# Demonstrate branching on two constant comparisons, printing one message
# per comparison plus a separator line between them.
print("25 is greater than 28" if 25 > 28 else "25 is less than 28")
print("-----------------------------")
print('True' if 18 < 28 else 'False')
| [
"nisha.nescode@gmail.com"
] | nisha.nescode@gmail.com |
20fcd5d4e9c68f072f12665f4282389755541b28 | 50de76eb887892c2085e1aa898987962a5d75380 | /_1_PythonBasic/Reactive/5.2B_distinct_with_mapping.py | bd179c9d1443ceb29fd6932f737d2d033d35e7f2 | [] | no_license | cyrsis/TensorflowPY36CPU | cac423252e0da98038388cf95a3f0b4e62d1a888 | 6ada50adf63078ba28464c59808234bca3fcc9b7 | refs/heads/master | 2023-06-26T06:57:00.836225 | 2021-01-30T04:37:35 | 2021-01-30T04:37:35 | 114,089,170 | 5 | 2 | null | 2023-05-25T17:08:43 | 2017-12-13T07:33:57 | Jupyter Notebook | UTF-8 | Python | false | false | 163 | py | from rx import Observable
# RxPY: distinct(key) emits only items whose key has not been seen before.
# With len() as the key, strings of an already-seen length are suppressed
# (lengths 5,4,5,5,7 -> presumably prints Alpha, Beta, Epsilon — per RxPY docs).
Observable.from_(["Alpha", "Beta", "Gamma", "Delta", "Epsilon"]) \
    .distinct(lambda s: len(s)) \
    .subscribe(lambda i: print(i))
| [
"em3888@gmail.com"
] | em3888@gmail.com |
7da3616259ad8ebcdde6f2c0278b1a810fc14f85 | 775e92797eb8beeb7262903e20652af377668eb4 | /Queue_UsingLinkedList.py | b682470e9811e56a99919e3bba37ce940db01796 | [] | no_license | tiennynyle/DSandAlgo_Practice | b29053ccfb03d62246ec0ac46954ed819730bab7 | 6ba8514eaa2dd6552792439fea0fbcd8e3e89a38 | refs/heads/master | 2022-11-13T12:44:40.509451 | 2020-07-07T16:34:15 | 2020-07-07T16:34:15 | 271,134,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,770 | py | class Node():
    def __init__(self,value):
        """Create a singly-linked-list node holding *value*, with no
        successor yet (``next`` stays None until the node is linked)."""
        self.value = value
        self.next = None
class Queue():
    """FIFO queue backed by a singly linked list of Node objects.

    Both enqueue and dequeue run in O(1): new nodes are appended at the
    tail, values are removed from the head.
    """
    def __init__(self):
        # An empty queue has no nodes and zero length.
        self.head = None
        self.tail = None
        self.num_elements = 0
    def enqueue(self, value):
        """Append *value* at the back of the queue."""
        node = Node(value)
        if self.head is not None:
            # Non-empty: hang the node off the current tail.
            self.tail.next = node
            self.tail = node
        else:
            # Empty: the new node is both front and back.
            self.head = self.tail = node
        self.num_elements += 1
    def dequeue(self):
        """Remove and return the front value, or None when the queue is empty."""
        if self.is_empty():
            return None
        front_value = self.head.value
        self.head = self.head.next  # advance the front pointer
        self.num_elements -= 1
        return front_value
    def size(self):
        """Return the number of stored elements."""
        return self.num_elements
    def is_empty(self):
        """Return True when the queue holds no elements."""
        return self.num_elements == 0
# Smoke test: exercises enqueue/dequeue/size and prints Pass/Fail per check.
# Setup
q = Queue()
q.enqueue(1)
q.enqueue(2)
q.enqueue(3)
# Test size
print ("Pass" if (q.size() == 3) else "Fail")
# Test dequeue
print ("Pass" if (q.dequeue() == 1) else "Fail")
# Test enqueue
q.enqueue(4)
print ("Pass" if (q.dequeue() == 2) else "Fail")
print ("Pass" if (q.dequeue() == 3) else "Fail")
print ("Pass" if (q.dequeue() == 4) else "Fail")
q.enqueue(5)
print ("Pass" if (q.size() == 1) else "Fail")
'''Time Complexity
When we use enqueue, we simply create a new node and add it to the tail of the list. And when we dequeue an item, we simply get the value from the head of the list and then shift the head variable so that it refers to the next node over.
Both of these operations happen in constant time—that is, they have a time-complexity of O(1).'''
| [
"noreply@github.com"
] | tiennynyle.noreply@github.com |
ccf5e0fbc0904ccbc4c7291540962c2be04e1e27 | d785e993ed65049c82607a1482b45bddb2a03dda | /nano2017/cfg2018/GluGluToContinToZZTo4e_cfg.py | b03efa62bd937ed3a42f2270aeed36b10cdf53de | [] | no_license | PKUHEPEWK/ssww | eec02ad7650014646e1bcb0e8787cf1514aaceca | a507a289935b51b8abf819b1b4b05476a05720dc | refs/heads/master | 2020-05-14T04:15:35.474981 | 2019-06-28T23:48:15 | 2019-06-28T23:48:15 | 181,696,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,374 | py | from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import config, getUsernameFromSiteDB
# CRAB3 job configuration for the GluGluToContinToZZTo4e 2018 NanoAOD sample.
config = Configuration()
config.section_("General")
config.General.requestName = 'GluGluToContinToZZTo4e_2018'
config.General.transferLogs= False
config.section_("JobType")
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'PSet.py'
config.JobType.scriptExe = 'crab_script_2018.sh'
config.JobType.inputFiles = ['crab_script_2018.py','ssww_keep_and_drop_2018.txt','ssww_output_branch_selection_2018.txt','haddnano.py'] #hadd nano will not be needed once nano tools are in cmssw
config.JobType.sendPythonFolder	 = True
config.section_("Data")
# Input dataset from global DBS; split by files, 20 files per job, all files.
config.Data.inputDataset = '/GluGluToContinToZZTo4e_13TeV_MCFM701_pythia8/RunIIAutumn18NanoAODv4-Nano14Dec2018_102X_upgrade2018_realistic_v16-v1/NANOAODSIM'
#config.Data.inputDBS = 'phys03'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
#config.Data.splitting = 'EventAwareLumiBased'
config.Data.unitsPerJob = 20
config.Data.totalUnits = -1
# Output is written under the submitting user's /store area.
config.Data.outLFNDirBase ='/store/user/%s/nano2018_v0' % (getUsernameFromSiteDB())
config.Data.publication = False
config.Data.outputDatasetTag = 'GluGluToContinToZZTo4e_2018'
config.section_("Site")
config.Site.storageSite = "T2_CN_Beijing"
#config.Site.storageSite = "T2_CH_CERN"
#config.section_("User")
#config.User.voGroup = 'dcms'
| [
"jiexiao@pku.edu.cn"
] | jiexiao@pku.edu.cn |
a96bb608332b2d8fe45b1a8a46753ffd072dd999 | 38cdcc151bf4e2c1c0307aec35bd02841b16615d | /exp6/myrun.py | 2f6ef1c3da32175b2596b8d8f7c93e5fcb182c9d | [] | no_license | gcolmenarejo/cmd | c8b18a3406d1672e738093136caf54edf101cae6 | d2a5b4beb3706eba98c9398ac00bab6abba3ad22 | refs/heads/master | 2020-03-31T08:06:37.451756 | 2019-03-11T11:20:14 | 2019-03-11T11:20:14 | 152,046,088 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,320 | py | from __future__ import print_function
import sys
sys.path.insert(0, '../')
import numpy as np
import pandas as pds
from preprocessing import smiles_to_seq, vectorize
import mySSVAE
import time
import csv
import tensorflow as tf
from preprocessing import get_property, canonocalize
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_absolute_error
# Start time count
start_time = time.time()
# pre-defined parameters
frac=0.5        # fraction of molecules treated as labelled (semi-supervised split)
beta=10000.     # SSVAE supervised-loss weight
char_set=[' ','1','2','3','4','5','6','7','8','9','-','#','(',')','[',']','+','=','B','Br','c','C','Cl','F','H','I','N','n','O','o','P','p','S','s','Si','Sn']
data_uri='../data/ZINC_310k.csv'
save_uri='./zinc_model310k.ckpt'
ntrn=300000     # number of training molecules taken from the CSV
frac_val=0.05   # validation fraction within each (labelled/unlabelled) pool
ntst=10000      # number of held-out test molecules
# data preparation
print('::: data preparation')
# NOTE(review): the CSV is parsed twice (once per line below); reading it into
# one DataFrame and slicing would halve the I/O — confirm before changing.
smiles = pds.read_csv(data_uri).as_matrix()[:ntrn+ntst,0] #0: SMILES
Y = np.asarray(pds.read_csv(data_uri).as_matrix()[:ntrn+ntst,1:], dtype=np.float32) # 1: MolWT, 2: LogP, 3: QED
# Convert SMILES strings to integer sequences and one-hot tensors.
list_seq = smiles_to_seq(smiles, char_set)
Xs, X=vectorize(list_seq, char_set)
# The last ntst molecules form the test set.
tstX=X[-ntst:]
tstXs=Xs[-ntst:]
tstY=Y[-ntst:]
#for n in range(50000,300001,50000):
# NOTE(review): the commented range above suggests this loop was meant to sweep
# several training-set sizes; as written it runs only for n=50000.  If the
# sweep is re-enabled, beware that X/Xs/Y are truncated in place below
# (X=X[:ntrn]), so later iterations would see already-truncated arrays —
# confirm before widening the range.
for n in range(50000,50001):
    ntrn = n
    # Dump the SMILES strings used for training at this size.
    csvfile = "./train"+str(n)+".csv"
    np.savetxt(csvfile, smiles[:n], fmt = '%s')
    X=X[:ntrn]
    Xs=Xs[:ntrn]
    Y=Y[:ntrn]
    # Split into labelled (L) and unlabelled (U) pools, each with a
    # (1 - frac_val)/frac_val train/validation split.
    nL=int(len(Y)*frac)
    nU=len(Y)-nL
    nL_trn=int(nL*(1-frac_val))
    nL_val=nL-nL_trn
    nU_trn=int(nU*(1-frac_val))
    nU_val=nU-nU_trn
    perm_id=np.random.permutation(len(Y))
    trnX_L=X[perm_id[:nL_trn]]
    trnXs_L=Xs[perm_id[:nL_trn]]
    trnY_L=Y[perm_id[:nL_trn]]
    valX_L=X[perm_id[nL_trn:nL_trn+nL_val]]
    valXs_L=Xs[perm_id[nL_trn:nL_trn+nL_val]]
    valY_L=Y[perm_id[nL_trn:nL_trn+nL_val]]
    trnX_U=X[perm_id[nL_trn+nL_val:nL_trn+nL_val+nU_trn]]
    trnXs_U=Xs[perm_id[nL_trn+nL_val:nL_trn+nL_val+nU_trn]]
    valX_U=X[perm_id[nL_trn+nL_val+nU_trn:]]
    valXs_U=Xs[perm_id[nL_trn+nL_val+nU_trn:]]
    # Standardize the labelled targets; the scaler is fit on all of Y.
    scaler_Y = StandardScaler()
    scaler_Y.fit(Y)
    trnY_L=scaler_Y.transform(trnY_L)
    valY_L=scaler_Y.transform(valY_L)
    ## model training
    print('::: model training, n=', str(n))
    seqlen_x = X.shape[1]
    dim_x = X.shape[2]
    dim_y = Y.shape[1]
    dim_z = 100
    dim_h = 250
    n_hidden = 3
    batch_size = 200
    # Reset the computation graph
    tf.reset_default_graph()
    model = mySSVAE.Model(seqlen_x = seqlen_x, dim_x = dim_x, dim_y = dim_y, dim_z = dim_z, dim_h = dim_h,
                          n_hidden = n_hidden, batch_size = batch_size, beta = float(beta), char_set = char_set)
    with model.session:
        model.train(trnX_L=trnX_L, trnXs_L=trnXs_L, trnY_L=trnY_L, trnX_U=trnX_U, trnXs_U=trnXs_U,
                    valX_L=valX_L, valXs_L=valXs_L, valY_L=valY_L, valX_U=valX_U, valXs_U=valXs_U)
        model.saver.save(model.session, save_uri)
        ## property prediction performance
        # Per-property mean absolute error on the held-out test set.
        tstY_hat=scaler_Y.inverse_transform(model.predict(tstX))
        for j in range(dim_y):
            print([j, mean_absolute_error(tstY[:,j], tstY_hat[:,j])])
        ## unconditional generation
        #smis_u = []
        #for t in range(5000):
        #    smi = model.sampling_unconditional()
        #    smis_u.append(smi)
        #csvfile = "./unc2-"+str(n)+".csv"
        #with open(csvfile, "w") as output:
        #    writer = csv.writer(output, lineterminator='\n')
        #    for val in smis_u:
        #        writer.writerow([val])
        #output.close()
        ## conditional generation (e.g. MolWt=250)
        smis_c = []
        yid = [0,1]
        ytarget = [250.,1.]
        ytarget_transform = []
        # Convert the raw targets into the standardized space used by the model.
        for i in range(2):
            ytarget_transform.append((ytarget[i]-scaler_Y.mean_[yid[i]])/np.sqrt(scaler_Y.var_[yid[i]]))
        for t in range(5000):
            smi = model.mysampling_conditional(yid, ytarget_transform)
            smis_c.append(smi)
        csvfile = "./con2-"+str(n)+".csv"
        with open(csvfile, "w") as output:
            writer = csv.writer(output, lineterminator='\n')
            for val in smis_c:
                writer.writerow([val])
        # NOTE(review): redundant — the with-block above already closed the file.
        output.close()
# End time count
elapsed_time = time.time() - start_time
print(elapsed_time)
print(time.strftime("%H:%M:%S", time.gmtime(elapsed_time)))
| [
"gcolmenarejo@localhost.localdomain"
] | gcolmenarejo@localhost.localdomain |
73d247de2249cedb6a8d8e51aa7607489710a3cf | e3c3d41137a2f3920e8524b6f07c18825959d98b | /C3D_pairwise_eval/all_mean.py | 63ac279d8f0627397af374dca9990556bfbd8e0f | [] | no_license | fxing328/c3d_ucf101_siamese_yilin | 080c100db89217888d369dc52094c1cfa41beae9 | 1b6a3b1b8842d645bb275ce5353a68ad5a1a4eaf | refs/heads/master | 2020-06-24T21:51:41.910015 | 2016-11-23T23:08:05 | 2016-11-23T23:08:05 | 74,618,334 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,114 | py | import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.ion()
import numpy as np
import scipy.io as scipy_io
import pdb
from collections import *
import sys
import os
import re
from pylab import *
class Graph:
    """Directed graph: a node set plus adjacency lists and per-edge weights."""

    def __init__(self):
        self.distances = {}
        self.edges = defaultdict(list)
        self.nodes = set()

    def add_node(self, value):
        """Register *value* as a vertex."""
        self.nodes.add(value)

    def add_edge(self, from_node, to_node, distance):
        """Insert the directed edge ``from_node -> to_node`` with weight *distance*."""
        self.edges[from_node].append(to_node)
        self.distances[(from_node, to_node)] = distance
def dijsktra(graph, initial):
    """Dijkstra shortest paths from *initial* over *graph*.

    *graph* must expose ``nodes`` (iterable), ``edges`` (mapping node ->
    list of successors) and ``distances`` (mapping (u, v) -> weight).

    Returns ``(visited, path)``: *visited* maps each reachable node to its
    shortest distance from *initial*; *path* maps each reached node to its
    predecessor on a shortest path.
    """
    visited = {initial: 0}
    path = {}

    nodes = set(graph.nodes)

    while nodes:
        # Pick the unsettled node with the smallest tentative distance
        # (linear scan; adequate for the small layered DAGs built here).
        min_node = None
        for node in nodes:
            if node in visited:
                if min_node is None or visited[node] < visited[min_node]:
                    min_node = node

        if min_node is None:
            break  # every remaining node is unreachable

        nodes.remove(min_node)
        current_weight = visited[min_node]

        for edge in graph.edges[min_node]:
            # BUGFIX: the original wrapped this in a bare ``except:`` that
            # called ``pdb.set_trace()`` — a debugging leftover that would
            # hang non-interactive runs and swallow every error.  A missing
            # (min_node, edge) distance now raises KeyError normally.
            weight = current_weight + graph.distances[(min_node, edge)]
            if edge not in visited or weight < visited[edge]:
                visited[edge] = weight
                path[edge] = min_node

    return visited, path
def provide_graph(matrix, graph):
    """Build a layered DAG over *matrix* for shortest-path search.

    Node 0 is the source and node ``matrix.size + 1`` the sink.  Cell
    ``(i, j)`` becomes node ``j * n_rows + i + 1``; every edge into that
    node carries weight ``matrix[i, j]``.  Between consecutive columns only
    monotonically non-decreasing row moves (``i <= k``) keep their true
    weight; decreasing moves are penalised with a huge constant so Dijkstra
    never picks them.

    Returns the populated *graph* (any object exposing ``add_node`` and
    ``add_edge``).
    """
    INF = 1000000000  # effectively forbids row-decreasing transitions
    n_rows = matrix.shape[0]
    n_cols = matrix.shape[1]
    source = 0
    sink = matrix.size + 1

    # One node per matrix cell, plus source and sink.
    for node in range(matrix.size + 2):
        graph.add_node(node)

    # Source -> first column, paying the first column's cell cost.
    for i in range(n_rows):
        graph.add_edge(source, i + 1, matrix[i, 0])

    # Column j -> column j+1, cost of the destination cell.
    for j in range(n_cols - 1):
        for i in range(n_rows):
            for k in range(n_rows):
                cost = matrix[k, j + 1] if i <= k else INF
                graph.add_edge(j * n_rows + i + 1, (j + 1) * n_rows + k + 1, cost)

    # Last column -> sink, free.
    # BUGFIX: the original used ``matrix.size + 1 - i`` as the origin, which
    # added a useless self-loop on the sink and left the top row of the last
    # column with no edge into the sink; the last-column nodes are
    # ``(n_cols - 1) * n_rows + i + 1``.
    for i in range(n_rows):
        graph.add_edge((n_cols - 1) * n_rows + i + 1, sink, 0)
    return graph
def optimal_path(matrix):
    """Score the best monotone column-by-column path through *matrix*.

    Entries of *matrix* are treated as probabilities: they are turned into
    costs ``-log(p + 1e-5)``, a layered DAG is built with ``provide_graph``
    and the cheapest source->sink path is recovered with ``dijsktra``.  The
    return value is ``exp(-sum(path costs) / n_columns)``, a geometric-mean
    style probability of the chosen cells.
    """
    # Small epsilon keeps log() finite for zero probabilities.
    cost = -np.log(matrix + 0.00001)

    graph = Graph()
    graph = provide_graph(cost, graph)
    visited, path = dijsktra(graph, 0)

    # Walk the predecessor map back from the sink to the source.
    # (Python-2-only ``print x`` statements replaced with cross-version
    # ``print(x)`` calls; unused ``original_matrix`` alias dropped.)
    node = cost.size + 1
    node_list = []
    while node != 0:
        node = path[node]
        print(node)
        node_list.append(node)

    # Mark the chosen cells and accumulate their cost.
    # NOTE(review): cells are decoded as x = floor(node / n_rows),
    # y = node - n_rows * x although node numbering starts at 1; this looks
    # off by one (node - 1 would decode cell (row, col) exactly).  Kept
    # as-is to preserve the original behaviour — TODO confirm intent.
    matrix_index = np.zeros(cost.shape)
    value = 0
    for i in range(1, len(node_list)):
        x = np.floor(node_list[i] / cost.shape[0])
        y = node_list[i] - cost.shape[0] * x
        print('x:' + str(x) + 'y:' + str(y))
        matrix_index[int(y), int(x)] = 1
        value += cost[int(y), int(x)]
    value = np.exp(-(1.0 / cost.shape[1]) * value)
    return value
if __name__ == '__main__':
    # Per-scene pairwise .mat matrices live under data_h5; the aggregated
    # 40x40 video-pair matrix is written to res_matrix.
    videopath = '/scratch/xf362/yilin_revised_pipeline_siamense/C3D_pairwise_eval/data_h5/'
    datapath = '/scratch/xf362/yilin_revised_pipeline_siamense/C3D_pairwise_eval/data_h5/'
    respath = '/scratch/xf362/yilin_revised_pipeline_siamense/C3D_pairwise_eval/res_matrix/'
    #for scene_name in sort(os.listdir(datapath)):
    #scene_name = 'Chinese_new_year_nyc_2016'
    scene_name = 'Baltimore_Riot_2015'
    # video_pair[i, j] = mean of the pairwise score matrix between video i
    # and video j.  Assumes the scene has exactly 40 .mp4 entries (the j
    # counter is wrapped at 40 below) — TODO confirm for other scenes.
    video_pair = np.zeros((40,40))
    i = 0
    j = 0
    #pdb.set_trace()
    for video_file1 in sort(os.listdir(videopath+scene_name)):
        if re.search('.mp4',video_file1) is not None:
            i = i+1
            # Strip the trailing 7 characters to get the clip base name;
            # presumably this matches the .mat naming scheme — verify.
            video_name1 = video_file1[:-7]
            for video_file2 in sort(os.listdir(videopath+scene_name)):
                if re.search('.mp4',video_file2) is not None:
                    j = j+1
                    video_name2 = video_file2[:-7]
                    pair_matrix = scipy_io.loadmat(datapath+scene_name+'/'+video_name1+'_'+video_name2+'.mat')['pair_matrix']
                    #pair_matrix2 = scipy_io.loadmat(datapath+scene_name+'/'+video_name2+'_'+video_name1+'.mat')['pair_matrix']
                    #pair_matrix = (pair_matrix1+pair_matrix2.T)/2
                    #return None
                    #image_name ='./tmp/'+ str(i)+'_'+str(j)+'.pdf'
                    video_pair[i-1,j-1] = np.mean(pair_matrix)
                    # Wrap the column counter after a full inner pass.
                    if j == 40:
                        j = 0
    # Persist the aggregated matrix for this scene.
    res = {}
    res['video_pair'] = video_pair
    scipy_io.savemat(open(respath+scene_name+'_mean.new.mat','wb'),res)
    print 1
| [
"fx_328@hotmail.com"
] | fx_328@hotmail.com |
9462a18277d9b4f90b25c5ab35a7baf388d7aba4 | b953909018be86cf8cdf328e2b13395c1dbe28c0 | /apps/xadmin/plugins/inline.py | cdc764aa4faeabe0c94f3cdcb5e6bb2bc7eb78b8 | [] | no_license | wangyong240/mes | 06ce26d146aebe0b0103dda4fdd198c3cefc6014 | 12d7321c1b96ae0fdd8f26029462e1943a500c01 | refs/heads/master | 2023-01-01T13:29:29.853063 | 2020-09-19T01:19:22 | 2020-09-19T01:19:22 | 296,762,233 | 1 | 0 | null | 2020-09-19T01:20:05 | 2020-09-19T01:20:04 | null | UTF-8 | Python | false | false | 16,489 | py | import copy
import inspect
from django import forms
from django.forms.formsets import all_valid, DELETION_FIELD_NAME
from django.forms.models import inlineformset_factory, BaseInlineFormSet
from django.contrib.contenttypes.generic import BaseGenericInlineFormSet, generic_inlineformset_factory
from django.template import loader
from django.template.loader import render_to_string
from xadmin.layout import FormHelper, Layout, flatatt, Container, Column, Field, Fieldset
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ModelFormAdminView, DetailAdminView, filter_hook
class ShowField(Field):
    """Crispy-forms field that renders read-only values instead of inputs."""

    template = "xadmin/layout/field_value.html"

    def __init__(self, admin_view, *args, **kwargs):
        super(ShowField, self).__init__(*args, **kwargs)
        self.admin_view = admin_view
        # Table-style inlines render each value as a table cell instead.
        if admin_view.style == 'table':
            self.template = "xadmin/layout/field_value_td.html"

    def render(self, form, form_style, context):
        """Concatenate the rendered value of every non-hidden field."""
        detail = form.detail
        rendered = []
        for name in self.fields:
            if isinstance(form.fields[name].widget, forms.HiddenInput):
                continue
            rendered.append(loader.render_to_string(
                self.template,
                {'field': form[name], 'result': detail.get_field_result(name)}))
        return ''.join(rendered)
class DeleteField(Field):
    """Delete control that is only emitted for rows that already exist."""

    def render(self, form, form_style, context):
        # Unsaved (extra) forms get no delete control at all.
        if not form.instance.pk:
            return ""
        self.attrs['type'] = 'hidden'
        return super(DeleteField, self).render(form, form_style, context)
class TDField(Field):
    # Field rendered inside a table cell; used by the 'table' inline style.
    template = "xadmin/layout/td-field.html"
class InlineStyleManager(object):
    """Registry mapping a style name to its InlineStyle class."""

    # Class-level mapping: every manager instance shares the same registry.
    inline_styles = {}

    def register_style(self, name, style):
        """Expose *style* under *name*."""
        self.inline_styles[name] = style

    def get_style(self, name='stacked'):
        """Look up a registered style; returns None for unknown names."""
        return self.inline_styles.get(name)

# Module-wide singleton used by the style classes and plugins below.
style_manager = InlineStyleManager()
class InlineStyle(object):
    """Base inline layout: renders the formset with the stacked template."""

    template = 'xadmin/edit_inline/stacked.html'

    def __init__(self, view, formset):
        self.formset = formset
        self.view = view

    def update_layout(self, helper):
        """Hook for subclasses to adjust the crispy helper; no-op here."""
        pass

    def get_attrs(self):
        """Extra template context; subclasses may add entries."""
        return {}

style_manager.register_style('stacked', InlineStyle)
class OneInlineStyle(InlineStyle):
    # Renders a single inline object (used when max_num == 1).
    template = 'xadmin/edit_inline/one.html'
style_manager.register_style("one", OneInlineStyle)
class AccInlineStyle(InlineStyle):
    # Renders each inline form inside a collapsible accordion panel.
    template = 'xadmin/edit_inline/accordion.html'
style_manager.register_style("accordion", AccInlineStyle)
class TabInlineStyle(InlineStyle):
    # Renders each inline form in its own tab.
    template = 'xadmin/edit_inline/tab.html'
style_manager.register_style("tab", TabInlineStyle)
class TableInlineStyle(InlineStyle):
    """Tabular inline: one row per form, one table cell per field."""

    template = 'xadmin/edit_inline/tabular.html'

    def update_layout(self, helper):
        """Wrap every field of the first form in a TDField cell."""
        cells = [TDField(name) for name in self.formset[0].fields.keys()]
        helper.add_layout(Layout(*cells))

    def get_attrs(self):
        """Provide column headers (regular and read-only) for the template."""
        fields, readonly_fields = [], []
        if len(self.formset):
            first = self.formset[0]
            fields = [f for k, f in first.fields.items()
                      if k != DELETION_FIELD_NAME]
            readonly_fields = list(getattr(first, 'readonly_fields', []))
        return {'fields': fields, 'readonly_fields': readonly_fields}

style_manager.register_style("table", TableInlineStyle)
def replace_field_to_value(layout, av):
    """Recursively swap editable Field entries in *layout* for read-only
    ShowField equivalents bound to admin view *av*."""
    if not layout:
        return
    for idx, entry in enumerate(layout.fields):
        if isinstance(entry, Field) or issubclass(entry.__class__, Field):
            layout.fields[idx] = ShowField(av, *entry.fields, **entry.attrs)
        elif isinstance(entry, basestring):
            # Bare field names become read-only fields too.
            layout.fields[idx] = ShowField(av, entry)
        elif hasattr(entry, 'get_field_names'):
            # Nested layout containers: recurse into them.
            replace_field_to_value(entry, av)
class InlineModelAdmin(ModelFormAdminView):
    """Admin view for a model edited inline on its parent's change page.

    Builds the inline formset, wires the crispy-forms helper/layout for the
    selected inline style, and enforces per-model permissions.
    """
    fk_name = None                  # explicit FK to the parent, if ambiguous
    formset = BaseInlineFormSet
    extra = 3                       # number of blank extra forms
    max_num = None
    can_delete = True
    fields = []
    admin_view = None
    style = 'stacked'               # key into style_manager

    def init(self, admin_view):
        # Bind this inline to its parent admin view and resolve the instance
        # being edited (a fresh model instance on the add page).
        self.admin_view = admin_view
        self.parent_model = admin_view.model
        self.org_obj = getattr(admin_view, 'org_obj', None)
        self.model_instance = self.org_obj or admin_view.model()
        return self

    @filter_hook
    def get_formset(self, **kwargs):
        """Returns a BaseInlineFormSet class for use in admin add/change views."""
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        exclude.extend(self.get_readonly_fields())
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # InlineModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # if exclude is an empty list we use None, since that's the actual
        # default
        exclude = exclude or None
        can_delete = self.can_delete and self.has_delete_permission()
        defaults = {
            "form": self.form,
            "formset": self.formset,
            "fk_name": self.fk_name,
            "exclude": exclude,
            "formfield_callback": self.formfield_for_dbfield,
            "extra": self.extra,
            "max_num": self.max_num,
            "can_delete": can_delete,
        }
        defaults.update(kwargs)
        return inlineformset_factory(self.parent_model, self.model, **defaults)

    @filter_hook
    def instance_form(self, **kwargs):
        """Instantiate the formset, attach a crispy helper/layout and style,
        and collect read-only field values for display."""
        formset = self.get_formset(**kwargs)
        attrs = {
            'instance': self.model_instance,
            'queryset': self.queryset()
        }
        if self.request_method == 'post':
            attrs.update({
                'data': self.request.POST, 'files': self.request.FILES,
                'save_as_new': "_saveasnew" in self.request.POST
            })
        instance = formset(**attrs)
        instance.view = self

        helper = FormHelper()
        helper.form_tag = False
        # override form method to prevent render csrf_token in inline forms, see template 'bootstrap/whole_uni_form.html'
        helper.form_method = 'get'

        # A max_num of 1 forces the single-object style regardless of self.style.
        style = style_manager.get_style(
            'one' if self.max_num == 1 else self.style)(self, instance)
        style.name = self.style

        if len(instance):
            layout = copy.deepcopy(self.form_layout)

            if layout is None:
                layout = Layout(*instance[0].fields.keys())
            elif type(layout) in (list, tuple) and len(layout) > 0:
                layout = Layout(*layout)

                # Append any form fields the user layout forgot to mention.
                rendered_fields = [i[1] for i in layout.get_field_names()]
                layout.extend([f for f in instance[0]
                              .fields.keys() if f not in rendered_fields])

            helper.add_layout(layout)
            style.update_layout(helper)

            # replace delete field with Dynamic field, for hidden delete field when instance is NEW.
            helper[DELETION_FIELD_NAME].wrap(DeleteField)

        instance.helper = helper
        instance.style = style

        # Resolve read-only values: either a model field (verbose_name +
        # unicode value) or a zero-argument method on the instance.
        readonly_fields = self.get_readonly_fields()
        if readonly_fields:
            for form in instance:
                form.readonly_fields = []
                inst = form.save(commit=False)
                if inst:
                    for readonly_field in readonly_fields:
                        value = None
                        label = None
                        if readonly_field in inst._meta.get_all_field_names():
                            label = inst._meta.get_field_by_name(readonly_field)[0].verbose_name
                            value = unicode(getattr(inst, readonly_field))
                        elif inspect.ismethod(getattr(inst, readonly_field, None)):
                            value = getattr(inst, readonly_field)()
                            label = getattr(getattr(inst, readonly_field), 'short_description', readonly_field)
                        if value:
                            form.readonly_fields.append({'label': label, 'contents': value})
        return instance

    def has_auto_field(self, form):
        # True when the form's model (or any concrete parent) has an AutoField.
        if form._meta.model._meta.has_auto_field:
            return True
        for parent in form._meta.model._meta.get_parent_list():
            if parent._meta.has_auto_field:
                return True
        return False

    def queryset(self):
        # Users without change or view permission see an empty queryset.
        queryset = super(InlineModelAdmin, self).queryset()
        if not self.has_change_permission() and not self.has_view_permission():
            queryset = queryset.none()
        return queryset

    def has_add_permission(self):
        # Auto-created through models inherit the change permission.
        if self.opts.auto_created:
            return self.has_change_permission()
        return self.user.has_perm(
            self.opts.app_label + '.' + self.opts.get_add_permission())

    def has_change_permission(self):
        opts = self.opts
        if opts.auto_created:
            # For auto-created M2M through models, check permission on the
            # related (non-parent) model instead.
            for field in opts.fields:
                if field.rel and field.rel.to != self.parent_model:
                    opts = field.rel.to._meta
                    break
        return self.user.has_perm(
            opts.app_label + '.' + opts.get_change_permission())

    def has_delete_permission(self):
        if self.opts.auto_created:
            return self.has_change_permission()
        return self.user.has_perm(
            self.opts.app_label + '.' + self.opts.get_delete_permission())
class GenericInlineModelAdmin(InlineModelAdmin):
    """Inline admin for models related through contenttypes generic FKs."""

    ct_field = "content_type"   # name of the ContentType FK field
    ct_fk_field = "object_id"   # name of the object-id field
    formset = BaseGenericInlineFormSet

    def get_formset(self, **kwargs):
        # Same exclude handling as InlineModelAdmin.get_formset, but builds a
        # generic inline formset keyed on (ct_field, ct_fk_field).
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        exclude.extend(self.get_readonly_fields())
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # GenericInlineModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        exclude = exclude or None
        can_delete = self.can_delete and self.has_delete_permission()
        defaults = {
            "ct_field": self.ct_field,
            "fk_field": self.ct_fk_field,
            "form": self.form,
            "formfield_callback": self.formfield_for_dbfield,
            "formset": self.formset,
            "extra": self.extra,
            "can_delete": can_delete,
            "can_order": False,
            "max_num": self.max_num,
            "exclude": exclude
        }
        defaults.update(kwargs)
        return generic_inlineformset_factory(self.model, **defaults)
class InlineFormset(Fieldset):
    """Crispy layout object that renders a whole inline formset with the
    template of its configured inline style."""

    def __init__(self, formset, allow_blank=False, **kwargs):
        self.fields = []
        self.css_class = kwargs.pop('css_class', '')
        self.css_id = "%s-group" % formset.prefix
        self.template = formset.style.template
        self.inline_style = formset.style.name
        # On detail pages an empty formset collapses to a blank placeholder.
        if allow_blank and len(formset) == 0:
            self.template = 'xadmin/edit_inline/blank.html'
            self.inline_style = 'blank'
        self.formset = formset
        self.model = formset.model
        self.opts = formset.model._meta
        self.flat_attrs = flatatt(kwargs)
        self.extra_attrs = formset.style.get_attrs()

    def render(self, form, form_style, context):
        # NOTE(review): context_instance was removed in Django 1.10; this code
        # presumably targets an older Django — confirm before upgrading.
        return render_to_string(
            self.template, dict({'formset': self, 'prefix': self.formset.prefix, 'inline_style': self.inline_style}, **self.extra_attrs),
            context_instance=context)
class Inline(Fieldset):
    """Layout placeholder marking where a related model's formset goes.

    It renders nothing itself; replace_inline_objects swaps it for a real
    InlineFormset before the page is rendered.
    """

    def __init__(self, rel_model):
        self.model = rel_model
        self.fields = []

    def render(self, form, form_style, context):
        return ""
def get_first_field(layout, clz):
    """Depth-first search of *layout* for the first entry that is an
    instance of *clz*; returns None when nothing matches."""
    for item in layout.fields:
        if issubclass(item.__class__, clz):
            return item
        if hasattr(item, 'get_field_names'):
            # Layout containers expose get_field_names; descend into them.
            found = get_first_field(item, clz)
            if found:
                return found
def replace_inline_objects(layout, fs):
    """Swap each Inline placeholder in *layout* for the matching formset,
    popping it from the model -> InlineFormset mapping *fs*."""
    if not fs:
        return
    for idx, entry in enumerate(layout.fields):
        if isinstance(entry, Inline) and entry.model in fs:
            layout.fields[idx] = fs.pop(entry.model)
        elif hasattr(entry, 'get_field_names'):
            # Recurse into nested layout containers.
            replace_inline_objects(entry, fs)
class InlineFormsetPlugin(BaseAdminPlugin):
    """Admin plugin that attaches inline formsets to add/change views:
    builds them, validates them, saves them, and injects them into the
    form layout and media."""

    inlines = []

    @property
    def inline_instances(self):
        # Lazily build (and cache) one inline admin view per configured
        # inline class, skipping inlines the user has no permission on.
        if not hasattr(self, '_inline_instances'):
            inline_instances = []
            for inline_class in self.inlines:
                inline = self.admin_view.get_view(
                    (getattr(inline_class, 'generic_inline', False) and GenericInlineModelAdmin or InlineModelAdmin),
                    inline_class).init(self.admin_view)
                if not (inline.has_add_permission() or
                        inline.has_change_permission() or
                        inline.has_delete_permission() or
                        inline.has_view_permission()):
                    continue
                if not inline.has_add_permission():
                    # View/change only: forbid adding new rows.
                    inline.max_num = 0
                inline_instances.append(inline)
            self._inline_instances = inline_instances
        return self._inline_instances

    def instance_forms(self, ret):
        # Editable formset when the user may change; read-only otherwise.
        self.formsets = []
        for inline in self.inline_instances:
            if inline.has_change_permission():
                self.formsets.append(inline.instance_form())
            else:
                self.formsets.append(self._get_detail_formset_instance(inline))
        self.admin_view.formsets = self.formsets

    def valid_forms(self, result):
        # The page is valid only if every inline formset validates too.
        return all_valid(self.formsets) and result

    def save_related(self):
        # Re-point each formset at the freshly saved parent before saving.
        for formset in self.formsets:
            formset.instance = self.admin_view.new_obj
            formset.save()

    def get_context(self, context):
        context['inline_formsets'] = self.formsets
        return context

    def get_error_list(self, errors):
        # Collect formset-level and per-form errors for the error summary.
        for fs in self.formsets:
            errors.extend(fs.non_form_errors())
            for errors_in_inline_form in fs.errors:
                errors.extend(errors_in_inline_form.values())
        return errors

    def get_form_layout(self, layout):
        # Replace Inline placeholders; any formset without a placeholder is
        # appended to the first Column/Container (or the layout itself).
        allow_blank = isinstance(self.admin_view, DetailAdminView)
        fs = dict(
            [(f.model, InlineFormset(f, allow_blank)) for f in self.formsets])
        replace_inline_objects(layout, fs)

        if fs:
            container = get_first_field(layout, Column)
            if not container:
                container = get_first_field(layout, Container)
            if not container:
                container = layout

            for fs in fs.values():
                container.append(fs)

        return layout

    def get_media(self, media):
        for fs in self.formsets:
            media = media + fs.media
        if self.formsets:
            media = media + self.vendor(
                'xadmin.plugin.formset.js', 'xadmin.plugin.formset.css')
        return media

    def _get_detail_formset_instance(self, inline):
        # Build a read-only formset: no extra forms, no adds, no deletes, and
        # every editable field replaced by its displayed value.
        formset = inline.instance_form(extra=0, max_num=0, can_delete=0)
        formset.detail_page = True
        # NOTE(review): `if True:` looks like a leftover from a removed
        # condition; kept as-is.
        if True:
            replace_field_to_value(formset.helper.layout, inline)
            model = inline.model
            opts = model._meta
            fake_admin_class = type(str('%s%sFakeAdmin' % (opts.app_label, opts.module_name)), (object, ), {'model': model})
            for form in formset.forms:
                instance = form.instance
                if instance.pk:
                    form.detail = self.get_view(
                        DetailAdminUtil, fake_admin_class, instance)
        return formset
class DetailAdminUtil(DetailAdminView):
    """Detail view bound directly to a given object, skipping the usual
    request-based object lookup (used for read-only inline rows)."""

    def init_request(self, obj):
        self.obj = obj
        self.org_obj = obj
class DetailInlineFormsetPlugin(InlineFormsetPlugin):
    """Variant for detail pages: every inline formset is read-only."""

    def get_model_form(self, form, **kwargs):
        self.formsets = [self._get_detail_formset_instance(
            inline) for inline in self.inline_instances]
        return form
# Hook the inline plugins into the add/change and detail admin views.
site.register_plugin(InlineFormsetPlugin, ModelFormAdminView)
site.register_plugin(DetailInlineFormsetPlugin, DetailAdminView)
| [
"70498306+wangyong240@users.noreply.github.com"
] | 70498306+wangyong240@users.noreply.github.com |
e7aaed69e2edaaa3f2386bcc9d7eab2f6d43665d | 5675ea351e805a7e83352eb4e1fba3f9f5d98dfa | /SISAB.py | 4c362e71e2953ac72feadbc5651de47815b5f14f | [] | no_license | giapsunb/Extract-Sisab | 4f41215523e43ea93578d69fc2fe2b1944b9c987 | 180c96c4d7d6b7c08331d4b7844cdd850b129969 | refs/heads/main | 2023-08-05T19:06:22.137454 | 2021-09-22T01:07:18 | 2021-09-22T01:07:18 | 409,021,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,213 | py | from typing import Iterable, Tuple, Union, Dict, TextIO
from collections.abc import Iterable as Iter
import requests as req
from bs4 import BeautifulSoup
from bs4.element import Tag
def must_set(*options):
    """Method decorator that guards against use before configuration.

    Each name in *options* must be an instance attribute whose value has a
    non-zero ``len()`` at call time; otherwise an AssertionError is raised
    before the wrapped method runs.
    """
    def wrapper(fun):
        @wraps(fun)  # preserve the wrapped method's name/docstring
        def check(self, *args, **kwargs):
            # An empty attribute (e.g. '' or ()) means "not configured yet".
            # (Originally shadowed the builtin `object` and went through
            # __getattribute__; plain getattr on `self` is equivalent.)
            if any(len(getattr(self, name)) == 0 for name in options):
                raise AssertionError(
                    'Os atributos ({}) não estão configurados'.format(', '.join(options)))
            return fun(self, *args, **kwargs)
        return check
    return wrapper
class Sisab: # {{{
HOST = "https://sisab.saude.gov.br"
URL = HOST + "/paginas/acessoRestrito/relatorio/federal/indicadores/indicadorPainel.xhtml"
    def __init__(self):  # {{{
        """Open a session against Sisab: fetch cookies, prime the JSF
        ViewState, and scrape the available filter option tables."""
        super().__init__()
        self.__view_state__ = ''     # current javax.faces.ViewState token
        self.__last_request__ = ''   # body of the last XML/HTML response
        self.__option_key__ = ''
        # Option tables scraped from the site: key -> human-readable label.
        self.__area_options__: Dict[str, str] = dict()
        self.__national_options__: Dict[str, str] = dict()
        self.__region_options__: Dict[str, str] = dict()
        self.__state_options__: Dict[str, str] = dict()
        self.__municipality_options__: Dict[str, str] = dict()
        self.__index_options__: Dict[str, str] = dict()
        self.__period_options__: Dict[str, str] = dict()
        self.__view_options__: Dict[str, str] = dict()
        self.__index__ = ''   # indicator (coIndicador)
        self.__period__ = ''  # query period — must be configured before use
        self.__view__ = ''    # team view (visaoEquipe)
        self.__area__: str = ''
        self.__region__: Union[str, Tuple[str, ...]] = ''
        self.__state__: Union[str, Tuple[str, ...]] = ''
        self.__municipality__: Union[str, Tuple[str, ...]] = ''
        self.__cookies__ = []
        # Initial handshake: cookies + option tables, then a first POST to
        # refresh the ViewState.
        self.__get_cookies__()
        self.post()
        soup = BeautifulSoup(self.__last_request__, 'html.parser')
        self.__area_options__ = {k: v for k, v in map(
            lambda o: (o.attrs['value'], o.get_text()),
            soup.select('select#selectLinha option')
        )}
    # }}}
# {{{ Getters
@property
def area_options(self) -> Dict[str,
str]: return self.__area_options__.copy()
@property
def national_options(
self) -> Dict[str, str]: return self.__national_options__.copy()
@property
def region_options(
self) -> Dict[str, str]: return self.__region_options__.copy()
@property
def state_options(self) -> Dict[str,
str]: return self.__state_options__.copy()
@property
def municipality_options(
self) -> Dict[str, str]: return self.__municipality_options__.copy()
@property
def index_options(self) -> Dict[str,
str]: return self.__index_options__.copy()
@property
def period_options(
self) -> Dict[str, str]: return self.__period_options__.copy()
@property
def view_options(self) -> Dict[str,
str]: return self.__view_options__.copy()
@property
def area(self): return self.__area__
@property
def region(self): return self.__region__
@property
def state(self): return self.__state__
@property
def municipality(self): return self.__municipality__
@property
def index(self): return self.__index__
@property
def period(self): return self.__period__
@property
def view(self): return self.__view__
# }}}
# {{{ Setters
@area.setter
def area(self, value: Union[str, int]):
if isinstance(value, str):
if value not in self.area_options:
raise ValueError('Valor inválido para área')
self.__area__ = value
elif isinstance(value, int):
try:
self.__area__ = list(self.area_options.keys())[value]
except IndexError:
raise IndexError('Indíce inválido para área')
else:
raise TypeError('Tipo inválido para área')
@region.setter
def region(self, value: Union[str, Iterable[str], Iterable[int]]):
if isinstance(value, str):
if value not in self.region_options:
raise ValueError('Valor inválido para região')
self.__region__ = value
elif isinstance(value, Iter):
if all(map(lambda v: type(v) == int, value)):
try:
self.__region__ = tuple([k for i, k in enumerate(
self.region_options.keys()) if i in value]) # type: ignore
except IndexError:
raise IndexError('Indíce inválido para região')
else:
if any(map(lambda o: o not in self.region_options, value)):
raise ValueError('Valor inválido para região')
self.__region__ = tuple(value) # type: ignore
else:
raise TypeError('Tipo inválido para região')
@state.setter
def state(self, value: Union[str, Iterable[str], Iterable[int]]):
if isinstance(value, str):
if value not in self.state_options:
raise ValueError('Valor inválido para estado')
self.__state__ = value
elif isinstance(value, Iter):
if all(map(lambda v: type(v) == int, value)):
try:
self.__state__ = tuple([k for i, k in enumerate(
self.state_options.keys()) if i in value]) # type: ignore
except IndexError:
raise IndexError('Indíce inválido para estado')
else:
if any(map(lambda o: o not in self.state_options, value)):
raise ValueError('Valor inválido para estado')
self.__state__ = tuple(value) # type: ignore
else:
raise TypeError('Tipo inválido para estado')
@municipality.setter
def municipality(self, value: Union[str, Iterable[str], Iterable[int]]):
if isinstance(value, str):
if value not in self.municipality_options:
raise ValueError('Valor inválido para município')
self.__municipality__ = value
elif isinstance(value, Iter):
if all(map(lambda v: type(v) == int, value)):
try:
self.__municipality__ = tuple([k for i, k in enumerate(
self.municipality_options.keys()) if i in value]) # type: ignore
except IndexError:
raise IndexError('Indíce inválido para município')
else:
if any(map(lambda o: o not in self.municipality_options, value)):
raise ValueError('Valor inválido para município')
self.__municipality__ = tuple(value) # type: ignore
else:
raise TypeError('Tipo inválido para município')
@period.setter
def period(self, value: Union[str, int]):
if isinstance(value, str):
if value not in self.period_options:
raise ValueError('Valor inválido para período')
self.__period__ = value
elif isinstance(value, int):
try:
self.__period__ = list(self.period_options.keys())[value]
except IndexError:
raise IndexError(
'Opção de índice {} não existe para período'.format(value))
else:
raise TypeError('Tipo não suportado para configurar período')
@index.setter
def index(self, value: Union[str, int]):
if isinstance(value, str):
if value not in self.index_options:
raise ValueError('Valor inválido para índice')
self.__index__ = value
elif isinstance(value, int):
try:
self.__index__ = list(self.index_options.keys())[value]
except IndexError:
raise IndexError(
'Opção de índice {} não existe para índice'.format(value))
else:
raise TypeError('Tipo não suportado para configurar índice')
@view.setter
def view(self, value: Union[str, int]):
if isinstance(value, str):
if value not in self.view_options:
raise ValueError('Valor inválido para visão de equipe')
self.__view__ = value
elif isinstance(value, int):
try:
self.__view__ = list(self.view_options.keys())[value]
except IndexError:
raise IndexError(
'Opção de índice {} não existe para visão de equipe'.format(value))
else:
raise TypeError('Tipo não suportado para configurar índice')
# }}}
    def post(  # {{{
            self,
            # NOTE(review): mutable default — benign here because `params` is
            # never mutated (it is only read by default.update), but `None`
            # plus an in-body default would be the safer idiom.
            params: dict = dict(),
            output: Union[str, TextIO] = None,
            strip: bool = False
    ) -> None:
        """Issue a browser-like POST request to Sisab.

        @param params
            Key/value pairs sent with the request; merged over (and able to
            override) the default JSF parameters.
        @param output
            File name or open text stream used to save a CSV response; when
            omitted the response is not saved.
        @param strip
            For CSV responses only.  When a bool (either value!), keep the
            middle '\\n\\n\\n'-separated section minus its first line; when a
            sequence of flags, keep the sections whose flag is truthy.
        """
        # Headers mimic a Firefox session so the JSF backend accepts us.
        headers = {
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "sisab.saude.gov.br",
            "User-Agent":
                "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:91.0)" +
                " Gecko/20100101 Firefox/91.0",
            "Accept":
                "text/html,application/xhtml+xml,application/xml;" +
                "q=0.9,image/webp,*/*;q=0.8",
            "Accept-Language": "pt-BR,en-US;q=0.8,en;q=0.5,pt;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Origin": "https://sisab.saude.gov.br",
            "Connection": "keep-alive",
            "Referer":
                "https://sisab.saude.gov.br/paginas/acessoRestrito/" +
                "relatorio/federal/indicadores/indicadorPainel.xhtml",
            "Upgrade-Insecure-Requests": "1",
            "Sec-Fetch-Dest": "document",
            "Sec-Fetch-Mode": "navigate",
            "Sec-Fetch-Site": "same-origin",
            "Sec-Fetch-User": "?1",
            "Cookie": ';'.join(self.__cookies__),
        }
        # Default JSF form fields; caller-supplied params win on conflict.
        default = {
            "j_idt50": "j_idt50",
            "javax.faces.ViewState": self.__view_state__,
            "coIndicador": self.index,
            "quadrimestre": self.period,
            "visaoEquipe": self.view
        }
        default.update(params)
        res = req.post(Sisab.URL, headers=headers, params=default)
        content_type, _ = res.headers['Content-Type'].split(';')
        # Only parse the response as HTML when it is xml.
        text = res.text
        if content_type == 'text/xml':
            # FIXME: the response arrives in ISO-8859-1 encoding, not UTF-8.
            # self.__last_request__ = bytes(res.text, encoding=encoding.split('=')[1]).decode('utf-8')
            self.__last_request__ = text
            self.__update_view_state__(text)
        elif content_type == 'text/csv':
            if isinstance(strip, bool):
                # Keep the middle of the three blank-line-separated sections
                # and drop its first (header) line.
                _, text, _ = text.split('\n\n\n')
                _, text = text.split('\n', maxsplit=1)
            else:
                # strip is a sequence of flags selecting sections to keep.
                text = text.split('\n\n\n')
                strip = [i for i, s in enumerate(strip) if s]
                text = '\n'.join([t for i, t in enumerate(text) if i in strip])
            if output is not None:
                if isinstance(output, str):
                    with open(output, 'w') as f:
                        f.write(text + '\n')
                else:
                    output.write(text + '\n')
    # }}}
def __update_view_state__(self, data): # {{{
soup = BeautifulSoup(data, 'html.parser')
el = soup.find('input', id='javax.faces.ViewState')
if isinstance(el, Tag):
self.__view_state__ = el.attrs.get('value', '')
else:
el = soup.find('update', id='javax.faces.ViewState')
if isinstance(el, Tag):
self.__view_state__ = el.text
else:
raise ValueError('Não foi possível encontrar o ViewState')
# }}}
def __get_cookies__(self): # {{{
res = req.get(Sisab.URL)
self.__cookies__ = list(map(
lambda c: c.split(';')[0],
res.headers['Set-Cookie'].split(', ')
))
self.__last_request__ = res.text
self.__update_view_state__(res.text)
soup = BeautifulSoup(res.text, 'html.parser')
el = soup.find('select', id='quadrimestre')
if isinstance(el, Tag):
el = el.findAll('option')
self.__period_options__ = {
o.get('value'): o.text
for o in el
if isinstance(o, Tag) and o.get('value') != ''
} # type: ignore
el = soup.find('select', id='coIndicador')
if isinstance(el, Tag):
el = el.findAll('option')
self.__index_options__ = {
o.get('value'): o.text
for o in el
if isinstance(o, Tag) and o.get('value') != ''
} # type: ignore
el = soup.find('select', id='visaoEquipe')
if isinstance(el, Tag):
el = el.findAll('option')
self.__view_options__ = {
o.get('value'): o.text
for o in el
if isinstance(o, Tag) and o.get('value') != ''
} # type: ignore
# }}}
@must_set('area')
def update_area(self): # {{{
self.post({
'selectLinha': self.area,
'javax.faces.source': 'selectLinha',
'javax.faces.partial.event': 'change',
'javax.faces.partial.execute': 'selectLinha selectLinha',
'javax.faces.partial.render': 'regioes script',
'javax.faces.behavior.event': 'valueChange',
'javax.faces.partial.ajax': 'true'
})
# }}}
@must_set('area')
def get_area(self, file: str, strip: bool = False): # {{{
s.post({
'selectLinha': self.area,
'j_idt84': 'j_idt84'
}, file, strip)
# }}}
@must_set('area')
def update_region(self): # {{{
self.post({
'selectLinha': self.area,
'javax.faces.source': 'selectLinha',
'javax.faces.partial.event': 'change',
'javax.faces.partial.execute': 'selectLinha selectLinha',
'javax.faces.partial.render': 'regioes script',
'javax.faces.behavior.event': 'valueChange',
'javax.faces.partial.ajax': 'true'
})
soup = BeautifulSoup(self.__last_request__, 'html.parser')
el = soup.find('update', id='regioes')
if isinstance(el, Tag):
soup = BeautifulSoup(el.text, 'html.parser').findAll('option')
self.__region_options__ = {o.get('value'): o.text for o in soup if isinstance(
o, Tag) and o.get('value') != ''} # type: ignore
else:
raise TypeError('Não foi possível encontrar a Tag de id "regioes"')
# }}}
@must_set('area', 'region')
def get_region(self, file: str, strip: bool = False): # {{{
s.post({
'selectLinha': self.area,
'regiao': self.region,
'j_idt84': 'j_idt84'
}, file, strip)
# }}}
@must_set('area')
def update_state(self, look_into='estados'): # {{{
self.post({
'selectLinha': self.area,
'javax.faces.source': 'selectLinha',
'javax.faces.partial.event': 'change',
'javax.faces.partial.execute': 'selectLinha selectLinha',
'javax.faces.partial.render': 'regioes script',
'javax.faces.behavior.event': 'valueChange',
'javax.faces.partial.ajax': 'true'
})
soup = BeautifulSoup(self.__last_request__, 'html.parser')
el = soup.find('update', id='regioes')
if isinstance(el, Tag):
soup = BeautifulSoup(el.text, 'html.parser').find(
'select', id=look_into)
if isinstance(soup, Tag):
soup = soup.findAll('option')
self.__state_options__ = {o.get('value'): o.text for o in soup if isinstance(
o, Tag) and o.get('value') != ''} # type: ignore
else:
raise TypeError(
'Não foi possível encontrar a Tag de id', look_into)
else:
raise TypeError('Não foi possível encontrar a Tag de id "regioes"')
# }}}
@must_set('area')
def get_state(self, file: str, strip: bool = False): # {{{
params = {
'selectLinha': self.area,
'j_idt84': 'j_idt84'
}
if len(self.state) > 0:
params['estados'] = self.state # type: ignore
s.post(params, file, strip)
# }}}
@must_set('area', 'state')
def update_municipality(self): # {{{
self.post({
'selectLinha': self.area,
'estadoMunicipio': self.state,
'javax.faces.source': 'estadoMunicipio',
'javax.faces.partial.event': 'change',
'javax.faces.partial.execute': 'estadoMunicipio estadoMunicipio',
'javax.faces.partial.render': 'regioes script',
'javax.faces.behavior.event': 'valueChange',
'javax.faces.partial.ajax': 'true'
})
soup = BeautifulSoup(self.__last_request__, 'html.parser')
el = soup.find('update', id='regioes')
if isinstance(el, Tag):
soup = BeautifulSoup(el.text, 'html.parser').find(
'select', id='municipios')
if isinstance(soup, Tag):
soup = soup.findAll('option')
self.__municipality_options__ = {o.get('value'): o.text for o in soup if isinstance(
o, Tag) and o.get('value') != ''} # type: ignore
else:
raise TypeError(
'Não foi possível encontrar a Tag de id "municipios"')
else:
raise TypeError('Não foi possível encontrar a Tag de id "regioes"')
# }}}
@must_set('area', 'state')
def get_municipality(self, file: str, strip: bool = False): # {{{
params = {
'selectLinha': self.area,
'estadoMunicipio': self.state,
'j_idt84': 'j_idt84'
}
if len(self.municipality) > 0:
params['municipios'] = self.municipality
s.post(params, file, strip=strip)
# }}}
# }}}
def get_data(self, area, file, strip=True, **options):
# Faz as requisições desde o começo até o fim
self.area = area
if 'period' in options:
self.period = options['period']
if 'index' in options:
self.index = options['index']
if 'view' in options:
self.view = options['view']
if self.area == 'nacional':
self.get_area(file, strip=strip)
elif self.area == 'regiao':
self.update_region()
self.region = options['region']
self.get_region(file, strip=strip)
elif self.area == 'uf':
self.update_state()
if 'state' in options:
self.state = options['state']
self.get_state(file, strip=strip)
elif self.area == 'ibge':
self.update_state(look_into='estadoMunicipio')
self.state = options['state']
self.update_municipality()
if 'municipality' in options:
self.municipality = options['municipality']
self.get_municipality(file, strip=strip)
if __name__ == '__main__':
    import shutil
    # Interactive CLI: downloads SISAB indicator reports per state at the
    # municipality ('ibge') level, either appended into one global CSV or
    # split into many small files.
    # view_options = Visao, period_options = Quadrimestres, state_options = Estados, area_options = Nivel de Visualizacao, Indicador
    s = Sisab()
    s.area = 'ibge'
    s.view = 0
    s.update_state(look_into='estadoMunicipio')
    print('1) ARQUIVO GLOBAL CSV', '2) PEQUENOS ARQUIVOS', sep='\n')
    resposta = int(input())
    # ANSI escape: clear the whole screen.
    print('\033[2J')
    tc, tl = shutil.get_terminal_size()
    if resposta == 1:
        # Option 1: every (period, indicator) combination appended to
        # out.csv, with an in-place ANSI progress bar drawn per state.
        max_bar = tc - 9
        with open('out.csv', 'w') as file:
            # for p, i, view in [(p, i, view) for p in s.period_options for i in s.index_options for view in s.view_options]:
            for p, i in [(p, i) for p in s.period_options for i in s.index_options]:
                strip = [True, True, False]
                file.write('\n')
                print() # placeholder line for the "BAIXANDO ..." text
                print('Período:'.ljust(15), s.period_options[p][:tc - 16])
                print('Indicador:'.ljust(15), s.index_options[i][:tc - 16])
                print('Visualização:'.ljust(15),
                      s.view_options[s.view][:tc - 16])
                print() # placeholder line for the progress bar
                print('\033[5A', end='') # ANSI: move the cursor 5 lines up
                for idx, uf in enumerate(s.state_options):
                    s.state = uf
                    s.update_municipality()
                    s.get_data(s.area, file, state=uf,
                               period=p, index=i, view=s.view, strip=strip)
                    strip = True
                    print('\033[2K\r\033[1mBAIXANDO\033[31m',
                          s.state_options[uf], '\033[0m\033[4B', end='')
                    pct = (idx * max_bar) / len(s.state_options)
                    pct_str = '{:3.2f}%'.format(pct)
                    pct_str = pct_str.rjust(7)
                    # Fractional part decides whether a half-block is drawn.
                    decimal = pct - int(pct)
                    pct_half = '▌' if decimal > 0.5 else ''
                    print(
                        # Clear the line and print the percentage
                        '\033[2K\r{}'.format(pct_str),
                        # Print the progress bar
                        '█' * int(pct) + pct_half + '_' * \
                        (max_bar - int(pct) - len(pct_half)),
                        '\033[4A', end='') # Go back to the "BAIXANDO ..." line
                print('\033[2K\r\033[1mFINALIZADO!!\033[0m\033[4B', end='')
                print('\033[2K\r100.00% ' + '█' * max_bar)
                print()
    elif resposta == 2:
        # Option 2: one CSV file per (state, period, indicator, view) combo.
        for uf, p, i, view in [(uf, p, i, view) for uf in s.state_options for p in s.period_options for i in s.index_options for view in s.view_options]:
            s.state = uf
            s.update_municipality()
            s.get_data(s.area, '{}{}{}{}.csv'.format(
                s.state_options[uf],
                s.period_options[p],
                s.index_options[i],
                s.view_options[view]),
                state=uf,
                period=p, index=i, view=view)
            print("\033[2K\r BAIXANDO",
                  s.state_options[uf],
                  s.period_options[p],
                  s.index_options[i],
                  s.view_options[view],
                  end='')
| [
"noreply@github.com"
] | giapsunb.noreply@github.com |
f2110dbbd89d74b18d31ba38453abe0f7578aebb | 60fa442ae76b960ab21b10fb527c0eac85cdc587 | /phenix/crawl_refines_print_Rfactor.py | 3865946eb9d8dfc4cf54c28e0f99554fc655a411 | [] | no_license | pjanowski/Pawel_PhD_Scripts | 8e6c2b92b492f9cacf425327a01faaceb27bb87d | 5f9b1735ca6da8fdf0946d6748f3da7d3d723d5e | refs/heads/master | 2021-01-10T06:15:30.287053 | 2015-11-16T04:04:07 | 2015-11-16T04:04:07 | 46,250,317 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | import os
import glob
dbase="/media/My Book/Marco/rigi_145392/p6522"
ds = next(os.walk(dbase))[1]
ds = [i for i in ds if i.startswith('Refine')]
f = lambda x: (x,int(x.split('_')[-1]))
ds = map(f,ds)
ds.sort(key=lambda x: x[-1])
ds = [ i[0] for i in ds ]
for d in ds:
logfile = glob.glob('%s/%s/*log' %(dbase,d))
if len(logfile) ==0:
continue
f=open(logfile[0], 'r')
l=f.readlines()
print d
print l[-2],
print l[-1] | [
"pawelrc@gmail.com"
] | pawelrc@gmail.com |
6b33fad5a0bf1cd29c8c5b2f40ddf62a7654bd53 | 9d7b67a9795951681dc2f13c3edefade049652ee | /plotting.py | 7f26a66d7708241cfa123e751ab8ea8eb883f6a1 | [] | no_license | joshikajal/insta | 443d0803870c3e2102e41437a0fb521fb0eacdba | 47886a5b194c012b996f699c41736ae29571deca | refs/heads/master | 2021-01-01T16:29:25.570233 | 2017-07-20T14:40:55 | 2017-07-20T14:40:55 | 97,846,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | import numpy as n
import matplotlib.pyplot as p
fig = p.Figure()
ax=p.axes()
def plot_fig(hash_tags,count):
labels = hash_tags # Mentioning the labels(hash_tags) for pie-chart
size = count # Mentioning the sizes of the labels
colors = ['r','g','b','m','y','c','w'] # Colours to be used
explode = []
for temp in labels:
explode.append(0) # Use to denote slicing
p.pie(size,explode,labels,colors,startangle=120,shadow=False,radius=1.0,autopct = "%1.2f%%",pctdistance=.6,) # Plotting the plot
p.axis("equal") # Shows the pie-chart in circle
p.legend(labels) # Legends displayed
p.tight_layout()
p.show() | [
"noreply@github.com"
] | joshikajal.noreply@github.com |
6ca4e2e8cbb599a84de695311c734db68e0591c3 | 1d79b1e44689c4bc1df68398151ffef990f0f0e7 | /blackjack/blackjack.py | 637ab66510dfb794ee077a3d2c2a4baf645b7ecc | [] | no_license | sourabbmk/mini_p | b14f4102e431147ca7f82714a9ca3426f26fb2a3 | 6a20a89a28796ccc4023a780f5506e6db47df02e | refs/heads/master | 2020-04-14T22:59:42.357347 | 2019-01-05T06:04:43 | 2019-01-05T06:04:43 | 164,186,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,705 | py | import random
#define deck
def deck():
    """Return a freshly shuffled 52-card deck.

    Each card is a two-character string: suit (H/S/D/C) followed by
    rank (A, 2-9, T, J, Q, K).
    """
    cards = [
        suit + rank
        for suit in ['H', 'S', 'D', 'C']
        for rank in ['A', '2', '3', '4', '5', '6',
                     '7', '8', '9', 'T', 'J', 'Q', 'K']
    ]
    random.shuffle(cards)
    return cards
# Takes in player's card and returns his total points.
def pointcount(mycards):
    """Return the best blackjack value of a hand.

    Cards are 'SR' strings (suit + rank); T/J/Q/K count 10, number
    cards count their face value, and each ace counts 1 or 11 — one ace
    is promoted to 11 only when that does not bust the hand.

    Fixes two bugs in the original: (1) multiple aces added only 1
    point in total instead of 1 per ace, and (2) a single ace was
    counted as 11 whenever the rest of the hand scored 10 or more, so
    e.g. ['H9', 'S5', 'DA'] busted at 25 instead of scoring 15.
    """
    total = 0
    aces = 0
    for card in mycards:
        rank = card[1]
        if rank in ('T', 'J', 'Q', 'K'):
            total += 10
        elif rank == 'A':
            aces += 1
        else:
            total += int(rank)
    total += aces  # every ace is worth at least 1
    if aces > 0 and total + 10 <= 21:
        total += 10  # promote one ace from 1 to 11
    return total
#create player and dealer's hands. Give them 2 cards each.
#return a list with both hands.
def createplayinghands(mydeck):
dealerhands=[]
playerhands=[]
dealerhands.append(mydeck.pop())
dealerhands.append(mydeck.pop())
playerhands.append(mydeck.pop())
playerhands.append(mydeck.pop())
while (pointcount(dealerhands) <=16):
dealerhands.append(mydeck.pop())
return [dealerhands,playerhands]
#game
# Interactive game loop: the dealer's hand was fully played out by
# createplayinghands(); the player repeatedly hits or stands.
game = " "
mydeck = deck()
hands = createplayinghands(mydeck)
dealer = hands[0]
player = hands[1]
while game != "exit":
    dealercount = pointcount(dealer)
    playercount = pointcount(player)
    # Only the dealer's first card is revealed, casino style.
    print("dealer has: {}".format(dealer[0]))
    print("player has: {}".format(player))
    if playercount == 21:
        print("player wins! with {} points".format(playercount))
        break
    elif playercount > 21:
        print("player busts!!! with {} points. Dealer wins!".format(playercount))
        break
    elif dealercount > 21:
        print("dealer busts!!! with {} points. Player wins!".format(dealercount))
        break
    game = input("What would you like to do? H: HIT \nS:Stand\n")
    if game == 'H':
        player.append(mydeck.pop())
    elif playercount > dealercount:
        # Any non-hit answer counts as standing: compare the totals.
        print("player wins! with {} points".format(playercount))
        print("dealer has {} or {} points".format(dealer, dealercount))
        break
    else:
        print("Dealer wins!")
        print("dealer has {} or {} points".format(dealer, dealercount))
        break
| [
"noreply@github.com"
] | sourabbmk.noreply@github.com |
b0ff9f48e6c2f3407f84200f20b514f3846a6faa | 8232afd746f3be1b8bbb6ef9e673c1e9bb8a71c9 | /pdb_ex2.py | 8b2daa7f6c6bdffd23e2502ade5714611f711fed | [] | no_license | Nikhila76/debugging | 50158dc769bb7aa6f082ad2bbc2aac3b30eed33e | b0ce39c063fbc43535fca7eb90b49a5ea3abd05b | refs/heads/master | 2020-11-24T00:19:19.766044 | 2019-12-13T16:39:56 | 2019-12-13T16:39:56 | 227,880,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | import pdb
def add_one(num):
    """Print and return num incremented by one."""
    incremented = num + 1
    print(incremented)
    return incremented
def main():
    """Drop into the debugger, then feed 0..9 through add_one()."""
    pdb.set_trace()  # breakpoint: step with n/s, continue with c
    for value in range(0, 10):
        add_one(value)


if __name__ == '__main__':
    main()
"noreply@github.com"
] | Nikhila76.noreply@github.com |
553525e00731cc6043347f108d91ed75bd67cdda | c9c176c8e4516fd0017c618586c5e3792d9094c3 | /core/src/epicli/cli/engine/ansible/AnsibleConfigFileCreator.py | 896392a9f7b632ae6a296f76b0ccceb6663f8714 | [
"Apache-2.0",
"GPL-3.0-or-later",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"BSD-3-Clause",
"LGPL-2.1-or-later",
"ISC",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"Python-2.0",
"MPL-2.0",
"BSD-2-Claus... | permissive | rpudlowski93/epiphany | 6d089c9f6a471d2e0f42341679498c2d8a4bc15b | 3116c587caca77f7c7cae73268fb107ff2ea2174 | refs/heads/develop | 2023-05-07T22:59:01.566518 | 2021-05-20T07:42:46 | 2021-05-20T07:42:46 | 284,035,668 | 0 | 0 | Apache-2.0 | 2020-09-09T11:06:00 | 2020-07-31T12:49:36 | null | UTF-8 | Python | false | false | 2,433 | py | import os
from cli.helpers.build_saver import save_ansible_config_file
from cli.helpers.Step import Step
from collections import OrderedDict
class AnsibleConfigFileCreator(Step):
    """Builds an ansible.cfg file from the requested Ansible options.

    Settings are accumulated in an ordered {section: {key: value}}
    mapping and written out by create(), which also points the
    ANSIBLE_CONFIG environment variable at the generated file.
    """

    def __init__(self, ansible_options, ansible_config_file_path):
        super().__init__(__name__)
        self.ansible_options = ansible_options
        self.ansible_config_file_path = ansible_config_file_path
        self.ansible_config_file_settings = OrderedDict()

    def __enter__(self):
        super().__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        super().__exit__(exc_type, exc_value, traceback)

    def get_setting(self, section, key):
        """Return the value stored under section/key, or None if absent."""
        section_settings = self.ansible_config_file_settings.get(section)
        if section_settings is None:
            return None
        return section_settings.get(key)

    def add_setting(self, section, key, value):
        """Add a new section/key setting.

        Raises:
            TypeError: when the setting already exists.
        """
        section_settings = self.ansible_config_file_settings.setdefault(section, {})
        if key in section_settings:
            raise TypeError(f"Setting {section}[{key}] already exists")
        section_settings[key] = value

    def update_setting(self, section, key, value, append=False):
        """Set section/key to value, creating the setting when missing.

        When append is True and the existing value is a list, value is
        appended to it instead of replacing the list.
        """
        settings = self.ansible_config_file_settings
        if section in settings and key in settings[section]:
            current = settings[section][key]
            if append and type(current) is list:
                current.append(value)
            else:
                settings[section][key] = value
        else:
            self.add_setting(section, key, value)

    def process_ansible_options(self):
        """Translate self.ansible_options into config-file settings."""
        if self.ansible_options['profile_tasks']:
            self.add_setting('defaults', 'callback_whitelist', ['profile_tasks'])
        self.add_setting('defaults', 'interpreter_python', 'auto_legacy_silent')
        # Workaround for delegate_to with become_user.
        self.add_setting('defaults', 'allow_world_readable_tmpfiles', 'true')

    def create(self):
        """Write ansible.cfg and export ANSIBLE_CONFIG pointing at it."""
        self.logger.info('Creating ansible.cfg')
        self.process_ansible_options()
        save_ansible_config_file(self.ansible_config_file_settings,
                                 self.ansible_config_file_path)
        os.environ["ANSIBLE_CONFIG"] = self.ansible_config_file_path
| [
"noreply@github.com"
] | rpudlowski93.noreply@github.com |
d21273618c0ba3f88d15e8539600718e99a08407 | 298b5c5d4d103f6fb2ff6510342d9e302111573e | /seaborn/colors/__init__.py | 3d0bf1d56bdc5c0e724c8eeb95200297884337cc | [
"BSD-3-Clause"
] | permissive | mwaskom/seaborn | a8ea9e8f3932a6324b196862cc6593f69df2d459 | 67a777a54dd1064c3f9038733b1ed71c6d50a6af | refs/heads/master | 2023-08-24T06:22:32.609915 | 2023-08-24T01:09:05 | 2023-08-24T01:09:05 | 4,704,710 | 10,793 | 2,316 | BSD-3-Clause | 2023-09-11T05:04:46 | 2012-06-18T18:41:19 | Python | UTF-8 | Python | false | false | 88 | py | from .xkcd_rgb import xkcd_rgb # noqa: F401
from .crayons import crayons # noqa: F401
| [
"mwaskom@nyu.edu"
] | mwaskom@nyu.edu |
7eedec6b90d538076fbd285523d68f3a3cf6b332 | 5d402d971a51e534db97903c2c766533caac43ad | /.history/3_of_100_20210409144230.py | 8f28e1de243e862f58fe88d5d74acd32043f9f78 | [] | no_license | Samkiroko/python_100_days | 494474723c1e6f30a1682b57af88f9e8e2725a7a | af7c8352f5adda6f52da4ac73f92bf000daa9547 | refs/heads/main | 2023-04-12T11:37:03.660017 | 2021-04-18T19:18:34 | 2021-04-18T19:18:34 | 356,240,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 919 | py |
# Write a program that works out whether if a given year is a leap year.
# A normal year has 365 days, leap years have 366, with an extra day in February.
# The reason why we have leap years is really fascinating,
# this video does it more justice:
# 🚨 Don't change the code below 👇
year = int(input("Which year do you want to check? "))
# 🚨 Don't change the code above 👆

# Write your code below this line 👇

# Gregorian leap-year rule: a year is a leap year when it is divisible
# by 4, except century years (divisible by 100), which are leap years
# only when also divisible by 400.
#
# Fixes the original logic, which printed "leap year" for century years
# not divisible by 400 (e.g. 1900) and printed nothing at all for years
# not divisible by 4.
if year % 4 == 0:
    if year % 100 == 0:
        if year % 400 == 0:
            print("leap year")
        else:
            print("not leap year")
    else:
        print("leap year")
else:
    print("not leap year")
| [
"kirokodev@gmail.com"
] | kirokodev@gmail.com |
7e5655692f68542ff5bdf487192f36808cc0e71f | 11e93d33fbc1e1ce37b14969276f13ad1ba1823b | /cef_paths.gypi | cb4bf53b504675bb2cf66d164bdb87552e331f76 | [
"BSD-3-Clause"
] | permissive | chorusg/cef | 671ff2ffd92a361fe4b62649317687b22c062295 | 1ffa5528b3e3640751e19cf47d8bcb615151907b | refs/heads/master | 2023-06-19T08:26:02.558559 | 2021-07-19T15:55:43 | 2021-07-19T15:55:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,714 | gypi | # Copyright (c) 2021 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.
#
# ---------------------------------------------------------------------------
#
# This file was generated by the CEF translator tool and should not edited
# by hand. See the translator.README.txt file in the tools directory for
# more information.
#
# $hash=d723a9f6637cec523b158a6750d3a64698b407c3$
#
{
'variables': {
'autogen_cpp_includes': [
'include/cef_accessibility_handler.h',
'include/cef_app.h',
'include/cef_audio_handler.h',
'include/cef_auth_callback.h',
'include/cef_browser.h',
'include/cef_browser_process_handler.h',
'include/cef_callback.h',
'include/cef_client.h',
'include/cef_command_line.h',
'include/cef_context_menu_handler.h',
'include/cef_cookie.h',
'include/cef_crash_util.h',
'include/cef_devtools_message_observer.h',
'include/cef_dialog_handler.h',
'include/cef_display_handler.h',
'include/cef_dom.h',
'include/cef_download_handler.h',
'include/cef_download_item.h',
'include/cef_drag_data.h',
'include/cef_drag_handler.h',
'include/cef_extension.h',
'include/cef_extension_handler.h',
'include/cef_file_util.h',
'include/cef_find_handler.h',
'include/cef_focus_handler.h',
'include/cef_frame.h',
'include/cef_frame_handler.h',
'include/cef_image.h',
'include/cef_jsdialog_handler.h',
'include/cef_keyboard_handler.h',
'include/cef_life_span_handler.h',
'include/cef_load_handler.h',
'include/cef_media_router.h',
'include/cef_menu_model.h',
'include/cef_menu_model_delegate.h',
'include/cef_navigation_entry.h',
'include/cef_origin_whitelist.h',
'include/cef_parser.h',
'include/cef_path_util.h',
'include/cef_print_handler.h',
'include/cef_print_settings.h',
'include/cef_process_message.h',
'include/cef_process_util.h',
'include/cef_registration.h',
'include/cef_render_handler.h',
'include/cef_render_process_handler.h',
'include/cef_request.h',
'include/cef_request_callback.h',
'include/cef_request_context.h',
'include/cef_request_context_handler.h',
'include/cef_request_handler.h',
'include/cef_resource_bundle.h',
'include/cef_resource_bundle_handler.h',
'include/cef_resource_handler.h',
'include/cef_resource_request_handler.h',
'include/cef_response.h',
'include/cef_response_filter.h',
'include/cef_scheme.h',
'include/cef_server.h',
'include/cef_ssl_info.h',
'include/cef_ssl_status.h',
'include/cef_stream.h',
'include/cef_string_visitor.h',
'include/cef_task.h',
'include/cef_thread.h',
'include/cef_trace.h',
'include/cef_urlrequest.h',
'include/cef_v8.h',
'include/cef_values.h',
'include/cef_waitable_event.h',
'include/cef_web_plugin.h',
'include/cef_x509_certificate.h',
'include/cef_xml_reader.h',
'include/cef_zip_reader.h',
'include/test/cef_test_helpers.h',
'include/test/cef_translator_test.h',
'include/views/cef_box_layout.h',
'include/views/cef_browser_view.h',
'include/views/cef_browser_view_delegate.h',
'include/views/cef_button.h',
'include/views/cef_button_delegate.h',
'include/views/cef_display.h',
'include/views/cef_fill_layout.h',
'include/views/cef_label_button.h',
'include/views/cef_layout.h',
'include/views/cef_menu_button.h',
'include/views/cef_menu_button_delegate.h',
'include/views/cef_panel.h',
'include/views/cef_panel_delegate.h',
'include/views/cef_scroll_view.h',
'include/views/cef_textfield.h',
'include/views/cef_textfield_delegate.h',
'include/views/cef_view.h',
'include/views/cef_view_delegate.h',
'include/views/cef_window.h',
'include/views/cef_window_delegate.h',
],
'autogen_capi_includes': [
'include/capi/cef_accessibility_handler_capi.h',
'include/capi/cef_app_capi.h',
'include/capi/cef_audio_handler_capi.h',
'include/capi/cef_auth_callback_capi.h',
'include/capi/cef_browser_capi.h',
'include/capi/cef_browser_process_handler_capi.h',
'include/capi/cef_callback_capi.h',
'include/capi/cef_client_capi.h',
'include/capi/cef_command_line_capi.h',
'include/capi/cef_context_menu_handler_capi.h',
'include/capi/cef_cookie_capi.h',
'include/capi/cef_crash_util_capi.h',
'include/capi/cef_devtools_message_observer_capi.h',
'include/capi/cef_dialog_handler_capi.h',
'include/capi/cef_display_handler_capi.h',
'include/capi/cef_dom_capi.h',
'include/capi/cef_download_handler_capi.h',
'include/capi/cef_download_item_capi.h',
'include/capi/cef_drag_data_capi.h',
'include/capi/cef_drag_handler_capi.h',
'include/capi/cef_extension_capi.h',
'include/capi/cef_extension_handler_capi.h',
'include/capi/cef_file_util_capi.h',
'include/capi/cef_find_handler_capi.h',
'include/capi/cef_focus_handler_capi.h',
'include/capi/cef_frame_capi.h',
'include/capi/cef_frame_handler_capi.h',
'include/capi/cef_image_capi.h',
'include/capi/cef_jsdialog_handler_capi.h',
'include/capi/cef_keyboard_handler_capi.h',
'include/capi/cef_life_span_handler_capi.h',
'include/capi/cef_load_handler_capi.h',
'include/capi/cef_media_router_capi.h',
'include/capi/cef_menu_model_capi.h',
'include/capi/cef_menu_model_delegate_capi.h',
'include/capi/cef_navigation_entry_capi.h',
'include/capi/cef_origin_whitelist_capi.h',
'include/capi/cef_parser_capi.h',
'include/capi/cef_path_util_capi.h',
'include/capi/cef_print_handler_capi.h',
'include/capi/cef_print_settings_capi.h',
'include/capi/cef_process_message_capi.h',
'include/capi/cef_process_util_capi.h',
'include/capi/cef_registration_capi.h',
'include/capi/cef_render_handler_capi.h',
'include/capi/cef_render_process_handler_capi.h',
'include/capi/cef_request_capi.h',
'include/capi/cef_request_callback_capi.h',
'include/capi/cef_request_context_capi.h',
'include/capi/cef_request_context_handler_capi.h',
'include/capi/cef_request_handler_capi.h',
'include/capi/cef_resource_bundle_capi.h',
'include/capi/cef_resource_bundle_handler_capi.h',
'include/capi/cef_resource_handler_capi.h',
'include/capi/cef_resource_request_handler_capi.h',
'include/capi/cef_response_capi.h',
'include/capi/cef_response_filter_capi.h',
'include/capi/cef_scheme_capi.h',
'include/capi/cef_server_capi.h',
'include/capi/cef_ssl_info_capi.h',
'include/capi/cef_ssl_status_capi.h',
'include/capi/cef_stream_capi.h',
'include/capi/cef_string_visitor_capi.h',
'include/capi/cef_task_capi.h',
'include/capi/cef_thread_capi.h',
'include/capi/cef_trace_capi.h',
'include/capi/cef_urlrequest_capi.h',
'include/capi/cef_v8_capi.h',
'include/capi/cef_values_capi.h',
'include/capi/cef_waitable_event_capi.h',
'include/capi/cef_web_plugin_capi.h',
'include/capi/cef_x509_certificate_capi.h',
'include/capi/cef_xml_reader_capi.h',
'include/capi/cef_zip_reader_capi.h',
'include/capi/test/cef_test_helpers_capi.h',
'include/capi/test/cef_translator_test_capi.h',
'include/capi/views/cef_box_layout_capi.h',
'include/capi/views/cef_browser_view_capi.h',
'include/capi/views/cef_browser_view_delegate_capi.h',
'include/capi/views/cef_button_capi.h',
'include/capi/views/cef_button_delegate_capi.h',
'include/capi/views/cef_display_capi.h',
'include/capi/views/cef_fill_layout_capi.h',
'include/capi/views/cef_label_button_capi.h',
'include/capi/views/cef_layout_capi.h',
'include/capi/views/cef_menu_button_capi.h',
'include/capi/views/cef_menu_button_delegate_capi.h',
'include/capi/views/cef_panel_capi.h',
'include/capi/views/cef_panel_delegate_capi.h',
'include/capi/views/cef_scroll_view_capi.h',
'include/capi/views/cef_textfield_capi.h',
'include/capi/views/cef_textfield_delegate_capi.h',
'include/capi/views/cef_view_capi.h',
'include/capi/views/cef_view_delegate_capi.h',
'include/capi/views/cef_window_capi.h',
'include/capi/views/cef_window_delegate_capi.h',
],
'autogen_library_side': [
'libcef_dll/ctocpp/accessibility_handler_ctocpp.cc',
'libcef_dll/ctocpp/accessibility_handler_ctocpp.h',
'libcef_dll/ctocpp/app_ctocpp.cc',
'libcef_dll/ctocpp/app_ctocpp.h',
'libcef_dll/ctocpp/audio_handler_ctocpp.cc',
'libcef_dll/ctocpp/audio_handler_ctocpp.h',
'libcef_dll/cpptoc/auth_callback_cpptoc.cc',
'libcef_dll/cpptoc/auth_callback_cpptoc.h',
'libcef_dll/cpptoc/before_download_callback_cpptoc.cc',
'libcef_dll/cpptoc/before_download_callback_cpptoc.h',
'libcef_dll/cpptoc/binary_value_cpptoc.cc',
'libcef_dll/cpptoc/binary_value_cpptoc.h',
'libcef_dll/cpptoc/views/box_layout_cpptoc.cc',
'libcef_dll/cpptoc/views/box_layout_cpptoc.h',
'libcef_dll/cpptoc/browser_cpptoc.cc',
'libcef_dll/cpptoc/browser_cpptoc.h',
'libcef_dll/cpptoc/browser_host_cpptoc.cc',
'libcef_dll/cpptoc/browser_host_cpptoc.h',
'libcef_dll/ctocpp/browser_process_handler_ctocpp.cc',
'libcef_dll/ctocpp/browser_process_handler_ctocpp.h',
'libcef_dll/cpptoc/views/browser_view_cpptoc.cc',
'libcef_dll/cpptoc/views/browser_view_cpptoc.h',
'libcef_dll/ctocpp/views/browser_view_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/browser_view_delegate_ctocpp.h',
'libcef_dll/cpptoc/views/button_cpptoc.cc',
'libcef_dll/cpptoc/views/button_cpptoc.h',
'libcef_dll/ctocpp/views/button_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/button_delegate_ctocpp.h',
'libcef_dll/cpptoc/callback_cpptoc.cc',
'libcef_dll/cpptoc/callback_cpptoc.h',
'libcef_dll/ctocpp/client_ctocpp.cc',
'libcef_dll/ctocpp/client_ctocpp.h',
'libcef_dll/cpptoc/command_line_cpptoc.cc',
'libcef_dll/cpptoc/command_line_cpptoc.h',
'libcef_dll/ctocpp/completion_callback_ctocpp.cc',
'libcef_dll/ctocpp/completion_callback_ctocpp.h',
'libcef_dll/ctocpp/context_menu_handler_ctocpp.cc',
'libcef_dll/ctocpp/context_menu_handler_ctocpp.h',
'libcef_dll/cpptoc/context_menu_params_cpptoc.cc',
'libcef_dll/cpptoc/context_menu_params_cpptoc.h',
'libcef_dll/ctocpp/cookie_access_filter_ctocpp.cc',
'libcef_dll/ctocpp/cookie_access_filter_ctocpp.h',
'libcef_dll/cpptoc/cookie_manager_cpptoc.cc',
'libcef_dll/cpptoc/cookie_manager_cpptoc.h',
'libcef_dll/ctocpp/cookie_visitor_ctocpp.cc',
'libcef_dll/ctocpp/cookie_visitor_ctocpp.h',
'libcef_dll/cpptoc/domdocument_cpptoc.cc',
'libcef_dll/cpptoc/domdocument_cpptoc.h',
'libcef_dll/cpptoc/domnode_cpptoc.cc',
'libcef_dll/cpptoc/domnode_cpptoc.h',
'libcef_dll/ctocpp/domvisitor_ctocpp.cc',
'libcef_dll/ctocpp/domvisitor_ctocpp.h',
'libcef_dll/ctocpp/delete_cookies_callback_ctocpp.cc',
'libcef_dll/ctocpp/delete_cookies_callback_ctocpp.h',
'libcef_dll/ctocpp/dev_tools_message_observer_ctocpp.cc',
'libcef_dll/ctocpp/dev_tools_message_observer_ctocpp.h',
'libcef_dll/ctocpp/dialog_handler_ctocpp.cc',
'libcef_dll/ctocpp/dialog_handler_ctocpp.h',
'libcef_dll/cpptoc/dictionary_value_cpptoc.cc',
'libcef_dll/cpptoc/dictionary_value_cpptoc.h',
'libcef_dll/cpptoc/views/display_cpptoc.cc',
'libcef_dll/cpptoc/views/display_cpptoc.h',
'libcef_dll/ctocpp/display_handler_ctocpp.cc',
'libcef_dll/ctocpp/display_handler_ctocpp.h',
'libcef_dll/ctocpp/download_handler_ctocpp.cc',
'libcef_dll/ctocpp/download_handler_ctocpp.h',
'libcef_dll/ctocpp/download_image_callback_ctocpp.cc',
'libcef_dll/ctocpp/download_image_callback_ctocpp.h',
'libcef_dll/cpptoc/download_item_cpptoc.cc',
'libcef_dll/cpptoc/download_item_cpptoc.h',
'libcef_dll/cpptoc/download_item_callback_cpptoc.cc',
'libcef_dll/cpptoc/download_item_callback_cpptoc.h',
'libcef_dll/cpptoc/drag_data_cpptoc.cc',
'libcef_dll/cpptoc/drag_data_cpptoc.h',
'libcef_dll/ctocpp/drag_handler_ctocpp.cc',
'libcef_dll/ctocpp/drag_handler_ctocpp.h',
'libcef_dll/ctocpp/end_tracing_callback_ctocpp.cc',
'libcef_dll/ctocpp/end_tracing_callback_ctocpp.h',
'libcef_dll/cpptoc/extension_cpptoc.cc',
'libcef_dll/cpptoc/extension_cpptoc.h',
'libcef_dll/ctocpp/extension_handler_ctocpp.cc',
'libcef_dll/ctocpp/extension_handler_ctocpp.h',
'libcef_dll/cpptoc/file_dialog_callback_cpptoc.cc',
'libcef_dll/cpptoc/file_dialog_callback_cpptoc.h',
'libcef_dll/cpptoc/views/fill_layout_cpptoc.cc',
'libcef_dll/cpptoc/views/fill_layout_cpptoc.h',
'libcef_dll/ctocpp/find_handler_ctocpp.cc',
'libcef_dll/ctocpp/find_handler_ctocpp.h',
'libcef_dll/ctocpp/focus_handler_ctocpp.cc',
'libcef_dll/ctocpp/focus_handler_ctocpp.h',
'libcef_dll/cpptoc/frame_cpptoc.cc',
'libcef_dll/cpptoc/frame_cpptoc.h',
'libcef_dll/ctocpp/frame_handler_ctocpp.cc',
'libcef_dll/ctocpp/frame_handler_ctocpp.h',
'libcef_dll/cpptoc/get_extension_resource_callback_cpptoc.cc',
'libcef_dll/cpptoc/get_extension_resource_callback_cpptoc.h',
'libcef_dll/cpptoc/image_cpptoc.cc',
'libcef_dll/cpptoc/image_cpptoc.h',
'libcef_dll/cpptoc/jsdialog_callback_cpptoc.cc',
'libcef_dll/cpptoc/jsdialog_callback_cpptoc.h',
'libcef_dll/ctocpp/jsdialog_handler_ctocpp.cc',
'libcef_dll/ctocpp/jsdialog_handler_ctocpp.h',
'libcef_dll/ctocpp/keyboard_handler_ctocpp.cc',
'libcef_dll/ctocpp/keyboard_handler_ctocpp.h',
'libcef_dll/cpptoc/views/label_button_cpptoc.cc',
'libcef_dll/cpptoc/views/label_button_cpptoc.h',
'libcef_dll/cpptoc/views/layout_cpptoc.cc',
'libcef_dll/cpptoc/views/layout_cpptoc.h',
'libcef_dll/ctocpp/life_span_handler_ctocpp.cc',
'libcef_dll/ctocpp/life_span_handler_ctocpp.h',
'libcef_dll/cpptoc/list_value_cpptoc.cc',
'libcef_dll/cpptoc/list_value_cpptoc.h',
'libcef_dll/ctocpp/load_handler_ctocpp.cc',
'libcef_dll/ctocpp/load_handler_ctocpp.h',
'libcef_dll/ctocpp/media_observer_ctocpp.cc',
'libcef_dll/ctocpp/media_observer_ctocpp.h',
'libcef_dll/cpptoc/media_route_cpptoc.cc',
'libcef_dll/cpptoc/media_route_cpptoc.h',
'libcef_dll/ctocpp/media_route_create_callback_ctocpp.cc',
'libcef_dll/ctocpp/media_route_create_callback_ctocpp.h',
'libcef_dll/cpptoc/media_router_cpptoc.cc',
'libcef_dll/cpptoc/media_router_cpptoc.h',
'libcef_dll/cpptoc/media_sink_cpptoc.cc',
'libcef_dll/cpptoc/media_sink_cpptoc.h',
'libcef_dll/ctocpp/media_sink_device_info_callback_ctocpp.cc',
'libcef_dll/ctocpp/media_sink_device_info_callback_ctocpp.h',
'libcef_dll/cpptoc/media_source_cpptoc.cc',
'libcef_dll/cpptoc/media_source_cpptoc.h',
'libcef_dll/cpptoc/views/menu_button_cpptoc.cc',
'libcef_dll/cpptoc/views/menu_button_cpptoc.h',
'libcef_dll/ctocpp/views/menu_button_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/menu_button_delegate_ctocpp.h',
'libcef_dll/cpptoc/views/menu_button_pressed_lock_cpptoc.cc',
'libcef_dll/cpptoc/views/menu_button_pressed_lock_cpptoc.h',
'libcef_dll/cpptoc/menu_model_cpptoc.cc',
'libcef_dll/cpptoc/menu_model_cpptoc.h',
'libcef_dll/ctocpp/menu_model_delegate_ctocpp.cc',
'libcef_dll/ctocpp/menu_model_delegate_ctocpp.h',
'libcef_dll/cpptoc/navigation_entry_cpptoc.cc',
'libcef_dll/cpptoc/navigation_entry_cpptoc.h',
'libcef_dll/ctocpp/navigation_entry_visitor_ctocpp.cc',
'libcef_dll/ctocpp/navigation_entry_visitor_ctocpp.h',
'libcef_dll/cpptoc/views/panel_cpptoc.cc',
'libcef_dll/cpptoc/views/panel_cpptoc.h',
'libcef_dll/ctocpp/views/panel_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/panel_delegate_ctocpp.h',
'libcef_dll/ctocpp/pdf_print_callback_ctocpp.cc',
'libcef_dll/ctocpp/pdf_print_callback_ctocpp.h',
'libcef_dll/cpptoc/post_data_cpptoc.cc',
'libcef_dll/cpptoc/post_data_cpptoc.h',
'libcef_dll/cpptoc/post_data_element_cpptoc.cc',
'libcef_dll/cpptoc/post_data_element_cpptoc.h',
'libcef_dll/cpptoc/print_dialog_callback_cpptoc.cc',
'libcef_dll/cpptoc/print_dialog_callback_cpptoc.h',
'libcef_dll/ctocpp/print_handler_ctocpp.cc',
'libcef_dll/ctocpp/print_handler_ctocpp.h',
'libcef_dll/cpptoc/print_job_callback_cpptoc.cc',
'libcef_dll/cpptoc/print_job_callback_cpptoc.h',
'libcef_dll/cpptoc/print_settings_cpptoc.cc',
'libcef_dll/cpptoc/print_settings_cpptoc.h',
'libcef_dll/cpptoc/process_message_cpptoc.cc',
'libcef_dll/cpptoc/process_message_cpptoc.h',
'libcef_dll/ctocpp/read_handler_ctocpp.cc',
'libcef_dll/ctocpp/read_handler_ctocpp.h',
'libcef_dll/ctocpp/register_cdm_callback_ctocpp.cc',
'libcef_dll/ctocpp/register_cdm_callback_ctocpp.h',
'libcef_dll/cpptoc/registration_cpptoc.cc',
'libcef_dll/cpptoc/registration_cpptoc.h',
'libcef_dll/ctocpp/render_handler_ctocpp.cc',
'libcef_dll/ctocpp/render_handler_ctocpp.h',
'libcef_dll/ctocpp/render_process_handler_ctocpp.cc',
'libcef_dll/ctocpp/render_process_handler_ctocpp.h',
'libcef_dll/cpptoc/request_cpptoc.cc',
'libcef_dll/cpptoc/request_cpptoc.h',
'libcef_dll/cpptoc/request_callback_cpptoc.cc',
'libcef_dll/cpptoc/request_callback_cpptoc.h',
'libcef_dll/cpptoc/request_context_cpptoc.cc',
'libcef_dll/cpptoc/request_context_cpptoc.h',
'libcef_dll/ctocpp/request_context_handler_ctocpp.cc',
'libcef_dll/ctocpp/request_context_handler_ctocpp.h',
'libcef_dll/ctocpp/request_handler_ctocpp.cc',
'libcef_dll/ctocpp/request_handler_ctocpp.h',
'libcef_dll/ctocpp/resolve_callback_ctocpp.cc',
'libcef_dll/ctocpp/resolve_callback_ctocpp.h',
'libcef_dll/cpptoc/resource_bundle_cpptoc.cc',
'libcef_dll/cpptoc/resource_bundle_cpptoc.h',
'libcef_dll/ctocpp/resource_bundle_handler_ctocpp.cc',
'libcef_dll/ctocpp/resource_bundle_handler_ctocpp.h',
'libcef_dll/ctocpp/resource_handler_ctocpp.cc',
'libcef_dll/ctocpp/resource_handler_ctocpp.h',
'libcef_dll/cpptoc/resource_read_callback_cpptoc.cc',
'libcef_dll/cpptoc/resource_read_callback_cpptoc.h',
'libcef_dll/ctocpp/resource_request_handler_ctocpp.cc',
'libcef_dll/ctocpp/resource_request_handler_ctocpp.h',
'libcef_dll/cpptoc/resource_skip_callback_cpptoc.cc',
'libcef_dll/cpptoc/resource_skip_callback_cpptoc.h',
'libcef_dll/cpptoc/response_cpptoc.cc',
'libcef_dll/cpptoc/response_cpptoc.h',
'libcef_dll/ctocpp/response_filter_ctocpp.cc',
'libcef_dll/ctocpp/response_filter_ctocpp.h',
'libcef_dll/cpptoc/run_context_menu_callback_cpptoc.cc',
'libcef_dll/cpptoc/run_context_menu_callback_cpptoc.h',
'libcef_dll/ctocpp/run_file_dialog_callback_ctocpp.cc',
'libcef_dll/ctocpp/run_file_dialog_callback_ctocpp.h',
'libcef_dll/cpptoc/sslinfo_cpptoc.cc',
'libcef_dll/cpptoc/sslinfo_cpptoc.h',
'libcef_dll/cpptoc/sslstatus_cpptoc.cc',
'libcef_dll/cpptoc/sslstatus_cpptoc.h',
'libcef_dll/ctocpp/scheme_handler_factory_ctocpp.cc',
'libcef_dll/ctocpp/scheme_handler_factory_ctocpp.h',
'libcef_dll/cpptoc/scheme_registrar_cpptoc.cc',
'libcef_dll/cpptoc/scheme_registrar_cpptoc.h',
'libcef_dll/cpptoc/views/scroll_view_cpptoc.cc',
'libcef_dll/cpptoc/views/scroll_view_cpptoc.h',
'libcef_dll/cpptoc/select_client_certificate_callback_cpptoc.cc',
'libcef_dll/cpptoc/select_client_certificate_callback_cpptoc.h',
'libcef_dll/cpptoc/server_cpptoc.cc',
'libcef_dll/cpptoc/server_cpptoc.h',
'libcef_dll/ctocpp/server_handler_ctocpp.cc',
'libcef_dll/ctocpp/server_handler_ctocpp.h',
'libcef_dll/ctocpp/set_cookie_callback_ctocpp.cc',
'libcef_dll/ctocpp/set_cookie_callback_ctocpp.h',
'libcef_dll/cpptoc/stream_reader_cpptoc.cc',
'libcef_dll/cpptoc/stream_reader_cpptoc.h',
'libcef_dll/cpptoc/stream_writer_cpptoc.cc',
'libcef_dll/cpptoc/stream_writer_cpptoc.h',
'libcef_dll/ctocpp/string_visitor_ctocpp.cc',
'libcef_dll/ctocpp/string_visitor_ctocpp.h',
'libcef_dll/ctocpp/task_ctocpp.cc',
'libcef_dll/ctocpp/task_ctocpp.h',
'libcef_dll/cpptoc/task_runner_cpptoc.cc',
'libcef_dll/cpptoc/task_runner_cpptoc.h',
'libcef_dll/cpptoc/views/textfield_cpptoc.cc',
'libcef_dll/cpptoc/views/textfield_cpptoc.h',
'libcef_dll/ctocpp/views/textfield_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/textfield_delegate_ctocpp.h',
'libcef_dll/cpptoc/thread_cpptoc.cc',
'libcef_dll/cpptoc/thread_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_cpptoc.h',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_client_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_client_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_client_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_client_child_ctocpp.h',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_child_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_child_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_library_child_child_cpptoc.h',
'libcef_dll/ctocpp/test/translator_test_scoped_client_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_scoped_client_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_scoped_client_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_scoped_client_child_ctocpp.h',
'libcef_dll/cpptoc/test/translator_test_scoped_library_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_scoped_library_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_scoped_library_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_scoped_library_child_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_scoped_library_child_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_scoped_library_child_child_cpptoc.h',
'libcef_dll/cpptoc/urlrequest_cpptoc.cc',
'libcef_dll/cpptoc/urlrequest_cpptoc.h',
'libcef_dll/ctocpp/urlrequest_client_ctocpp.cc',
'libcef_dll/ctocpp/urlrequest_client_ctocpp.h',
'libcef_dll/ctocpp/v8accessor_ctocpp.cc',
'libcef_dll/ctocpp/v8accessor_ctocpp.h',
'libcef_dll/ctocpp/v8array_buffer_release_callback_ctocpp.cc',
'libcef_dll/ctocpp/v8array_buffer_release_callback_ctocpp.h',
'libcef_dll/cpptoc/v8context_cpptoc.cc',
'libcef_dll/cpptoc/v8context_cpptoc.h',
'libcef_dll/cpptoc/v8exception_cpptoc.cc',
'libcef_dll/cpptoc/v8exception_cpptoc.h',
'libcef_dll/ctocpp/v8handler_ctocpp.cc',
'libcef_dll/ctocpp/v8handler_ctocpp.h',
'libcef_dll/ctocpp/v8interceptor_ctocpp.cc',
'libcef_dll/ctocpp/v8interceptor_ctocpp.h',
'libcef_dll/cpptoc/v8stack_frame_cpptoc.cc',
'libcef_dll/cpptoc/v8stack_frame_cpptoc.h',
'libcef_dll/cpptoc/v8stack_trace_cpptoc.cc',
'libcef_dll/cpptoc/v8stack_trace_cpptoc.h',
'libcef_dll/cpptoc/v8value_cpptoc.cc',
'libcef_dll/cpptoc/v8value_cpptoc.h',
'libcef_dll/cpptoc/value_cpptoc.cc',
'libcef_dll/cpptoc/value_cpptoc.h',
'libcef_dll/cpptoc/views/view_cpptoc.cc',
'libcef_dll/cpptoc/views/view_cpptoc.h',
'libcef_dll/ctocpp/views/view_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/view_delegate_ctocpp.h',
'libcef_dll/cpptoc/waitable_event_cpptoc.cc',
'libcef_dll/cpptoc/waitable_event_cpptoc.h',
'libcef_dll/cpptoc/web_plugin_info_cpptoc.cc',
'libcef_dll/cpptoc/web_plugin_info_cpptoc.h',
'libcef_dll/ctocpp/web_plugin_info_visitor_ctocpp.cc',
'libcef_dll/ctocpp/web_plugin_info_visitor_ctocpp.h',
'libcef_dll/ctocpp/web_plugin_unstable_callback_ctocpp.cc',
'libcef_dll/ctocpp/web_plugin_unstable_callback_ctocpp.h',
'libcef_dll/cpptoc/views/window_cpptoc.cc',
'libcef_dll/cpptoc/views/window_cpptoc.h',
'libcef_dll/ctocpp/views/window_delegate_ctocpp.cc',
'libcef_dll/ctocpp/views/window_delegate_ctocpp.h',
'libcef_dll/ctocpp/write_handler_ctocpp.cc',
'libcef_dll/ctocpp/write_handler_ctocpp.h',
'libcef_dll/cpptoc/x509cert_principal_cpptoc.cc',
'libcef_dll/cpptoc/x509cert_principal_cpptoc.h',
'libcef_dll/cpptoc/x509certificate_cpptoc.cc',
'libcef_dll/cpptoc/x509certificate_cpptoc.h',
'libcef_dll/cpptoc/xml_reader_cpptoc.cc',
'libcef_dll/cpptoc/xml_reader_cpptoc.h',
'libcef_dll/cpptoc/zip_reader_cpptoc.cc',
'libcef_dll/cpptoc/zip_reader_cpptoc.h',
],
'autogen_client_side': [
'libcef_dll/cpptoc/accessibility_handler_cpptoc.cc',
'libcef_dll/cpptoc/accessibility_handler_cpptoc.h',
'libcef_dll/cpptoc/app_cpptoc.cc',
'libcef_dll/cpptoc/app_cpptoc.h',
'libcef_dll/cpptoc/audio_handler_cpptoc.cc',
'libcef_dll/cpptoc/audio_handler_cpptoc.h',
'libcef_dll/ctocpp/auth_callback_ctocpp.cc',
'libcef_dll/ctocpp/auth_callback_ctocpp.h',
'libcef_dll/ctocpp/before_download_callback_ctocpp.cc',
'libcef_dll/ctocpp/before_download_callback_ctocpp.h',
'libcef_dll/ctocpp/binary_value_ctocpp.cc',
'libcef_dll/ctocpp/binary_value_ctocpp.h',
'libcef_dll/ctocpp/views/box_layout_ctocpp.cc',
'libcef_dll/ctocpp/views/box_layout_ctocpp.h',
'libcef_dll/ctocpp/browser_ctocpp.cc',
'libcef_dll/ctocpp/browser_ctocpp.h',
'libcef_dll/ctocpp/browser_host_ctocpp.cc',
'libcef_dll/ctocpp/browser_host_ctocpp.h',
'libcef_dll/cpptoc/browser_process_handler_cpptoc.cc',
'libcef_dll/cpptoc/browser_process_handler_cpptoc.h',
'libcef_dll/ctocpp/views/browser_view_ctocpp.cc',
'libcef_dll/ctocpp/views/browser_view_ctocpp.h',
'libcef_dll/cpptoc/views/browser_view_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/browser_view_delegate_cpptoc.h',
'libcef_dll/ctocpp/views/button_ctocpp.cc',
'libcef_dll/ctocpp/views/button_ctocpp.h',
'libcef_dll/cpptoc/views/button_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/button_delegate_cpptoc.h',
'libcef_dll/ctocpp/callback_ctocpp.cc',
'libcef_dll/ctocpp/callback_ctocpp.h',
'libcef_dll/cpptoc/client_cpptoc.cc',
'libcef_dll/cpptoc/client_cpptoc.h',
'libcef_dll/ctocpp/command_line_ctocpp.cc',
'libcef_dll/ctocpp/command_line_ctocpp.h',
'libcef_dll/cpptoc/completion_callback_cpptoc.cc',
'libcef_dll/cpptoc/completion_callback_cpptoc.h',
'libcef_dll/cpptoc/context_menu_handler_cpptoc.cc',
'libcef_dll/cpptoc/context_menu_handler_cpptoc.h',
'libcef_dll/ctocpp/context_menu_params_ctocpp.cc',
'libcef_dll/ctocpp/context_menu_params_ctocpp.h',
'libcef_dll/cpptoc/cookie_access_filter_cpptoc.cc',
'libcef_dll/cpptoc/cookie_access_filter_cpptoc.h',
'libcef_dll/ctocpp/cookie_manager_ctocpp.cc',
'libcef_dll/ctocpp/cookie_manager_ctocpp.h',
'libcef_dll/cpptoc/cookie_visitor_cpptoc.cc',
'libcef_dll/cpptoc/cookie_visitor_cpptoc.h',
'libcef_dll/ctocpp/domdocument_ctocpp.cc',
'libcef_dll/ctocpp/domdocument_ctocpp.h',
'libcef_dll/ctocpp/domnode_ctocpp.cc',
'libcef_dll/ctocpp/domnode_ctocpp.h',
'libcef_dll/cpptoc/domvisitor_cpptoc.cc',
'libcef_dll/cpptoc/domvisitor_cpptoc.h',
'libcef_dll/cpptoc/delete_cookies_callback_cpptoc.cc',
'libcef_dll/cpptoc/delete_cookies_callback_cpptoc.h',
'libcef_dll/cpptoc/dev_tools_message_observer_cpptoc.cc',
'libcef_dll/cpptoc/dev_tools_message_observer_cpptoc.h',
'libcef_dll/cpptoc/dialog_handler_cpptoc.cc',
'libcef_dll/cpptoc/dialog_handler_cpptoc.h',
'libcef_dll/ctocpp/dictionary_value_ctocpp.cc',
'libcef_dll/ctocpp/dictionary_value_ctocpp.h',
'libcef_dll/ctocpp/views/display_ctocpp.cc',
'libcef_dll/ctocpp/views/display_ctocpp.h',
'libcef_dll/cpptoc/display_handler_cpptoc.cc',
'libcef_dll/cpptoc/display_handler_cpptoc.h',
'libcef_dll/cpptoc/download_handler_cpptoc.cc',
'libcef_dll/cpptoc/download_handler_cpptoc.h',
'libcef_dll/cpptoc/download_image_callback_cpptoc.cc',
'libcef_dll/cpptoc/download_image_callback_cpptoc.h',
'libcef_dll/ctocpp/download_item_ctocpp.cc',
'libcef_dll/ctocpp/download_item_ctocpp.h',
'libcef_dll/ctocpp/download_item_callback_ctocpp.cc',
'libcef_dll/ctocpp/download_item_callback_ctocpp.h',
'libcef_dll/ctocpp/drag_data_ctocpp.cc',
'libcef_dll/ctocpp/drag_data_ctocpp.h',
'libcef_dll/cpptoc/drag_handler_cpptoc.cc',
'libcef_dll/cpptoc/drag_handler_cpptoc.h',
'libcef_dll/cpptoc/end_tracing_callback_cpptoc.cc',
'libcef_dll/cpptoc/end_tracing_callback_cpptoc.h',
'libcef_dll/ctocpp/extension_ctocpp.cc',
'libcef_dll/ctocpp/extension_ctocpp.h',
'libcef_dll/cpptoc/extension_handler_cpptoc.cc',
'libcef_dll/cpptoc/extension_handler_cpptoc.h',
'libcef_dll/ctocpp/file_dialog_callback_ctocpp.cc',
'libcef_dll/ctocpp/file_dialog_callback_ctocpp.h',
'libcef_dll/ctocpp/views/fill_layout_ctocpp.cc',
'libcef_dll/ctocpp/views/fill_layout_ctocpp.h',
'libcef_dll/cpptoc/find_handler_cpptoc.cc',
'libcef_dll/cpptoc/find_handler_cpptoc.h',
'libcef_dll/cpptoc/focus_handler_cpptoc.cc',
'libcef_dll/cpptoc/focus_handler_cpptoc.h',
'libcef_dll/ctocpp/frame_ctocpp.cc',
'libcef_dll/ctocpp/frame_ctocpp.h',
'libcef_dll/cpptoc/frame_handler_cpptoc.cc',
'libcef_dll/cpptoc/frame_handler_cpptoc.h',
'libcef_dll/ctocpp/get_extension_resource_callback_ctocpp.cc',
'libcef_dll/ctocpp/get_extension_resource_callback_ctocpp.h',
'libcef_dll/ctocpp/image_ctocpp.cc',
'libcef_dll/ctocpp/image_ctocpp.h',
'libcef_dll/ctocpp/jsdialog_callback_ctocpp.cc',
'libcef_dll/ctocpp/jsdialog_callback_ctocpp.h',
'libcef_dll/cpptoc/jsdialog_handler_cpptoc.cc',
'libcef_dll/cpptoc/jsdialog_handler_cpptoc.h',
'libcef_dll/cpptoc/keyboard_handler_cpptoc.cc',
'libcef_dll/cpptoc/keyboard_handler_cpptoc.h',
'libcef_dll/ctocpp/views/label_button_ctocpp.cc',
'libcef_dll/ctocpp/views/label_button_ctocpp.h',
'libcef_dll/ctocpp/views/layout_ctocpp.cc',
'libcef_dll/ctocpp/views/layout_ctocpp.h',
'libcef_dll/cpptoc/life_span_handler_cpptoc.cc',
'libcef_dll/cpptoc/life_span_handler_cpptoc.h',
'libcef_dll/ctocpp/list_value_ctocpp.cc',
'libcef_dll/ctocpp/list_value_ctocpp.h',
'libcef_dll/cpptoc/load_handler_cpptoc.cc',
'libcef_dll/cpptoc/load_handler_cpptoc.h',
'libcef_dll/cpptoc/media_observer_cpptoc.cc',
'libcef_dll/cpptoc/media_observer_cpptoc.h',
'libcef_dll/ctocpp/media_route_ctocpp.cc',
'libcef_dll/ctocpp/media_route_ctocpp.h',
'libcef_dll/cpptoc/media_route_create_callback_cpptoc.cc',
'libcef_dll/cpptoc/media_route_create_callback_cpptoc.h',
'libcef_dll/ctocpp/media_router_ctocpp.cc',
'libcef_dll/ctocpp/media_router_ctocpp.h',
'libcef_dll/ctocpp/media_sink_ctocpp.cc',
'libcef_dll/ctocpp/media_sink_ctocpp.h',
'libcef_dll/cpptoc/media_sink_device_info_callback_cpptoc.cc',
'libcef_dll/cpptoc/media_sink_device_info_callback_cpptoc.h',
'libcef_dll/ctocpp/media_source_ctocpp.cc',
'libcef_dll/ctocpp/media_source_ctocpp.h',
'libcef_dll/ctocpp/views/menu_button_ctocpp.cc',
'libcef_dll/ctocpp/views/menu_button_ctocpp.h',
'libcef_dll/cpptoc/views/menu_button_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/menu_button_delegate_cpptoc.h',
'libcef_dll/ctocpp/views/menu_button_pressed_lock_ctocpp.cc',
'libcef_dll/ctocpp/views/menu_button_pressed_lock_ctocpp.h',
'libcef_dll/ctocpp/menu_model_ctocpp.cc',
'libcef_dll/ctocpp/menu_model_ctocpp.h',
'libcef_dll/cpptoc/menu_model_delegate_cpptoc.cc',
'libcef_dll/cpptoc/menu_model_delegate_cpptoc.h',
'libcef_dll/ctocpp/navigation_entry_ctocpp.cc',
'libcef_dll/ctocpp/navigation_entry_ctocpp.h',
'libcef_dll/cpptoc/navigation_entry_visitor_cpptoc.cc',
'libcef_dll/cpptoc/navigation_entry_visitor_cpptoc.h',
'libcef_dll/ctocpp/views/panel_ctocpp.cc',
'libcef_dll/ctocpp/views/panel_ctocpp.h',
'libcef_dll/cpptoc/views/panel_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/panel_delegate_cpptoc.h',
'libcef_dll/cpptoc/pdf_print_callback_cpptoc.cc',
'libcef_dll/cpptoc/pdf_print_callback_cpptoc.h',
'libcef_dll/ctocpp/post_data_ctocpp.cc',
'libcef_dll/ctocpp/post_data_ctocpp.h',
'libcef_dll/ctocpp/post_data_element_ctocpp.cc',
'libcef_dll/ctocpp/post_data_element_ctocpp.h',
'libcef_dll/ctocpp/print_dialog_callback_ctocpp.cc',
'libcef_dll/ctocpp/print_dialog_callback_ctocpp.h',
'libcef_dll/cpptoc/print_handler_cpptoc.cc',
'libcef_dll/cpptoc/print_handler_cpptoc.h',
'libcef_dll/ctocpp/print_job_callback_ctocpp.cc',
'libcef_dll/ctocpp/print_job_callback_ctocpp.h',
'libcef_dll/ctocpp/print_settings_ctocpp.cc',
'libcef_dll/ctocpp/print_settings_ctocpp.h',
'libcef_dll/ctocpp/process_message_ctocpp.cc',
'libcef_dll/ctocpp/process_message_ctocpp.h',
'libcef_dll/cpptoc/read_handler_cpptoc.cc',
'libcef_dll/cpptoc/read_handler_cpptoc.h',
'libcef_dll/cpptoc/register_cdm_callback_cpptoc.cc',
'libcef_dll/cpptoc/register_cdm_callback_cpptoc.h',
'libcef_dll/ctocpp/registration_ctocpp.cc',
'libcef_dll/ctocpp/registration_ctocpp.h',
'libcef_dll/cpptoc/render_handler_cpptoc.cc',
'libcef_dll/cpptoc/render_handler_cpptoc.h',
'libcef_dll/cpptoc/render_process_handler_cpptoc.cc',
'libcef_dll/cpptoc/render_process_handler_cpptoc.h',
'libcef_dll/ctocpp/request_ctocpp.cc',
'libcef_dll/ctocpp/request_ctocpp.h',
'libcef_dll/ctocpp/request_callback_ctocpp.cc',
'libcef_dll/ctocpp/request_callback_ctocpp.h',
'libcef_dll/ctocpp/request_context_ctocpp.cc',
'libcef_dll/ctocpp/request_context_ctocpp.h',
'libcef_dll/cpptoc/request_context_handler_cpptoc.cc',
'libcef_dll/cpptoc/request_context_handler_cpptoc.h',
'libcef_dll/cpptoc/request_handler_cpptoc.cc',
'libcef_dll/cpptoc/request_handler_cpptoc.h',
'libcef_dll/cpptoc/resolve_callback_cpptoc.cc',
'libcef_dll/cpptoc/resolve_callback_cpptoc.h',
'libcef_dll/ctocpp/resource_bundle_ctocpp.cc',
'libcef_dll/ctocpp/resource_bundle_ctocpp.h',
'libcef_dll/cpptoc/resource_bundle_handler_cpptoc.cc',
'libcef_dll/cpptoc/resource_bundle_handler_cpptoc.h',
'libcef_dll/cpptoc/resource_handler_cpptoc.cc',
'libcef_dll/cpptoc/resource_handler_cpptoc.h',
'libcef_dll/ctocpp/resource_read_callback_ctocpp.cc',
'libcef_dll/ctocpp/resource_read_callback_ctocpp.h',
'libcef_dll/cpptoc/resource_request_handler_cpptoc.cc',
'libcef_dll/cpptoc/resource_request_handler_cpptoc.h',
'libcef_dll/ctocpp/resource_skip_callback_ctocpp.cc',
'libcef_dll/ctocpp/resource_skip_callback_ctocpp.h',
'libcef_dll/ctocpp/response_ctocpp.cc',
'libcef_dll/ctocpp/response_ctocpp.h',
'libcef_dll/cpptoc/response_filter_cpptoc.cc',
'libcef_dll/cpptoc/response_filter_cpptoc.h',
'libcef_dll/ctocpp/run_context_menu_callback_ctocpp.cc',
'libcef_dll/ctocpp/run_context_menu_callback_ctocpp.h',
'libcef_dll/cpptoc/run_file_dialog_callback_cpptoc.cc',
'libcef_dll/cpptoc/run_file_dialog_callback_cpptoc.h',
'libcef_dll/ctocpp/sslinfo_ctocpp.cc',
'libcef_dll/ctocpp/sslinfo_ctocpp.h',
'libcef_dll/ctocpp/sslstatus_ctocpp.cc',
'libcef_dll/ctocpp/sslstatus_ctocpp.h',
'libcef_dll/cpptoc/scheme_handler_factory_cpptoc.cc',
'libcef_dll/cpptoc/scheme_handler_factory_cpptoc.h',
'libcef_dll/ctocpp/scheme_registrar_ctocpp.cc',
'libcef_dll/ctocpp/scheme_registrar_ctocpp.h',
'libcef_dll/ctocpp/views/scroll_view_ctocpp.cc',
'libcef_dll/ctocpp/views/scroll_view_ctocpp.h',
'libcef_dll/ctocpp/select_client_certificate_callback_ctocpp.cc',
'libcef_dll/ctocpp/select_client_certificate_callback_ctocpp.h',
'libcef_dll/ctocpp/server_ctocpp.cc',
'libcef_dll/ctocpp/server_ctocpp.h',
'libcef_dll/cpptoc/server_handler_cpptoc.cc',
'libcef_dll/cpptoc/server_handler_cpptoc.h',
'libcef_dll/cpptoc/set_cookie_callback_cpptoc.cc',
'libcef_dll/cpptoc/set_cookie_callback_cpptoc.h',
'libcef_dll/ctocpp/stream_reader_ctocpp.cc',
'libcef_dll/ctocpp/stream_reader_ctocpp.h',
'libcef_dll/ctocpp/stream_writer_ctocpp.cc',
'libcef_dll/ctocpp/stream_writer_ctocpp.h',
'libcef_dll/cpptoc/string_visitor_cpptoc.cc',
'libcef_dll/cpptoc/string_visitor_cpptoc.h',
'libcef_dll/cpptoc/task_cpptoc.cc',
'libcef_dll/cpptoc/task_cpptoc.h',
'libcef_dll/ctocpp/task_runner_ctocpp.cc',
'libcef_dll/ctocpp/task_runner_ctocpp.h',
'libcef_dll/ctocpp/views/textfield_ctocpp.cc',
'libcef_dll/ctocpp/views/textfield_ctocpp.h',
'libcef_dll/cpptoc/views/textfield_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/textfield_delegate_cpptoc.h',
'libcef_dll/ctocpp/thread_ctocpp.cc',
'libcef_dll/ctocpp/thread_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ctocpp.h',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_client_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_client_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_client_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_ref_ptr_client_child_cpptoc.h',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_child_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_child_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_ref_ptr_library_child_child_ctocpp.h',
'libcef_dll/cpptoc/test/translator_test_scoped_client_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_scoped_client_cpptoc.h',
'libcef_dll/cpptoc/test/translator_test_scoped_client_child_cpptoc.cc',
'libcef_dll/cpptoc/test/translator_test_scoped_client_child_cpptoc.h',
'libcef_dll/ctocpp/test/translator_test_scoped_library_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_scoped_library_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_scoped_library_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_scoped_library_child_ctocpp.h',
'libcef_dll/ctocpp/test/translator_test_scoped_library_child_child_ctocpp.cc',
'libcef_dll/ctocpp/test/translator_test_scoped_library_child_child_ctocpp.h',
'libcef_dll/ctocpp/urlrequest_ctocpp.cc',
'libcef_dll/ctocpp/urlrequest_ctocpp.h',
'libcef_dll/cpptoc/urlrequest_client_cpptoc.cc',
'libcef_dll/cpptoc/urlrequest_client_cpptoc.h',
'libcef_dll/cpptoc/v8accessor_cpptoc.cc',
'libcef_dll/cpptoc/v8accessor_cpptoc.h',
'libcef_dll/cpptoc/v8array_buffer_release_callback_cpptoc.cc',
'libcef_dll/cpptoc/v8array_buffer_release_callback_cpptoc.h',
'libcef_dll/ctocpp/v8context_ctocpp.cc',
'libcef_dll/ctocpp/v8context_ctocpp.h',
'libcef_dll/ctocpp/v8exception_ctocpp.cc',
'libcef_dll/ctocpp/v8exception_ctocpp.h',
'libcef_dll/cpptoc/v8handler_cpptoc.cc',
'libcef_dll/cpptoc/v8handler_cpptoc.h',
'libcef_dll/cpptoc/v8interceptor_cpptoc.cc',
'libcef_dll/cpptoc/v8interceptor_cpptoc.h',
'libcef_dll/ctocpp/v8stack_frame_ctocpp.cc',
'libcef_dll/ctocpp/v8stack_frame_ctocpp.h',
'libcef_dll/ctocpp/v8stack_trace_ctocpp.cc',
'libcef_dll/ctocpp/v8stack_trace_ctocpp.h',
'libcef_dll/ctocpp/v8value_ctocpp.cc',
'libcef_dll/ctocpp/v8value_ctocpp.h',
'libcef_dll/ctocpp/value_ctocpp.cc',
'libcef_dll/ctocpp/value_ctocpp.h',
'libcef_dll/ctocpp/views/view_ctocpp.cc',
'libcef_dll/ctocpp/views/view_ctocpp.h',
'libcef_dll/cpptoc/views/view_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/view_delegate_cpptoc.h',
'libcef_dll/ctocpp/waitable_event_ctocpp.cc',
'libcef_dll/ctocpp/waitable_event_ctocpp.h',
'libcef_dll/ctocpp/web_plugin_info_ctocpp.cc',
'libcef_dll/ctocpp/web_plugin_info_ctocpp.h',
'libcef_dll/cpptoc/web_plugin_info_visitor_cpptoc.cc',
'libcef_dll/cpptoc/web_plugin_info_visitor_cpptoc.h',
'libcef_dll/cpptoc/web_plugin_unstable_callback_cpptoc.cc',
'libcef_dll/cpptoc/web_plugin_unstable_callback_cpptoc.h',
'libcef_dll/ctocpp/views/window_ctocpp.cc',
'libcef_dll/ctocpp/views/window_ctocpp.h',
'libcef_dll/cpptoc/views/window_delegate_cpptoc.cc',
'libcef_dll/cpptoc/views/window_delegate_cpptoc.h',
'libcef_dll/cpptoc/write_handler_cpptoc.cc',
'libcef_dll/cpptoc/write_handler_cpptoc.h',
'libcef_dll/ctocpp/x509cert_principal_ctocpp.cc',
'libcef_dll/ctocpp/x509cert_principal_ctocpp.h',
'libcef_dll/ctocpp/x509certificate_ctocpp.cc',
'libcef_dll/ctocpp/x509certificate_ctocpp.h',
'libcef_dll/ctocpp/xml_reader_ctocpp.cc',
'libcef_dll/ctocpp/xml_reader_ctocpp.h',
'libcef_dll/ctocpp/zip_reader_ctocpp.cc',
'libcef_dll/ctocpp/zip_reader_ctocpp.h',
],
},
}
| [
"magreenblatt@gmail.com"
] | magreenblatt@gmail.com |
4dca2008c0b2d727f3921ac5ce13a814ba4dd16e | 463405ee0b4eabe0c0909215645d1ec6b7417964 | /blog/models.py | 38738797104af6b6ffa931317e37782688af7b77 | [] | no_license | vaceero/blog-project | 153f49e2ef2817de0091abf54914e055c2473a11 | d2cd986b7fc86ff22b34ac2f6e4852ef9c8219fd | refs/heads/master | 2021-05-18T18:12:04.948075 | 2020-04-10T15:48:41 | 2020-04-10T15:48:41 | 251,353,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | from django.db import models
from django.utils import timezone
class Post(models.Model):
    """A blog entry authored by a site user."""

    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    # Stamped automatically at creation time; publishing is a separate step.
    created_date = models.DateTimeField(default=timezone.now)
    # Remains NULL until publish() is called.
    published_date = models.DateTimeField(blank=True, null=True)

    def publish(self):
        """Mark the post as published right now and persist the change."""
        self.published_date = timezone.now()
        self.save()

    def __str__(self):
        return self.title
class Comment(models.Model):
    """A reader comment attached to a Post, hidden until approved."""

    post = models.ForeignKey('blog.Post', on_delete=models.CASCADE,
                             related_name='comments')
    author = models.CharField(max_length=50)
    text = models.TextField()
    created_date = models.DateTimeField(default=timezone.now)
    # Moderation flag; flipped to True by approve().
    is_approved = models.BooleanField(default=False)

    def approve(self):
        """Mark this comment as moderator-approved and save it."""
        self.is_approved = True
        self.save()

    def __str__(self):
        return self.text
"przemyslaw.sujecki@gmail.com"
] | przemyslaw.sujecki@gmail.com |
5d22da835c8636ffdb5fdd2692263065d45e9c99 | ff16f1c4beb1f96a0d8550f000f2c24b069b8ed3 | /src/pycairo/gtk/Transform.py | 86ffd4153d4783d57c17e9b6a70fc3ca030b1ea0 | [] | no_license | simon-rock/python_workspace | 32c4384815d03a493d9957e1ea15283426ba414f | 9299363f3dfea0371d2d2a02b9beccd4320093d6 | refs/heads/master | 2020-12-24T14:00:48.683661 | 2020-07-28T07:47:16 | 2020-07-28T07:47:16 | 34,956,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,105 | py | #!/usr/bin/env python
#coding: utf-8
#encoding: utf-8
import framework
from math import pi
class Transform(framework.Screen):
    """Demo screen showing cairo coordinate transforms.

    A translate/scale pair maps the logical unit square (0,0)-(1,1) onto
    the pixel rectangle (20,20)-(width-20, height-20), so the drawing that
    follows is resolution independent.
    """

    def draw(self, cr, width, height):
        """Render the demo onto cairo context ``cr`` (width x height pixels)."""
        # Grey background covering the whole surface.
        cr.set_source_rgb(0.5, 0.5, 0.5)
        cr.rectangle(0, 0, width, height)
        cr.fill()
        # White canvas inset by a 10px margin.
        cr.set_source_rgb(1.0, 1.0, 1.0)
        cr.rectangle(10, 10, width - 20, height - 20)
        cr.fill()
        # Map (0,0)-(1,1) to (20,20)-(width-40+20, height-40+20): every
        # coordinate below is expressed in the unit square.
        cr.translate(20, 20)
        cr.scale((width - 40) / 1.0, (height - 40) / 1.0)
        # Two short vertical blue lines at x = 1/3 and x = 2/3.
        cr.set_line_width(0.01)
        cr.set_source_rgb(0.0, 0.0, 0.8)
        cr.move_to(1 / 3.0, 1 / 3.0)
        cr.rel_line_to(0, 1 / 6.0)
        cr.move_to(2 / 3.0, 1 / 3.0)
        cr.rel_line_to(0, 1 / 6.0)
        cr.stroke()
        # Red full circle plus a shorter arc segment below centre.
        # (Removed the unused local `radius = 1`; the arc radii are literal.)
        cr.set_source_rgb(1.0, 0.0, 0.0)
        cr.arc(0.5, 0.5, 0.5, 0, 2 * pi)
        cr.stroke()
        cr.arc(0.5, 0.5, 0.33, pi / 3, 2 * pi / 3)
        cr.stroke()
# Entry point: hand the Transform screen class to the shared demo harness.
framework.run(Transform)
| [
"simon29rock@gmail.com"
] | simon29rock@gmail.com |
cfc28ca228ed7494d798191c00ed9e5807b91e18 | 53560ba320b76244216bb7c940cb5c81dc856669 | /monster/middleware.py | baab2ad294e6d1b806f254bd40493ac47ddd94c3 | [] | no_license | TitanEntertainmentGroup/django-monster | eaf53a0742246340222adced5731d0651f2fdf6a | 65be27b552b024af4babca8ad094f8710393690c | refs/heads/master | 2021-01-18T01:38:41.484574 | 2013-09-17T08:59:06 | 2013-09-17T08:59:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,430 | py | from django.conf import settings
from django.template.loader import render_to_string
from django.utils.encoding import smart_unicode
from monster.models import Region
def replace_insensitive(string, target, replacement):
    """Case-insensitively replace the LAST occurrence of *target* in *string*.

    Unlike ``str.replace`` this ignores case when locating *target* and only
    substitutes the final match (found via ``rfind``).  When *target* does
    not occur at all, *string* is returned unchanged.
    """
    position = string.lower().rfind(target.lower())
    if position < 0:
        # No match anywhere; hand back the original untouched.
        return string
    return string[:position] + replacement + string[position + len(target):]
class MonsterMiddleware():
    """Injects the monster toolbar into HTML responses when enabled.

    ``process_request`` defaults the flag to off; a view may set
    ``request.monster_enabled = True`` to opt in, after which
    ``process_response`` splices the rendered toolbar just before the
    closing ``</body>`` tag.
    """

    def process_request(self, request):
        # Disabled by default; views opt in explicitly.
        request.monster_enabled = False

    def process_response(self, request, response):
        # The flag is absent when process_request never ran (e.g. another
        # middleware short-circuited the request); treat that the same as
        # disabled instead of the previous bare `except:` control flow.
        if not getattr(request, 'monster_enabled', False):
            return response
        data = {
            'MONSTER_MEDIA_URL': settings.MONSTER_MEDIA_URL,
        }
        try:
            toolbar = render_to_string('monster/toolbar.html', data)
            response.content = replace_insensitive(
                smart_unicode(response.content),
                u'</body>',
                smart_unicode(toolbar + u'</body>'))
        except Exception:
            # Best effort: a template or encoding failure must not break the
            # response, so fall through and return it unmodified.
            pass
        return response
"andrew.ingram@titanemail.com"
] | andrew.ingram@titanemail.com |
8b53aa085bd20e81eb42d40797d09b3746a16116 | 130215e73cd45824fc5b7b2bc85949ce03115f20 | /py/syn10m02m.py | 320e65ea39115ace48776945eb23d5a8a2a61ab1 | [] | no_license | felicitygong/MINLPinstances | 062634bf709a782a860234ec2daa7e6bf374371e | 1cd9c799c5758baa0818394c07adea84659c064c | refs/heads/master | 2022-12-06T11:58:14.141832 | 2022-12-01T17:17:35 | 2022-12-01T17:17:35 | 119,295,560 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 18,238 | py | # MINLP written by GAMS Convert at 11/10/17 15:35:28
#
# Equation counts
# Total E G L N X C B
# 199 15 50 134 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 111 71 40 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 501 489 12 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
# NOTE(review): this file is machine-generated (GAMS Convert, see header);
# prefer regenerating from the GAMS source over hand-editing the data below.
model = m = ConcreteModel()
# Continuous variables x2..x51 -- presumably process flows; bounds come
# straight from the converted GAMS model.
m.x2 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
# Binary selection variables b52..b91 (two scenarios per unit pair).
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b64 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b65 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b66 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b67 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b68 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b69 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b70 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b71 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b72 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b73 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b74 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b75 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b76 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b77 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b78 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b79 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b80 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b81 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b82 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b83 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b84 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b85 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b86 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b87 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b88 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b89 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b90 = Var(within=Binary,bounds=(0,1),initialize=0)
# Free auxiliaries x92..x111: constraints c88..c127 below pin each one to
# -cost * b (fixed-charge reformulation), hence the unbounded domain.
m.b91 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x92 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(None,None),initialize=0)
# Linear objective: product revenues minus raw-material costs and the
# binary fixed charges (maximization).
m.obj = Objective(expr= - m.x2 - m.x3 + 5*m.x14 + 10*m.x15 - 2*m.x24 - m.x25 + 80*m.x40 + 90*m.x41 + 285*m.x42
                        + 390*m.x43 + 290*m.x44 + 405*m.x45 + 280*m.x46 + 400*m.x47 + 290*m.x48 + 300*m.x49 + 350*m.x50
                        + 250*m.x51 - 5*m.b72 - 4*m.b73 - 8*m.b74 - 7*m.b75 - 6*m.b76 - 9*m.b77 - 10*m.b78 - 9*m.b79
                        - 6*m.b80 - 10*m.b81 - 7*m.b82 - 7*m.b83 - 4*m.b84 - 3*m.b85 - 5*m.b86 - 6*m.b87 - 2*m.b88
                        - 5*m.b89 - 4*m.b90 - 7*m.b91, sense=maximize)
# c2..c13: flow-balance equalities around the units.
m.c2 = Constraint(expr= m.x2 - m.x4 - m.x6 == 0)
m.c3 = Constraint(expr= m.x3 - m.x5 - m.x7 == 0)
m.c4 = Constraint(expr= - m.x8 - m.x10 + m.x12 == 0)
m.c5 = Constraint(expr= - m.x9 - m.x11 + m.x13 == 0)
m.c6 = Constraint(expr= m.x12 - m.x14 - m.x16 == 0)
m.c7 = Constraint(expr= m.x13 - m.x15 - m.x17 == 0)
m.c8 = Constraint(expr= m.x16 - m.x18 - m.x20 - m.x22 == 0)
m.c9 = Constraint(expr= m.x17 - m.x19 - m.x21 - m.x23 == 0)
m.c10 = Constraint(expr= m.x26 - m.x32 - m.x34 == 0)
m.c11 = Constraint(expr= m.x27 - m.x33 - m.x35 == 0)
m.c12 = Constraint(expr= m.x30 - m.x36 - m.x38 - m.x40 == 0)
m.c13 = Constraint(expr= m.x31 - m.x37 - m.x39 - m.x41 == 0)
# c14..c87: unit performance relations (log terms are the nonlinear part)
# plus big-M bounds that switch flows off when the owning binary is 0.
m.c14 = Constraint(expr=-log(1 + m.x4) + m.x8 + m.b52 <= 1)
m.c15 = Constraint(expr=-log(1 + m.x5) + m.x9 + m.b53 <= 1)
m.c16 = Constraint(expr= m.x4 - 40*m.b52 <= 0)
m.c17 = Constraint(expr= m.x5 - 40*m.b53 <= 0)
m.c18 = Constraint(expr= m.x8 - 3.71357206670431*m.b52 <= 0)
m.c19 = Constraint(expr= m.x9 - 3.71357206670431*m.b53 <= 0)
m.c20 = Constraint(expr=-1.2*log(1 + m.x6) + m.x10 + m.b54 <= 1)
m.c21 = Constraint(expr=-1.2*log(1 + m.x7) + m.x11 + m.b55 <= 1)
m.c22 = Constraint(expr= m.x6 - 40*m.b54 <= 0)
m.c23 = Constraint(expr= m.x7 - 40*m.b55 <= 0)
m.c24 = Constraint(expr= m.x10 - 4.45628648004517*m.b54 <= 0)
m.c25 = Constraint(expr= m.x11 - 4.45628648004517*m.b55 <= 0)
m.c26 = Constraint(expr= - 0.75*m.x18 + m.x26 + m.b56 <= 1)
m.c27 = Constraint(expr= - 0.75*m.x19 + m.x27 + m.b57 <= 1)
m.c28 = Constraint(expr= - 0.75*m.x18 + m.x26 - m.b56 >= -1)
m.c29 = Constraint(expr= - 0.75*m.x19 + m.x27 - m.b57 >= -1)
m.c30 = Constraint(expr= m.x18 - 4.45628648004517*m.b56 <= 0)
m.c31 = Constraint(expr= m.x19 - 4.45628648004517*m.b57 <= 0)
m.c32 = Constraint(expr= m.x26 - 3.34221486003388*m.b56 <= 0)
m.c33 = Constraint(expr= m.x27 - 3.34221486003388*m.b57 <= 0)
m.c34 = Constraint(expr=-1.5*log(1 + m.x20) + m.x28 + m.b58 <= 1)
m.c35 = Constraint(expr=-1.5*log(1 + m.x21) + m.x29 + m.b59 <= 1)
m.c36 = Constraint(expr= m.x20 - 4.45628648004517*m.b58 <= 0)
m.c37 = Constraint(expr= m.x21 - 4.45628648004517*m.b59 <= 0)
m.c38 = Constraint(expr= m.x28 - 2.54515263975353*m.b58 <= 0)
m.c39 = Constraint(expr= m.x29 - 2.54515263975353*m.b59 <= 0)
m.c40 = Constraint(expr= - m.x22 + m.x30 + m.b60 <= 1)
m.c41 = Constraint(expr= - m.x23 + m.x31 + m.b61 <= 1)
m.c42 = Constraint(expr= - m.x22 + m.x30 - m.b60 >= -1)
m.c43 = Constraint(expr= - m.x23 + m.x31 - m.b61 >= -1)
m.c44 = Constraint(expr= - 0.5*m.x24 + m.x30 + m.b60 <= 1)
m.c45 = Constraint(expr= - 0.5*m.x25 + m.x31 + m.b61 <= 1)
m.c46 = Constraint(expr= - 0.5*m.x24 + m.x30 - m.b60 >= -1)
m.c47 = Constraint(expr= - 0.5*m.x25 + m.x31 - m.b61 >= -1)
m.c48 = Constraint(expr= m.x22 - 4.45628648004517*m.b60 <= 0)
m.c49 = Constraint(expr= m.x23 - 4.45628648004517*m.b61 <= 0)
m.c50 = Constraint(expr= m.x24 - 30*m.b60 <= 0)
m.c51 = Constraint(expr= m.x25 - 30*m.b61 <= 0)
m.c52 = Constraint(expr= m.x30 - 15*m.b60 <= 0)
m.c53 = Constraint(expr= m.x31 - 15*m.b61 <= 0)
m.c54 = Constraint(expr=-1.25*log(1 + m.x32) + m.x42 + m.b62 <= 1)
m.c55 = Constraint(expr=-1.25*log(1 + m.x33) + m.x43 + m.b63 <= 1)
m.c56 = Constraint(expr= m.x32 - 3.34221486003388*m.b62 <= 0)
m.c57 = Constraint(expr= m.x33 - 3.34221486003388*m.b63 <= 0)
m.c58 = Constraint(expr= m.x42 - 1.83548069293539*m.b62 <= 0)
m.c59 = Constraint(expr= m.x43 - 1.83548069293539*m.b63 <= 0)
m.c60 = Constraint(expr=-0.9*log(1 + m.x34) + m.x44 + m.b64 <= 1)
m.c61 = Constraint(expr=-0.9*log(1 + m.x35) + m.x45 + m.b65 <= 1)
m.c62 = Constraint(expr= m.x34 - 3.34221486003388*m.b64 <= 0)
m.c63 = Constraint(expr= m.x35 - 3.34221486003388*m.b65 <= 0)
m.c64 = Constraint(expr= m.x44 - 1.32154609891348*m.b64 <= 0)
m.c65 = Constraint(expr= m.x45 - 1.32154609891348*m.b65 <= 0)
m.c66 = Constraint(expr=-log(1 + m.x28) + m.x46 + m.b66 <= 1)
m.c67 = Constraint(expr=-log(1 + m.x29) + m.x47 + m.b67 <= 1)
m.c68 = Constraint(expr= m.x28 - 2.54515263975353*m.b66 <= 0)
m.c69 = Constraint(expr= m.x29 - 2.54515263975353*m.b67 <= 0)
m.c70 = Constraint(expr= m.x46 - 1.26558121681553*m.b66 <= 0)
m.c71 = Constraint(expr= m.x47 - 1.26558121681553*m.b67 <= 0)
m.c72 = Constraint(expr= - 0.9*m.x36 + m.x48 + m.b68 <= 1)
m.c73 = Constraint(expr= - 0.9*m.x37 + m.x49 + m.b69 <= 1)
m.c74 = Constraint(expr= - 0.9*m.x36 + m.x48 - m.b68 >= -1)
m.c75 = Constraint(expr= - 0.9*m.x37 + m.x49 - m.b69 >= -1)
m.c76 = Constraint(expr= m.x36 - 15*m.b68 <= 0)
m.c77 = Constraint(expr= m.x37 - 15*m.b69 <= 0)
m.c78 = Constraint(expr= m.x48 - 13.5*m.b68 <= 0)
m.c79 = Constraint(expr= m.x49 - 13.5*m.b69 <= 0)
m.c80 = Constraint(expr= - 0.6*m.x38 + m.x50 + m.b70 <= 1)
m.c81 = Constraint(expr= - 0.6*m.x39 + m.x51 + m.b71 <= 1)
m.c82 = Constraint(expr= - 0.6*m.x38 + m.x50 - m.b70 >= -1)
m.c83 = Constraint(expr= - 0.6*m.x39 + m.x51 - m.b71 >= -1)
m.c84 = Constraint(expr= m.x38 - 15*m.b70 <= 0)
m.c85 = Constraint(expr= m.x39 - 15*m.b71 <= 0)
m.c86 = Constraint(expr= m.x50 - 9*m.b70 <= 0)
m.c87 = Constraint(expr= m.x51 - 9*m.b71 <= 0)
# c88..c127: paired <=/>= rows that force x92..x111 = -cost * b72..b91.
m.c88 = Constraint(expr= 5*m.b72 + m.x92 <= 0)
m.c89 = Constraint(expr= 4*m.b73 + m.x93 <= 0)
m.c90 = Constraint(expr= 8*m.b74 + m.x94 <= 0)
m.c91 = Constraint(expr= 7*m.b75 + m.x95 <= 0)
m.c92 = Constraint(expr= 6*m.b76 + m.x96 <= 0)
m.c93 = Constraint(expr= 9*m.b77 + m.x97 <= 0)
m.c94 = Constraint(expr= 10*m.b78 + m.x98 <= 0)
m.c95 = Constraint(expr= 9*m.b79 + m.x99 <= 0)
m.c96 = Constraint(expr= 6*m.b80 + m.x100 <= 0)
m.c97 = Constraint(expr= 10*m.b81 + m.x101 <= 0)
m.c98 = Constraint(expr= 7*m.b82 + m.x102 <= 0)
m.c99 = Constraint(expr= 7*m.b83 + m.x103 <= 0)
m.c100 = Constraint(expr= 4*m.b84 + m.x104 <= 0)
m.c101 = Constraint(expr= 3*m.b85 + m.x105 <= 0)
m.c102 = Constraint(expr= 5*m.b86 + m.x106 <= 0)
m.c103 = Constraint(expr= 6*m.b87 + m.x107 <= 0)
m.c104 = Constraint(expr= 2*m.b88 + m.x108 <= 0)
m.c105 = Constraint(expr= 5*m.b89 + m.x109 <= 0)
m.c106 = Constraint(expr= 4*m.b90 + m.x110 <= 0)
m.c107 = Constraint(expr= 7*m.b91 + m.x111 <= 0)
m.c108 = Constraint(expr= 5*m.b72 + m.x92 >= 0)
m.c109 = Constraint(expr= 4*m.b73 + m.x93 >= 0)
m.c110 = Constraint(expr= 8*m.b74 + m.x94 >= 0)
m.c111 = Constraint(expr= 7*m.b75 + m.x95 >= 0)
m.c112 = Constraint(expr= 6*m.b76 + m.x96 >= 0)
m.c113 = Constraint(expr= 9*m.b77 + m.x97 >= 0)
m.c114 = Constraint(expr= 10*m.b78 + m.x98 >= 0)
m.c115 = Constraint(expr= 9*m.b79 + m.x99 >= 0)
m.c116 = Constraint(expr= 6*m.b80 + m.x100 >= 0)
m.c117 = Constraint(expr= 10*m.b81 + m.x101 >= 0)
m.c118 = Constraint(expr= 7*m.b82 + m.x102 >= 0)
m.c119 = Constraint(expr= 7*m.b83 + m.x103 >= 0)
m.c120 = Constraint(expr= 4*m.b84 + m.x104 >= 0)
m.c121 = Constraint(expr= 3*m.b85 + m.x105 >= 0)
m.c122 = Constraint(expr= 5*m.b86 + m.x106 >= 0)
m.c123 = Constraint(expr= 6*m.b87 + m.x107 >= 0)
m.c124 = Constraint(expr= 2*m.b88 + m.x108 >= 0)
m.c125 = Constraint(expr= 5*m.b89 + m.x109 >= 0)
m.c126 = Constraint(expr= 4*m.b90 + m.x110 >= 0)
m.c127 = Constraint(expr= 7*m.b91 + m.x111 >= 0)
# c128..c137: ordering within each binary pair (first implies second).
m.c128 = Constraint(expr= m.b52 - m.b53 <= 0)
m.c129 = Constraint(expr= m.b54 - m.b55 <= 0)
m.c130 = Constraint(expr= m.b56 - m.b57 <= 0)
m.c131 = Constraint(expr= m.b58 - m.b59 <= 0)
m.c132 = Constraint(expr= m.b60 - m.b61 <= 0)
m.c133 = Constraint(expr= m.b62 - m.b63 <= 0)
m.c134 = Constraint(expr= m.b64 - m.b65 <= 0)
m.c135 = Constraint(expr= m.b66 - m.b67 <= 0)
m.c136 = Constraint(expr= m.b68 - m.b69 <= 0)
m.c137 = Constraint(expr= m.b70 - m.b71 <= 0)
# c138..c157: at most one of each fixed-charge pair.  Each row appears
# twice -- duplication carried over from the GAMS source, harmless.
m.c138 = Constraint(expr= m.b72 + m.b73 <= 1)
m.c139 = Constraint(expr= m.b72 + m.b73 <= 1)
m.c140 = Constraint(expr= m.b74 + m.b75 <= 1)
m.c141 = Constraint(expr= m.b74 + m.b75 <= 1)
m.c142 = Constraint(expr= m.b76 + m.b77 <= 1)
m.c143 = Constraint(expr= m.b76 + m.b77 <= 1)
m.c144 = Constraint(expr= m.b78 + m.b79 <= 1)
m.c145 = Constraint(expr= m.b78 + m.b79 <= 1)
m.c146 = Constraint(expr= m.b80 + m.b81 <= 1)
m.c147 = Constraint(expr= m.b80 + m.b81 <= 1)
m.c148 = Constraint(expr= m.b82 + m.b83 <= 1)
m.c149 = Constraint(expr= m.b82 + m.b83 <= 1)
m.c150 = Constraint(expr= m.b84 + m.b85 <= 1)
m.c151 = Constraint(expr= m.b84 + m.b85 <= 1)
m.c152 = Constraint(expr= m.b86 + m.b87 <= 1)
m.c153 = Constraint(expr= m.b86 + m.b87 <= 1)
m.c154 = Constraint(expr= m.b88 + m.b89 <= 1)
m.c155 = Constraint(expr= m.b88 + m.b89 <= 1)
m.c156 = Constraint(expr= m.b90 + m.b91 <= 1)
m.c157 = Constraint(expr= m.b90 + m.b91 <= 1)
# c158..c177: selection binaries b52..b71 imply their fixed-charge
# counterparts b72..b91.
m.c158 = Constraint(expr= m.b52 - m.b72 <= 0)
m.c159 = Constraint(expr= - m.b52 + m.b53 - m.b73 <= 0)
m.c160 = Constraint(expr= m.b54 - m.b74 <= 0)
m.c161 = Constraint(expr= - m.b54 + m.b55 - m.b75 <= 0)
m.c162 = Constraint(expr= m.b56 - m.b76 <= 0)
m.c163 = Constraint(expr= - m.b56 + m.b57 - m.b77 <= 0)
m.c164 = Constraint(expr= m.b58 - m.b78 <= 0)
m.c165 = Constraint(expr= - m.b58 + m.b59 - m.b79 <= 0)
m.c166 = Constraint(expr= m.b60 - m.b80 <= 0)
m.c167 = Constraint(expr= - m.b60 + m.b61 - m.b81 <= 0)
m.c168 = Constraint(expr= m.b62 - m.b82 <= 0)
m.c169 = Constraint(expr= - m.b62 + m.b63 - m.b83 <= 0)
m.c170 = Constraint(expr= m.b64 - m.b84 <= 0)
m.c171 = Constraint(expr= - m.b64 + m.b65 - m.b85 <= 0)
m.c172 = Constraint(expr= m.b66 - m.b86 <= 0)
m.c173 = Constraint(expr= - m.b66 + m.b67 - m.b87 <= 0)
m.c174 = Constraint(expr= m.b68 - m.b88 <= 0)
m.c175 = Constraint(expr= - m.b68 + m.b69 - m.b89 <= 0)
m.c176 = Constraint(expr= m.b70 - m.b90 <= 0)
m.c177 = Constraint(expr= - m.b70 + m.b71 - m.b91 <= 0)
# c178..c199: flowsheet selection logic (exactly one feed path, and
# downstream units only if an upstream path feeds them).
m.c178 = Constraint(expr= m.b52 + m.b54 == 1)
m.c179 = Constraint(expr= m.b53 + m.b55 == 1)
m.c180 = Constraint(expr= - m.b56 + m.b62 + m.b64 >= 0)
m.c181 = Constraint(expr= - m.b57 + m.b63 + m.b65 >= 0)
m.c182 = Constraint(expr= - m.b58 + m.b66 >= 0)
m.c183 = Constraint(expr= - m.b59 + m.b67 >= 0)
m.c184 = Constraint(expr= m.b52 + m.b54 - m.b56 >= 0)
m.c185 = Constraint(expr= m.b53 + m.b55 - m.b57 >= 0)
m.c186 = Constraint(expr= m.b52 + m.b54 - m.b58 >= 0)
m.c187 = Constraint(expr= m.b53 + m.b55 - m.b59 >= 0)
m.c188 = Constraint(expr= m.b52 + m.b54 - m.b60 >= 0)
m.c189 = Constraint(expr= m.b53 + m.b55 - m.b61 >= 0)
m.c190 = Constraint(expr= m.b56 - m.b62 >= 0)
m.c191 = Constraint(expr= m.b57 - m.b63 >= 0)
m.c192 = Constraint(expr= m.b56 - m.b64 >= 0)
m.c193 = Constraint(expr= m.b57 - m.b65 >= 0)
m.c194 = Constraint(expr= m.b58 - m.b66 >= 0)
m.c195 = Constraint(expr= m.b59 - m.b67 >= 0)
m.c196 = Constraint(expr= m.b60 - m.b68 >= 0)
m.c197 = Constraint(expr= m.b61 - m.b69 >= 0)
m.c198 = Constraint(expr= m.b60 - m.b70 >= 0)
m.c199 = Constraint(expr= m.b61 - m.b71 >= 0)
| [
"feligongcity17@gmail.com"
] | feligongcity17@gmail.com |
44b39f3df171238655777f1d4aad0ccfdf832114 | 51e19a9bd716d33bf675aa852c460d9cad8b5727 | /ms/python_files/result_check.py | 2e96ca14acd8610c28447cc1896398db9084e397 | [] | no_license | nichohelmut/football_results | 1b039af1366e543a52d6682ea5ef5d392cd23d17 | 3b4f5b7ab72e31298a61e8b7116817f6180c5102 | refs/heads/master | 2023-01-31T21:50:00.665000 | 2020-12-10T16:01:44 | 2020-12-10T16:01:44 | 253,249,586 | 5 | 3 | null | 2020-12-10T16:01:45 | 2020-04-05T14:07:08 | Jupyter Notebook | UTF-8 | Python | false | false | 2,962 | py | import pandas as pd
from database import MySQLDatabase
from auto_download.footy_download import FootyStats
import os
import sys
from sqlalchemy import create_engine
import time
from datetime import date
# Directory layout anchored at this file's location:
#   <repo>/ms/python_files/  (PATH)  ->  <repo>/ms/  (BASE_PATH)
PATH = os.path.dirname(os.path.abspath(__file__))
BASE_PATH = os.path.dirname(PATH)
path_to_pickle = os.path.join(BASE_PATH, "pickle_files")
path_to_match = os.path.join(BASE_PATH, "germany_stats/match_stats")
# Current-season (2020/21) match statistics CSVs live here.
path_to_actual = os.path.join(path_to_match, "match_stats_20_21")
# if date.today().weekday() == 0:
# # change to os
# footy = FootyStats(
# path='/ms/germany_stats/match_stats/match_stats_20_21//')
# footy.login()
# footy.clean_dir()
# footy.csv_match_actual()
class ResultCheck:
    """Compare stored bookie predictions against actual Bundesliga results
    and write the real outcomes back into the MySQL ``bookie`` table.

    Result encoding: 3 = home win, 2 = draw, 1 = away win.
    """

    def __init__(self):
        # Season 2020/21 match statistics exported from FootyStats.
        self.results = pd.read_csv(os.path.join(path_to_actual, "germany-bundesliga-matches-2020-to-2021-stats.csv"))
        try:
            # RDS connection parameters from the environment.
            # NOTE(review): os.getenv never raises -- a missing variable just
            # yields None and only fails later when the engine URL is built,
            # so this except branch is effectively dead.
            self.dbname = os.getenv("RDS_1_DB_NAME")
            self.host = os.getenv("RDS_1_HOST")
            self.port = '3306'
            self.user = os.getenv("RDS_1_USER")
            self.pwd = os.getenv("RDS_1_PASSWORD")
        except Exception as e:
            print("Error: {}".format(str(e)))
            sys.exit(1)

    def read_from_db(self):
        """Load the ``bookie`` table; also exposes the DB handle as a global."""
        global db
        db = MySQLDatabase()
        df = db.get('bookie')
        return df

    def actual_results(self):
        """Return the match stats with a numeric ``result`` column filled in
        for completed matches (3/2/1, see class docstring)."""
        df = self.results
        # Shift the CSV's index so it lines up with the ids stored in the
        # bookie table.  TODO confirm 1224 stays valid across seasons.
        df.index = df.index + 1224
        df['goal_diff'] = df['home_team_goal_count'] - df['away_team_goal_count']
        # Only completed matches get a result; the row payload itself is not
        # needed, just the index (was iterrows() with an unused row).
        for index in df[df['status'] == 'complete'].index:
            if df['goal_diff'][index] > 0:
                df.at[index, 'result'] = 3
            elif df['goal_diff'][index] == 0:
                df.at[index, 'result'] = 2
            else:
                df.at[index, 'result'] = 1
        return df

    def update_mysql(self):
        """Push the real results of the last 9 matches into ``bookie`` via a
        temporary table joined on id."""
        print("Connecting to Database")
        url = f'mysql+pymysql://{self.user}:{self.pwd}@{self.host}:{self.port}/{self.dbname}'
        engine = create_engine(url)
        df = self.read_from_db()
        # Only the most recent matchday (9 Bundesliga fixtures) is updated.
        df = df.tail(9)
        df_actual_rows = self.actual_results().loc[list(df['index'])]
        l_result_last_game = list(df_actual_rows['result'].astype('Int64'))
        df['real_result'] = l_result_last_game
        try:
            # 'level_0' only exists if an upstream reset_index added it.
            df.drop('level_0', axis=1, inplace=True)
        except KeyError:
            # Was a bare except; DataFrame.drop raises KeyError for a
            # missing label, so catch exactly that.
            pass
        # Timestamp rows in Amsterdam local time.
        os.environ['TZ'] = 'Europe/Amsterdam'
        time.tzset()
        df["date_time"] = time.strftime('%X %x %Z')
        df.to_sql('my_temp', con=engine, if_exists='replace')
        sql = """
        UPDATE bookie
        INNER JOIN my_temp ON bookie.id = my_temp.id
        set bookie.real_result = my_temp.real_result, bookie.date_time = my_temp.date_time
        WHERE bookie.id = my_temp.id
        """
        with engine.begin() as conn:
            conn.execute(sql)
        print("Successfully updated Bookie table with real results")
| [
"nicholasutikal@gmail.com"
] | nicholasutikal@gmail.com |
0ec3a612342b6999c627497f0a8788d608044816 | 8c2de4da068ba3ed3ce1adf0a113877385b7783c | /hyp_utils/kaldi/steps/nnet3/report/summarize_compute_debug_timing.py | 5c74eaf128c5da16eeba7964877e3bae00778d07 | [
"Apache-2.0"
] | permissive | hyperion-ml/hyperion | a024c718c4552ba3a03aae2c2ca1b8674eaebc76 | c4c9eee0acab1ba572843373245da12d00dfffaa | refs/heads/master | 2023-08-28T22:28:37.624139 | 2022-03-25T16:28:08 | 2022-03-25T16:28:08 | 175,275,679 | 55 | 20 | Apache-2.0 | 2023-09-13T15:35:46 | 2019-03-12T18:40:19 | Python | UTF-8 | Python | false | false | 4,357 | py | #!/usr/bin/env python
# Copyright 2016 Vijayaditya Peddinti.
# Apache 2.0.
# we're using python 3.x style print but want it to work in python 2.x,
from __future__ import print_function
from __future__ import division
import sys
import re
import argparse
# expects the output of nnet3*train with --computation-debug=true
# will run faster if just the lines with "DebugAfterExecute" are provided
# <train-command> |grep DebugAfterExecute | steps/nnet3/report/summarize_compute_debug_timing.py
def GetArgs():
parser = argparse.ArgumentParser(description="Summarizes the timing info from nnet3-*-train --computation.debug=true commands ")
parser.add_argument("--node-prefixes", type=str,
help="list of prefixes. Execution times from nnet3 components with the same prefix"
" will be accumulated. Still distinguishes Propagate and BackPropagate commands"
" --node-prefixes Lstm1,Lstm2,Layer1", default=None)
print(' '.join(sys.argv), file=sys.stderr)
args = parser.parse_args()
if args.node_prefixes is not None:
raise NotImplementedError
# this will be implemented after https://github.com/kaldi-asr/kaldi/issues/944
args.node_prefixes = args.node_prefixes.split(',')
else:
args.node_prefixes = []
return args
# get opening bracket position corresponding to the last closing bracket
def FindOpenParanthesisPosition(string):
string = string.strip()
if string[-1] != ")":
# we don't know how to deal with these strings
return None
string_index = len(string) - 1
closing_parans = []
closing_parans.append(string_index)
string_index -= 1
while string_index >= 0:
if string[string_index] == "(":
if len(closing_parans) == 1:
# this opening bracket corresponds to the last closing bracket
return string_index
else:
closing_parans.pop()
elif string[string_index] == ")":
closing_parans.append(string_index)
string_index -= 1
raise Exception("Malformed string: Could not find opening paranthesis\n\t{0}".format(string))
# input : LOG (nnet3-chain-train:DebugAfterExecute():nnet-compute.cc:144) c68: BLstm1_backward_W_i-xr.Propagate(NULL, m6212(3136:3199, 0:555), &m31(0:63, 0:1023))
# output : BLstm1_backward_W_i-xr.Propagate
def ExtractCommandName(command_string):
# create a concise representation for the the command
# strip off : LOG (nnet3-chain-train:DebugAfterExecute():nnet-compute.cc:144)
command = " ".join(command_string.split()[2:])
# command = c68: BLstm1_backward_W_i-xr.Propagate(NULL, m6212(3136:3199, 0:555), &m31(0:63, 0:1023))
end_position = FindOpenParanthesisPosition(command)
if end_position is not None:
command = command[:end_position]
# command = c68: BLstm1_backward_W_i-xr.Propagate
command = ":".join(command.split(":")[1:]).strip()
# command = BLstm1_backward_W_i-xr.Propagate
return command
def Main():
# Sample Line
# LOG (nnet3-chain-train:DebugAfterExecute():nnet-compute.cc:144) c128: m19 = [] | | time: 0.0007689 secs
debug_regex = re.compile("DebugAfterExecute")
command_times = {}
for line in sys.stdin:
parts = line.split("|")
if len(parts) != 3:
# we don't know how to deal with these lines
continue
if debug_regex.search(parts[0]) is not None:
# this is a line printed in the DebugAfterExecute method
# get the timing info
time_parts = parts[-1].split()
assert(len(time_parts) == 3 and time_parts[-1] == "secs" and time_parts[0] == "time:" )
time = float(time_parts[1])
command = ExtractCommandName(parts[0])
# store the time
try:
command_times[command] += time
except KeyError:
command_times[command] = time
total_time = sum(command_times.values())
sorted_commands = sorted(command_times.items(), key = lambda x: x[1], reverse = True)
for item in sorted_commands:
print("{c} : time {t} : fraction {f}".format(c=item[0], t=item[1], f=float(item[1]) / total_time))
if __name__ == "__main__":
args = GetArgs()
Main()
| [
"jesus.antonio.villalba@gmail.com"
] | jesus.antonio.villalba@gmail.com |
0451ab6c75d806bc370d17a1356de4bb5437faf0 | 1e0ae1f039668a65e480065d671235fc0fff9b52 | /django19day/app01/views/home.py | 50e254c58f308631dd25e2745daad307c072c79f | [] | no_license | aixocm/svndata | a4da91c3c9e1d376abfd46e7cecc3c5c2e340e83 | ee205301f3a1ce11acef98bba927877cb7c4fb0b | refs/heads/master | 2021-01-21T04:39:41.607117 | 2016-07-01T01:48:36 | 2016-07-01T01:48:36 | 47,066,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from django.shortcuts import render
from app01.forms import home as HomeForm
from app01 import models
def index(request):
    # Demo view (Python 2 / Django): exercises the SimpleModel query API and
    # renders the import form.  The prints below illustrate the three
    # QuerySet flavours and go to the dev-server console.
    # models.UserInfo.objects.all().delete()
    # models.UserInfo.objects.create(name="JJJJJ")
    #
    # after = models.UserInfo.objects.all()
    # print after[0].ctime
    # dic = {'username':'alex','password':'123'}
    # models.SimpleModel.objects.create(**dic)
    ret = models.SimpleModel.objects.all()
    print ret,type(ret)  # QuerySet of model instances
    ret = models.SimpleModel.objects.all().values('username')
    print ret,type(ret)  # QuerySet of dicts: [{'username': ...}, ...]
    ret = models.SimpleModel.objects.all().values_list('id','username')
    print ret,type(ret)  # QuerySet of (id, username) tuples
    obj = HomeForm.ImportForm()
    return render(request,'home/index.html',{'obj':obj})
def upload(request):
    # Handle a multipart upload (field name 'file_name') by streaming the
    # file to disk in chunks; GET just renders the upload page.
    if request.method == "POST":
        inp_post = request.POST  # NOTE(review): unused
        inp_files = request.FILES
        file_obj = inp_files.get('file_name')  # None if the field is missing -- TODO confirm callers always send it
        print file_obj.name
        print file_obj.size
        # SECURITY NOTE(review): file_obj.name is client-controlled and used
        # verbatim as a filesystem path (relative to the CWD) -- allows
        # overwriting arbitrary files / path traversal.  Sanitize the name.
        f=open(file_obj.name,'wb')
        for line in file_obj.chunks():
            f.write(line)
        f.close()
    return render(request,'home/upload.html')
| [
"1755897532@qq.com"
] | 1755897532@qq.com |
5671e0966d602e6fe9d571de2403f63157d82dfb | 200682f84e0b72bab9475c9ac39e927a607abf2e | /samples/client/petstore/python/test/test_map_test.py | 913339478015cdaf2c717e6683658b1dc991125a | [
"Apache-2.0"
] | permissive | paveI-fando/swagger-codegen | 4ddba5ac6e65e9acc60cab52909eb165fefc0f6d | 1ba93b5cdda481a2630fffbd4d1ca436b9e37a46 | refs/heads/3.0.0 | 2021-06-30T15:10:00.633706 | 2021-06-08T12:19:05 | 2021-06-08T12:19:05 | 238,979,476 | 0 | 2 | NOASSERTION | 2021-06-08T12:19:39 | 2020-02-07T16:55:42 | Java | UTF-8 | Python | false | false | 948 | py | # coding: utf-8
"""
Swagger Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
OpenAPI spec version: 1.0.0
Contact: apiteam@swagger.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from models.map_test import MapTest # noqa: E501
from swagger_client.rest import ApiException
class TestMapTest(unittest.TestCase):
    """MapTest unit test stubs"""
    # Auto-generated scaffold (swagger-codegen): lifecycle hooks and the test
    # body are intentionally empty until example values are filled in below.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testMapTest(self):
        """Test MapTest"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.map_test.MapTest()  # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
| [
"hugo.mario.mercado@gmail.com"
] | hugo.mario.mercado@gmail.com |
f7fa8ad78741406c7c875f36cd8a743a697abc07 | b617eb5fb89ce10f62a9b673bfc2b2e2e062d820 | /security.py | 3f81f886b12be10f725055b54f60a03d7cd8085a | [] | no_license | BachirBelkhiri/FLASK_API_Test | d5c34e0c9ce5201a76fbd290a0b446bba1d4f124 | 8e3b137ce9a0a239a04a5f74de54329441a3a493 | refs/heads/master | 2021-06-17T11:44:00.910934 | 2021-02-09T14:57:30 | 2021-02-09T14:57:30 | 161,979,706 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | from werkzeug.security import safe_str_cmp
from models.user import UserModel
def authenticate(username, password):
    """Flask-JWT authentication handler: return the matching user when the
    password checks out, otherwise fall through to None (login rejected).

    NOTE(review): comparing safe_str_cmp against user.password directly
    implies passwords are stored in plaintext -- consider hashing.
    """
    candidate = UserModel.find_by_username(username)
    if candidate is None:
        return None
    return candidate if safe_str_cmp(candidate.password, password) else None
def identity(payload):
    """Flask-JWT identity handler: resolve the user referenced by the
    decoded token payload's 'identity' claim."""
    return UserModel.find_by_id(payload['identity'])
| [
"belkhiribachir16@gmail.com"
] | belkhiribachir16@gmail.com |
f2853294fe31e8ef124e155ab13b9922fd4f73fe | 4127d5fe46ef305f9436b1860259f9062ca1d513 | /chat/migrations/0002_auto_20200327_0542.py | fad84ca2fb91eeefba662ba22c1be37b913978dd | [] | no_license | dlee0528/Apricity | 397ab05a72929c1907aeb8ee3334f90c052f964d | 5d2e8be4f07f4fec5cfbab4d654477577c1b5b2f | refs/heads/master | 2022-10-07T07:52:34.685418 | 2020-06-06T01:50:30 | 2020-06-06T01:50:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | # Generated by Django 2.0.7 on 2020-03-27 05:42
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated: removes the Message.author FK first, then drops the
    # chat.Message model itself.
    # NOTE(review): do not hand-edit a migration that may already be applied;
    # add a new migration instead.
    dependencies = [
        ('chat', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='message',
            name='author',
        ),
        migrations.DeleteModel(
            name='Message',
        ),
    ]
| [
"noreply@github.com"
] | dlee0528.noreply@github.com |
0f8157f6084da4a3f7bcb1b92dfb268e86e28536 | c8908acff3b51a963a742043da66e33915c77114 | /desafio38.py | b827b6661839e2d411866c41a2ae0e14ea6ca78c | [] | no_license | Alm3ida/resolucaoPythonCursoEmVideo | a56dd9829f78130e01ebf91b111dcb6e00faf480 | 06def95453f995a7133f258c7d548df4d96c7698 | refs/heads/main | 2023-02-22T14:39:40.231619 | 2021-01-17T13:48:40 | 2021-01-17T13:48:40 | 324,021,543 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | """
Escreva um programa que leia dois inteiros e compare-os, mostrando na tela uma mensagem:
- O primeiro valor é maior
- O segundo valor é maior
- Não existe valor maior, os dois são iguais.
"""
# Read the two integers to compare (prompts are in Portuguese, as required
# by the exercise).
a = int(input('Digite o primeiro valor: '))
b = int(input('Digite o segundo valor: '))
# Three-way comparison: report which value is larger, or a tie.
if a > b:
    print('O primeiro valor ({}) é maior' .format(a))
elif a < b:
    print('O segundo valor ({}) é maior' .format(b))
else:
    print('Não existe valor maior, os dois são iguais')
| [
"marcus.almeidaif@gmail.com"
] | marcus.almeidaif@gmail.com |
5dc3e8334c7270bfff0fe1b60df56f9f59fa426b | 622d5fbb832e7d4682bf65728dffd7521c170801 | /app_middleware/forms_user.py | e93ece6ed0b15f565840409ea50790420d84ee7a | [] | no_license | ekoabdulaziz96/django_app_starter | bf2d73a94c7d2e22003d709d8ddb6f6000e90113 | e201be3a5beb429abe7f4c49c66078b384f3a235 | refs/heads/master | 2022-12-28T18:09:58.391447 | 2020-10-19T02:39:44 | 2020-10-19T02:39:44 | 305,242,422 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,336 | py | # membuat form
from django import forms
# from django.contrib.auth.models import User as UserModel
from .models import User as UserModel
class UserForm(forms.ModelForm):
    """ModelForm for creating/editing application users.

    The password field is rendered read-only and pre-filled with a fixed
    default; real credential management presumably happens elsewhere.
    NOTE(review): the label advertises the default as "default_password"
    while the widget actually pre-fills 'indonesiapower_expert' -- confirm
    which default is intended and make the two agree.

    (Large blocks of commented-out fields/labels/widgets for the old
    is_superuser/is_staff/is_kprodi/is_dosen/is_assesor flags were removed;
    see VCS history if they need to be restored.)
    """
    error_css_class = 'error'

    class Meta:
        model = UserModel
        # Only these model fields are exposed on the form.
        fields = (
            'first_name',
            'last_name',
            'username',
            'email',
            'password',
            'role',
        )
        labels = {
            'first_name': 'Nama Depan',
            'last_name': 'Nama Belakang',
            'password': 'Password default: "default_password"',
        }
        widgets = {
            'first_name': forms.TextInput(
                attrs={
                    'class': 'form-control',
                    'placeholder': 'nama depan',
                    'required': 'required',
                }
            ),
            'last_name': forms.TextInput(
                attrs={
                    'class': 'form-control',
                    'placeholder': 'nama belakang',
                    'required': 'required',
                }
            ),
            'username': forms.TextInput(
                attrs={
                    'class': 'form-control',
                    'placeholder': 'nama_depan_belakang',
                }
            ),
            'email': forms.TextInput(
                attrs={
                    'class': 'form-control',
                    'placeholder': 'nama@gmail.com',
                }
            ),
            # Fixed, read-only password pre-filled client-side.
            'password': forms.PasswordInput(
                attrs={
                    'class': 'form-control',
                    'value': 'indonesiapower_expert',
                    'readonly': 'readonly',
                }
            ),
            'role': forms.Select(
                attrs={
                    'class': 'form-control',
                    'required': 'required',
                }
            ),
        }
"azizeko12undip@gmail.com"
] | azizeko12undip@gmail.com |
9a2c6feb48de21d40f417f73171cf199568edb90 | 1614cb038f297405edb86871e4035608881ce811 | /QHack/circuit_training_100_template/circuit_training_100_template.py | 5fe5c5630e3ac1248aea11023585f67db50d3c8f | [] | no_license | worog/sandbox | 33bbd430d1e10637443949eb49229a85bbcc7cde | bf169a9f65747f4e50bb970757cd4f77d31924b9 | refs/heads/master | 2021-06-16T23:44:29.112481 | 2021-04-20T16:59:30 | 2021-04-20T16:59:30 | 195,855,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,364 | py | #! /usr/bin/python3
import sys
import pennylane as qml
from pennylane import numpy as np
# DO NOT MODIFY any of these parameters
WIRES = 2
LAYERS = 5
NUM_PARAMETERS = LAYERS * WIRES * 3
def optimize_circuit(params):
"""Minimize the variational circuit and return its minimum value.
The code you write for this challenge should be completely contained within this function
between the # QHACK # comment markers. You should create a device and convert the
variational_circuit function into an executable QNode. Next, you should minimize the variational
circuit using gradient-based optimization to update the input params. Return the optimized value
of the QNode as a single floating-point number.
Args:
params (np.ndarray): Input parameters to be optimized, of dimension 30
Returns:
float: the value of the optimized QNode
"""
optimal_value = 0.0
# QHACK #
# Initialize the device
# dev = ...
dev = qml.device("default.qubit", wires=2)
#steps =200
# Instantiate the QNode
circuit = qml.QNode(variational_circuit, dev)
# Natural gradient descent
gd_cost = []
opt = qml.MomentumOptimizer(stepsize=0.01, momentum=0.9)
steps = 500
theta = params
for _ in range(steps):
theta = opt.step(circuit, theta)
gd_cost.append(circuit(theta))
'''
qng_cost = []
opt = qml.QNGOptimizer(0.01)
theta = params
for _ in range(steps):
theta = opt.step(circuit, theta)
qng_cost.append(circuit(theta))
# Minimize the circuit
def cost(x):
return np.abs(circuit(x) - 0.5)**2
# opt = qml.GradientDescentOptimizer(stepsize=0.4)
opt = qml.AdamOptimizer(stepsize=0.01, beta1=0.9, beta2=0.99, eps=1e-08)
steps = 200
for i in range(steps):
# update the circuit parameters
params = opt.step(cost, params)
'''
optimal_value = circuit(theta)
# QHACK #
# Return the value of the minimized QNode
return optimal_value
def variational_circuit(params):
"""
# DO NOT MODIFY anything in this function! It is used to judge your solution.
This is a template variational quantum circuit containing a fixed layout of gates with variable
parameters. To be used as a QNode, it must either be wrapped with the @qml.qnode decorator or
converted using the qml.QNode function (as shown above).
The output of this circuit is the expectation value of a Hamiltonian. An unknown Hamiltonian
will be used to judge your solution.
Args:
params (np.ndarray): An array of optimizable parameters of shape (30,)
"""
parameters = params.reshape((LAYERS, WIRES, 3))
qml.templates.StronglyEntanglingLayers(parameters, wires=range(WIRES))
return qml.expval(qml.Hermitian(hamiltonian, wires=[0, 1]))
if __name__ == "__main__":
# DO NOT MODIFY anything in this code block
# Load and process Hamiltonian data
hamiltonian = sys.stdin.read()
hamiltonian = hamiltonian.split(",")
hamiltonian = np.array(hamiltonian, float).reshape((2 ** WIRES, 2 ** WIRES))
# Generate random initial parameters
np.random.seed(1967)
initial_params = np.random.random(NUM_PARAMETERS)
minimized_circuit = optimize_circuit(initial_params)
print(f"{minimized_circuit:.6f}")
| [
"wojrog@hotmail.com"
] | wojrog@hotmail.com |
51e92e9a9d300abdec99114e6f531197f2a74889 | 5f118d46fea69b2bacce929f496e9daba807900a | /object_detect/detection.py | f26adce7e038c593b616502888fead14e48ec4da | [] | no_license | shuvamdas/Social-Distance-Detector | aaf106708b980dea06d0b948f7541ee951192e16 | 8ec7dc34651bec338d0f70a963ed3eead7f15eb0 | refs/heads/master | 2022-11-16T05:53:14.663577 | 2020-07-12T07:08:38 | 2020-07-12T07:08:38 | 268,723,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,240 | py | from .social_distancing_config import NMS_THRESH
from .social_distancing_config import MIN_CONF
import numpy as np
import cv2
def detect_people(frame, net, ln, personIdx=0):
(H, W) = frame.shape[:2]
results = []
blob = cv2.dnn.blobFromImage(frame, 1 / 255.0, (416, 416),
swapRB=True, crop=False)
net.setInput(blob)
layerOutputs = net.forward(ln)
boxes = []
centroids = []
confidences = []
for output in layerOutputs:
for detection in output:
scores = detection[5:]
classID = np.argmax(scores)
confidence = scores[classID]
if classID == personIdx and confidence > MIN_CONF:
box = detection[0:4] * np.array([W, H, W, H])
(centerX, centerY, width, height) = box.astype("int")
x = int(centerX - (width / 2))
y = int(centerY - (height / 2))
boxes.append([x, y, int(width), int(height)])
centroids.append((centerX, centerY))
confidences.append(float(confidence))
idxs = cv2.dnn.NMSBoxes(boxes, confidences, MIN_CONF, NMS_THRESH)
if len(idxs) > 0:
for i in idxs.flatten():
(x, y) = (boxes[i][0], boxes[i][1])
(w, h) = (boxes[i][2], boxes[i][3])
r = (confidences[i], (x, y, x + w, y + h), centroids[i])
results.append(r)
return results
| [
"dasshuvam18@gmail.com"
] | dasshuvam18@gmail.com |
383bbc0120928044773563b94ef74c6c0dd20adc | bc5e8c69de5c70c14913c98c8df3bfc4b5bdee12 | /tests/test_test.py | eddbe4474e9e0d9e2f39bc6d0f61ac3b7fc66024 | [
"MIT"
] | permissive | victorromeo/AudioMoth-Sync | 8f9b38cb6c376146a69914ca93409e3b48f41fbc | 5ab1512d9b00f725db4d3ba5f146d42babb8389f | refs/heads/master | 2022-04-03T17:54:16.655562 | 2020-01-30T06:13:41 | 2020-01-30T06:13:41 | 220,343,792 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | import unittest
class TestTest(unittest.TestCase):
def test_test(self):
self.assertEqual("spam".upper(), "SPAM") | [
"victorromeo@github.com"
] | victorromeo@github.com |
a48ca7092695e60baeadc6908bb6eb171c5604c0 | d28750f8cdbedc57e8e272fe53ee730f9135aa53 | /train.py | 95ab613ed56a66208a2d8e48c1c90e8505e678ae | [] | no_license | alchemistwu/CSI5137Project | 2fabeaf606064214bd33e8108727ef7265d7a05a | 720cde6cda8a3598c94a252db3e0797e662cf065 | refs/heads/main | 2023-02-03T21:43:15.290675 | 2020-12-22T04:46:07 | 2020-12-22T04:46:07 | 315,683,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,572 | py | from model_utils import *
import matplotlib.pyplot as plt
import numpy as np
from tensorflow.keras.callbacks import CSVLogger, EarlyStopping
import os
import shutil
import argparse
class SaveBestModel(tf.keras.callbacks.Callback):
"""
Callbacks for saving the model with lowest val_acc
"""
def __init__(self, filepath, model_name, monitor='val_acc'):
super(SaveBestModel, self).__init__()
self.model_name = model_name
self.best_weights = None
self.file_path = filepath
self.best = -float('inf')
self.monitor = monitor
def on_epoch_end(self, epoch, logs=None):
current = logs.get(self.monitor)
if not current:
current = -float('inf')
if np.less(self.best, current):
self.best = current
self.best_weights = self.model.get_weights()
if not os.path.exists(self.file_path):
os.mkdir(self.file_path)
new_path = os.path.join(self.file_path, self.model_name)
if not os.path.exists(new_path):
os.mkdir(new_path)
else:
shutil.rmtree(new_path)
new_path_model = os.path.join(new_path, 'model.tf')
self.model.save_weights(new_path_model)
print("New best model has been saved in %s!" % new_path_model)
print("Best acc: %.4f" % current)
def get_tfDataset(training_directory, test_directory, verbose=False, batch_size=32):
"""
loading directories into tensorflow dataset format
:param training_directory:
:param test_directory:
:param verbose: if Ture, showing some sample from both training and testing dataset.
:param batch_size:
:return: tensorflow dataset object
"""
dataTrain = tf.keras.preprocessing.image_dataset_from_directory(
training_directory,
labels="inferred",
label_mode="int",
color_mode="rgb",
batch_size=batch_size,
image_size=(256, 256),
shuffle=True,
seed=1,
validation_split=None,
subset=None,
interpolation="bilinear",
follow_links=False,
)
dataTrain = dataTrain.map(lambda x, y: (x / 255., y))
dataTest = tf.keras.preprocessing.image_dataset_from_directory(
test_directory,
labels="inferred",
label_mode="int",
color_mode="rgb",
batch_size=batch_size,
image_size=(256, 256),
shuffle=False,
seed=1,
validation_split=None,
subset=None,
interpolation="bilinear",
follow_links=False,
)
dataTest = dataTest.map(lambda x, y: (x / 255., y))
if verbose:
plt.figure(figsize=(10, 10))
for i, (images, labels) in enumerate(dataTrain.take(1)):
for j in range(9):
ax = plt.subplot(3, 3, j + 1)
image = images[j, :, :, :]
label = labels[j]
plt.imshow(image, cmap='gray', vmin=0., vmax=1.)
plt.title(int(label))
plt.axis("off")
plt.show()
plt.figure(figsize=(10, 10))
for i, (images, labels) in enumerate(dataTest.take(1)):
for j in range(9):
ax = plt.subplot(3, 3, j + 1)
image = images[j, :, :, :]
label = labels[j]
plt.imshow(image, cmap='gray', vmin=0., vmax=1.)
plt.title(int(label))
plt.axis("off")
plt.show()
return dataTrain, dataTest
def train(train_directory, test_directory, model_name='res', epoch=100, batch_size=64, multi_gpu=True, pretrain=False,
row=False):
if not os.path.exists('logs'):
os.mkdir('logs')
if row:
train_directory += "row"
test_directory += "row"
dataTrain, dataTest = get_tfDataset(train_directory, test_directory, batch_size=batch_size)
dataTrain = dataTrain.cache()
dataTrain = dataTrain.prefetch(tf.data.experimental.AUTOTUNE)
dataTest = dataTest.cache()
dataTest = dataTest.prefetch(tf.data.experimental.AUTOTUNE)
if multi_gpu:
strategy = tf.distribute.MirroredStrategy()
else:
strategy = tf.distribute.OneDeviceStrategy(device="/gpu:0")
with strategy.scope():
model = get_model(name=model_name, pretrain=True, target_size=256, n_class=9)
if row:
model_name += "_row"
if pretrain:
model_name += "_pretrain"
model.fit(dataTrain, epochs=epoch, validation_data=dataTest, verbose=1, batch_size=batch_size,
callbacks=[SaveBestModel('model', model_name), CSVLogger('logs/%s.csv' % (model_name)),
EarlyStopping(monitor='val_loss', patience=10)])
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-single_gpu', type=bool,
help='Training with single GPU support, default is multiple gpu',
dest='multi_gpu', const=False, default=True, nargs='?')
parser.add_argument('-pretrain', type=bool,
help='Initialize the model with ImageNet pretrained weights',
dest='pretrain', const=True, default=False, nargs='?')
parser.add_argument('-row', type=bool,
help='use the row wise entropy comparison',
dest='row', const=True, default=False, nargs='?')
parser.add_argument('-train_dir', type=str,
help='Training directory',
dest='train_dir', default="../data/train_imgs")
parser.add_argument('-test_dir', type=str,
help='Test directory',
dest='test_dir',
default="../data/test_imgs")
parser.add_argument('-batch_size', type=int,
help='batch size',
dest='batch_size',
default=32)
parser.add_argument('-epoch', type=int,
help='number of epochs',
dest='epoch',
default=100)
parser.add_argument('-model', type=str,
help=''' Models to be used, options are: 'res','vgg','googLeNet','dense','mobile', defaut is 'res' ''',
dest='model',
default='res')
args = parser.parse_args()
train(args.train_dir, args.test_dir, multi_gpu=args.multi_gpu,
batch_size=args.batch_size, epoch=args.epoch, model_name=args.model, pretrain=args.pretrain, row=args.row)
| [
"alchemistWu0521@gmail.com"
] | alchemistWu0521@gmail.com |
9db683d8b067434b88efd33b4bffcc4dda485d6e | ab46d158ee8fc6ee6a7b98ca738800e307c8a431 | /Lab_7/Jason_Burke_Lab7b.py | 433d4e0ab69f28d718d0d7af099cc5a519adc58b | [] | no_license | JasonPBurke/Intro-to-Python-class | bc038b72125114efce5ba8f2977679d3b26b559f | b1a9ec79f6091cafb517a9cdba010ce5b10e5e60 | refs/heads/master | 2021-05-14T14:12:48.310390 | 2018-01-02T02:50:32 | 2018-01-02T02:50:32 | 115,967,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,929 | py |
# This program allows you to input and save student
# names to a file
#set a constant for the number of students
student_no = 12
def main():
#create an empty list
students = []
#create an accumulator and prime the while loop
count = 0
#get the user to add students to the list if they want to
while count < student_no:
print('Please enter a student name:')
stu_name = str(input())
#append the students list
students.append(stu_name)
#iterate the accumulator
count += 1
#run the edit_list module and pass in the
#students as an arguement
edit_list(students)
# Open a file to write to
outfile = open('names.txt', 'w')
#write the list to file
for name in students:
outfile.write(name + '\n')
#close the file
outfile.close()
#call the read_list function
print('Here is a list of the contents of the names.txt file:')
read_list()
#define the read list function
def read_list():
infile = open('names.txt', 'r')
#read the contents of names.txt to a list
names_list = infile.readlines()
# Close the file
infile.close()
#strip the \n from each element
index = 0
while index < len(names_list):
names_list[index] = names_list[index].rstrip('\n')
index += 1
#print the contents
print(names_list)
#convert list to tuple
names_tuple = tuple(names_list)
#print (names_tuple)
# Define the edit_list function to sort, reverse, append and insert
# data to the file.
def edit_list(stu_list):
#sort the list alphabetical and then again in reverse order
stu_list.sort()
stu_list.reverse()
# Append the list with the teachers name and insert
# my name at the beginnig of the list
stu_list.append('Polanco')
stu_list.insert(0, 'Burke')
return stu_list
main()
| [
"jpburke77@gmail.com"
] | jpburke77@gmail.com |
72004b00787472a24915c8775c3fad4502e0700f | 2bd0f22053d92f57049a543b2d94584f755e410d | /project/user/models.py | 8628b87a61eca03fc0feeea6a97cb7e5ae83c5e3 | [] | no_license | leanh29/Health_Plus | 952df7e17ab5c63180b28bf18dbb0a07b2ae0818 | 30d46f7f9df6768ce37343016faebdaaf4b57412 | refs/heads/master | 2023-02-05T10:22:29.401210 | 2020-12-24T15:12:34 | 2020-12-24T15:12:34 | 282,677,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | from django.db import models
from django.contrib.auth.models import User
from PIL import Image
class ProfileModel(models.Model):
SEXUAL_CHOICES = (
('m', 'Male'),
('f', 'Female'),
)
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='profile')
image = models.ImageField(default='profile_pics/default.png', upload_to='profile_pics')
sexual = models.CharField(max_length=1, choices=SEXUAL_CHOICES, null=True)
birthday = models.DateField(null=True)
def __str__(self):
return f'{self.user.username} Profile'
def save(self, force_insert=False,force_update=False, using=None, update_fields=None):
super().save(force_insert,force_update, using, update_fields)
img = Image.open(self.image.path)
if img.height > 300 or img.width > 300:
output_size = (300,300)
img.thumbnail(output_size)
img.save(self.image.path)
| [
"48637725+leanh29@users.noreply.github.com"
] | 48637725+leanh29@users.noreply.github.com |
1661717ff6cc5aec7bbbdaa61fc1b31e3267f18f | d15774ddd1b1b30b2d73585f0b3362cf42cb6b27 | /test1/test1/pipelines.py | c596da92ac243121fa2cd68f0a070c778c7f3503 | [] | no_license | hngfng/99lib-scrapy | 7c614684e6863bb3be563b272df1bf673f807b0b | 1c53089df8cd2de02e0876b9f795eb029ee0b932 | refs/heads/master | 2020-03-30T03:08:07.175691 | 2018-09-28T03:00:25 | 2018-09-28T03:00:25 | 150,670,893 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from _cffi_backend import callback
import scrapy
class Test1Pipeline(object):
def process_item(self, item, spider):
return item | [
"hngfng@sina.cn"
] | hngfng@sina.cn |
bfefb8f7dce5f5450783b6cdaaa8b63263777679 | 817e4d8a52407ec6ede659c7840679811803e1c5 | /0112-path-sum.py | ff05479b9293793bcf7b5bcf76814105b508460d | [] | no_license | kgremban/leetcode | 7320061c202bad30fe904939a6248d513e032e60 | 87b3ce9bd02f582892334005c04b9c5da21bf947 | refs/heads/master | 2020-08-11T09:31:08.492135 | 2020-03-20T22:48:30 | 2020-03-20T22:48:30 | 214,540,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | # Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def hasPathSum(self, root: TreeNode, sum: int) -> bool:
if not root:
return False
if not root.left and not root.right:
return root.val == sum
left = False
right = False
if root.left:
left = self.hasPathSum(root.left, (sum - root.val))
if root.right:
right = self.hasPathSum(root.right, (sum - root.val))
return left or right
def main():
sol = Solution()
root = TreeNode(5)
root.left = TreeNode(4)
root.right = TreeNode(8)
root.left.left = TreeNode(11)
root.right.left = TreeNode(13)
root.right.right = TreeNode(4)
root.left.left.left = TreeNode(7)
root.left.left.right = TreeNode(2)
root.right.right.right = TreeNode(1)
num = 22
print(sol.hasPathSum(root, num))
if __name__ == "__main__":
main() | [
"kgremban@microsoft.com"
] | kgremban@microsoft.com |
4eadce987312cc642bf7d10d5855eca2fdd2a8f7 | ddd35c693194aefb9c009fe6b88c52de7fa7c444 | /Live 10.1.18/novation/transport.py | cc9566884eb4863f6ff57a14a5556297de25949c | [] | no_license | notelba/midi-remote-scripts | 819372d9c22573877c7912091bd8359fdd42585d | e3ec6846470eed7da8a4d4f78562ed49dc00727b | refs/heads/main | 2022-07-30T00:18:33.296376 | 2020-10-04T00:00:12 | 2020-10-04T00:00:12 | 301,003,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,052 | py | # uncompyle6 version 3.7.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.8.5 (default, Aug 12 2020, 00:00:00)
# [GCC 10.2.1 20200723 (Red Hat 10.2.1-1)]
# Embedded file name: c:\Jenkins\live\output\Live\win_64_static\Release\python-bundle\MIDI Remote Scripts\novation\transport.py
# Compiled at: 2020-05-05 13:23:29
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import listens
from ableton.v2.control_surface.components import TransportComponent as TransportComponentBase
from ableton.v2.control_surface.control import ButtonControl, ToggleButtonControl
class TransportComponent(TransportComponentBase):
play_button = ToggleButtonControl(toggled_color=b'Transport.PlayOn', untoggled_color=b'Transport.PlayOff')
capture_midi_button = ButtonControl()
def __init__(self, *a, **k):
super(TransportComponent, self).__init__(*a, **k)
self._metronome_toggle.view_transform = lambda v: b'Transport.MetronomeOn' if v else b'Transport.MetronomeOff'
self.__on_can_capture_midi_changed.subject = self.song
self.__on_can_capture_midi_changed()
@play_button.toggled
def _on_play_button_toggled(self, is_toggled, _):
if is_toggled:
self.song.stop_playing()
self.song.is_playing = is_toggled
@capture_midi_button.pressed
def capture_midi_button(self, _):
try:
if self.song.can_capture_midi:
self.song.capture_midi()
except RuntimeError:
pass
@listens(b'can_capture_midi')
def __on_can_capture_midi_changed(self):
self.capture_midi_button.color = (b'Transport.Capture{}').format(b'On' if self.song.can_capture_midi else b'Off')
def _update_button_states(self):
super(TransportComponent, self)._update_button_states()
self.continue_playing_button.color = (b'Transport.Continue{}').format(b'Off' if self.song.is_playing else b'On')
# okay decompiling /home/deniz/data/projects/midiremote/Live 10.1.18/novation/transport.pyc
| [
"notelba@example.com"
] | notelba@example.com |
90a6a6ac9f3922e04813b1bd587d393dbf1cebca | 57513acbee10889f6803499cd4c20cfdcdbd2576 | /06.py | 13806dc43c7d69ee0e873bcac2d8c227a1092732 | [] | no_license | alanespinozaz/UNEMI-ESTRUCTURA-DE-DATOS | c0c1a5420033deaf0f54079befbbc6776cb2c3ce | 694633924c56370bf2a142b167cef54152000f7e | refs/heads/main | 2023-06-16T02:05:02.902495 | 2021-07-18T04:39:49 | 2021-07-18T04:39:49 | 375,894,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | """Funciones matematicas"""
import math
num1, num2, num, men = 12.572, 15.4, 4, '1234'
print(math.ceil(num1), '\t',math.floor(num1))
print(round(num1,1),'\t',type(num),'\t',type(men))
# funciones de cadenas
mensaje = "Hola" + "mundo " + "Python"
men1=mensaje.split()
men2=' '.join(men1)
print(mensaje[0],mensaje[0:4],men1,men2)
print(mensaje.find("mundo"), mensaje.lower())
# funciones de fecha
from datetime import datetime,timedelta,date
hoy, fdia = datetime.now(), date.today()
futuro = hoy + timedelta(days=30)
dif, aa, mm, dd = futuro - hoy, hoy.year, hoy.month, hoy.day
fecha = date(aa, mm, dd+2)
print(hoy, fdia, futuro, dif, fecha) | [
"noreply@github.com"
] | alanespinozaz.noreply@github.com |
b29d2f1ec8e055a57341fd8087b050e88968b806 | eab81ff76637addfbc969e3fddc29aac5e608c50 | /code/cracking-the-coding-interview/cap_10_sorting_and_searching/10.9.sorted_matrix_search.py | 3bd91b5ed1b877729d4f88750a05d28a7ea668b5 | [] | no_license | hadrizia/coding | f629a023a1a9a303ee0480c6c2708ea92403314b | a1e7304a57922428398291de49f3eff78f8e7e55 | refs/heads/master | 2021-11-11T10:16:05.478379 | 2021-11-05T15:26:43 | 2021-11-05T15:26:43 | 183,800,041 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | '''
Input :
mat = [
[1, 5, 9],
[14, 20, 21],
[30, 34, 43]
]
x = 14
Output : (1, 0)
Input :
mat = [
[1, 5, 9, 11],
[14, 20, 21, 26],
[30, 34, 43, 50]
]
x = 42
Output : -1
'''
def search(matrix, x):
if not matrix:
return
row = 0
col = len(matrix[0]) - 1
while row < len(matrix) and col >= 0:
if matrix[row][col] == x:
return (row, col)
elif matrix[row][col] > x:
col -= 1
else:
row += 1
return -1
def tests():
mat = [
[1, 5, 9],
[14, 20, 21],
[30, 34, 43]
]
assert search(mat, 14) == (1, 0)
mat2 = [
[1, 5, 9, 11],
[14, 20, 21, 26],
[30, 34, 43, 50]
]
assert search(mat2, 42) == -1
tests() | [
"hadrizia.santos@ccc.ufcg.edu.br"
] | hadrizia.santos@ccc.ufcg.edu.br |
633d15da8f8d292855e72ef1aadde8b8bc9e4c5a | 296ee7f58031d7c22fb92790b2cda5e881b5301a | /1st year/1st semester/FPLab/Assignment.09/controller.py | 35bc5895d688418e6b95e16f4a331e22c9a6cb50 | [] | no_license | arazi47/university-projects | 6cf7adb1fade977580e51d3842162eb033769088 | 4ebd70c857fac2565b44f6e40904cba209a138a1 | refs/heads/master | 2023-01-08T10:28:23.846702 | 2020-05-05T08:54:37 | 2020-05-05T08:54:37 | 105,974,765 | 0 | 2 | null | 2022-12-27T14:41:06 | 2017-10-06T06:43:06 | HTML | UTF-8 | Python | false | false | 13,900 | py | from client import Client
from movie import Movie
from rental import Rental
from external_input_output.mysql_io import MySQL_IO
from external_input_output.file_io import File_IO
from external_input_output.binary_file_io import Binary_File_IO
from external_input_output.json_io import JSON_IO
from Repository import Repository
from NewRepo import NewRepository
import datetime
class Controller:
def __init__(self):
#self.db = MySQL_IO()
self.db = File_IO()
#self.db = Binary_File_IO()
#self.db = JSON_IO()
self.clients = NewRepository()
self.movies = NewRepository()
self.rentedMovies = NewRepository()
self.undoList = []
self.redoList = []
def getClients(self):
return self.clients
def getMovies(self):
return self.movies
def getRentedMovies(self):
return self.rentedMovies
def getUndoList(self):
return self.undoList
def clearUndoList(self):
self.undoList.clear()
def getRedoList(self):
return self.redoList
def clearRedoList(self):
self.redoList.clear()
# for pickle
def setClientList(self, clientList):
self.clients = clientList
def setMovieList(self, movieList):
self.movies = movieList
def setRentalList(self, rentalList):
self.rentedMovies = rentalList
# *************************************************** #
# Movie functions
def addMovie(self, title, description, genre):
'''
:param title: string
:param description: string
:param genre: string
:return: None
'''
movie = Movie(len(self.movies) + 1, title, description, genre)
self.movies[self.movies.get_index()] = movie
self.undoList.append(['addMovie', title, description, genre])
def removeMovie(self, title):
'''
:param title: string
:return: True if the movie with the given name gets removed
False otherwise
'''
for movie in self.movies:
if movie.getTitle() == title:
#self.movies.remove_by_index(movie.getId() - 1)
del self.movies[movie.getId() - 1]
self.fixIndices(self.movies)
self.undoList.append(['removeMovie', title])
return True
return False
def updateMovie(self, oldTitle, newTitle, newDescription, newGenre):
'''
:param oldTitle: string
:param newTitle: string
:param newDescription: string
:param newGenre: string
:return: True if the movie with the given name gets updated
False otherwise
'''
for movie in self.movies:
if movie.getTitle() == oldTitle:
# we also need to store the old description and genre, for redo
self.undoList.append(['updateMovie', oldTitle, movie.getDescription(), movie.getGenre(), newTitle, newDescription, newGenre])
movie.setTitle(newTitle)
movie.setDescription(newDescription)
movie.setGenre(newGenre)
return True
return False
def searchMovieMatch(self, title):
'''
:param name: string
:return: list of matching names (full match or partial match)
'''
outputList = []
for movie in self.movies:
if title == movie.getTitle() or title in movie.getTitle():
outputList.append(movie.getTitle())
return outputList
def clearMovies(self):
self.movies.clear()
# *************************************************** #
# Client functions
def addClient(self, name):
'''
:param name: string
:return: None
'''
newClient = Client(len(self.clients) + 1, name)
self.clients[newClient.getId()] = newClient
self.undoList.append(['addClient', name])
def removeClient(self, name):
'''
:param name: string
:return: True if the client with the given name gets removed
False otherwise
'''
for client in self.clients:
if client.getName() == name:
#self.clients.remove_by_index(client.getId() - 1)
del self.clients[client.getId() - 1]
self.fixIndices(self.clients)
self.undoList.append(['removeClient', name])
return True
return False
def updateName(self, oldName, newName):
'''
:param oldName: string
:param newName: string
:return: True if the client with the given name gets updated
False otherwise
'''
for client in self.clients:
if client.getName() == oldName:
client.setName(newName)
self.undoList.append(['updateName', oldName, newName])
return True
return False
def fixIndices(self, lst):
'''
:param lst: list - can be either self.clients or self.movies
:return: None
The function needs to be called when we are removing a client or a movie,
because of the way I handle setting the ids of clients/movies.
steps to reproduce (don't call fixIndices to see the issue):
add name1, name2, name3
remove name2
add name4
print clients
'''
for i in range(len(lst)):
lst[i].setId(i + 1)
def searchClientMatch(self, name):
'''
:param name: string
:return: list of matching names (full match or partial match)
'''
outputList = []
for client in self.clients:
if client.getName() == name or name in client.getName():
outputList.append(client.getName())
return outputList
def getClientIdByName(self, name):
for client in self.clients:
if client.getName() == name:
return client.getId()
return -1
def clearClients(self):
self.clients.clear()
# *************************************************** #
# Rental functions
def addRental(self, movieId, clientId, rentedDate):
'''
:param movieId: integer
:param clientId: integer
:param rentedDate: datetime
:return: None
'''
rental = Rental(len(self.rentedMovies) + 1, movieId, clientId, rentedDate)
# mark the movie as rented
self.movies[movieId - 1].setRented(True)
self.movies[movieId - 1].setTotalRentalDays(self.movies[movieId - 1].getTotalRentalDays() + 7)
self.clients[clientId - 1].setTotalRentalDays(self.clients[clientId - 1].getTotalRentalDays() + 7)
self.rentedMovies[rental.getRentalId()] = rental
#self.undoList.append(['addRental', movieId, clientId, rentedDate])
self.undoList.append(['addRental', rental])
def undoAddRental(self, rental):
'''
:param rental: object of class Rental
:return: None
'''
#self.rentedMovies.remove_by_index(rental.getRentalId() - 1)
#self.rentedMovies.pop(rental.getRentalId() - 1)
del self.rentedMovies[rental.getRentalId() - 1]
self.clients[rental.getClientId() - 1].setTotalRentalDays(self.clients[rental.getClientId() - 1].getTotalRentalDays() - 7)
self.movies[rental.getMovieId() - 1].setRented(False)
self.movies[rental.getMovieId() - 1].setTotalRentalDays(self.movies[rental.getMovieId() - 1].getTotalRentalDays() - 7)
def redoRental(self, rental):
'''
:param rental: object of class rental
:return: None
'''
rental.setReturnedDate(rental.getRentedDate() - datetime.timedelta(days = 1))
self.rentedMovies[rental.getRentalId()] = rental
self.clients[rental.getClientId() - 1].setTotalRentalDays(self.clients[rental.getClientId() - 1].getTotalRentalDays() + 7)
self.movies[rental.getMovieId() - 1].setRented(True)
self.movies[rental.getMovieId() - 1].setTotalRentalDays(self.movies[rental.getMovieId() - 1].getTotalRentalDays() + 7)
def returnMovie(self, movieId, returnDate):
'''
:param movieId: integer
:return: None
Set the returned date of the rental of the movie with the specified movieId to today
'''
rtl = 0
for rental in self.rentedMovies:
if rental.getMovieId() == movieId:
rental.setReturnedDate(returnDate)
# the movie is not rented anymore
self.movies[movieId- 1].setRented(False)
# actually, we don't want to do that, right?
# subtract from total client rental days
#self.clients[rental.getClientId() - 1].setTotalRentalDays(self.clients[rental.getClientId() - 1].getTotalRentalDays() - 7)
rtl = rental
break
self.undoList.append(['returnMovie', movieId, rtl, returnDate])
def undoReturnMovie(self, rental):
rental.setReturnedDate(rental.getRentedDate() - datetime.timedelta(days = 1))
self.movies[rental.getMovieId() - 1].setRented(True)
def getRentalByMovieId(self, movieId):
'''
:param movieId: integer
:return: rental containing the given movieId
-1 otherwise
'''
for rental in self.rentedMovies:
if rental.getMovieId() == movieId:
return rental
return -1
# obsolete, most likely we can delete this!
def isMovieRented(self, movie):
'''
:param movie: an object of class Movie
:return: True if the movie is rented
False otherwise
'''
for rentedMovie in self.rentedMovies:
if rentedMovie.getMovieId() == movie.getMovieId():
return True
return False
def clearRentals(self):
self.rentedMovies.clear()
# *************************************************** #
# Misc functions
def filter(self, lst, checkFct, value = None):
filteredLst = []
for elem in lst:
if value != None:
if elem.checkFct() == value:
filteredLst.append(elem)
else:
if elem.checkFct():
filteredLst.append(elem)
return filteredLst
def lessThanCmp(self, a, b):
return a < b
def greaterThanCmp(self, a, b):
return a > b
def sort(self, lst, cmp):
# Counting sort
copyLst = lst[:]
indexLst = [0] * len(lst)
for i in range(len(lst) - 1):
for j in range(i + 1, len(lst)):
if cmp(lst[i], lst[j]):
indexLst[i] += 1
else:
indexLst[j] += 1
for i in range(len(lst)):
lst[indexLst[i]] = copyLst[i]
return lst
def getMostActiveClients(self):
'''
:return: The client list sorted in decreasing order by their total rental days
'''
sortedClients = self.clients[:]
sortedClients.sort(key = lambda client: client.getTotalRentalDays(), reverse = True)
return sortedClients
def undo(self):
if len(self.undoList) == 0:
return
# get the last element of undoList
lastCommand = self.undoList[-1]
if lastCommand[0] == 'addClient':
self.removeClient(lastCommand[1])
elif lastCommand[0] == 'removeClient':
self.addClient(lastCommand[1])
elif lastCommand[0] == 'addMovie':
self.removeMovie(lastCommand[1])
elif lastCommand[0] == 'removeMovie':
self.addMovie(lastCommand[1], lastCommand[2], lastCommand[3])
# @todo change this to updateClient(Name)
elif lastCommand[0] == 'updateName':
self.updateName(lastCommand[2], lastCommand[1])
elif lastCommand[0] == 'updateMovie':
# 0 1 2 3 4 5 6
# 'updateMovie' oldTitle oldDesc oldGenre newTitle newDesc newGenre
self.updateMovie(lastCommand[4], lastCommand[1], lastCommand[2], lastCommand[3])
elif lastCommand[0] == 'addRental':
#self.returnMovie(lastCommand[1], lastCommand[3])
self.undoAddRental(lastCommand[1])
elif lastCommand[0] == 'returnMovie':
self.undoReturnMovie(lastCommand[2])
# so we can redo the last operation
self.redoList.append(lastCommand)
# we're done with this command, let's get rid of it
self.undoList.pop()
    def redo(self):
        """Re-apply the most recently undone operation, if any.

        Mirrors undo(): dispatches on the command-name tuple element and
        replays the original operation with its stored arguments.

        NOTE(review): the redone command is only popped from redoList and is
        not pushed back onto undoList, so it cannot be un-done again —
        confirm this is intended.
        """
        if len(self.redoList) == 0:
            return
        lastCommand = self.redoList[-1]
        if lastCommand[0] == 'addClient':
            self.addClient(lastCommand[1])
        elif lastCommand[0] == 'removeClient':
            self.removeClient(lastCommand[1])
        elif lastCommand[0] == 'addMovie':
            self.addMovie(lastCommand[1], lastCommand[2], lastCommand[3])
        elif lastCommand[0] == 'removeMovie':
            self.removeMovie(lastCommand[1])
        elif lastCommand[0] == 'updateName':
            self.updateName(lastCommand[1], lastCommand[2])
        elif lastCommand[0] == 'updateMovie':
            self.updateMovie(lastCommand[1], lastCommand[4], lastCommand[5], lastCommand[6])
        elif lastCommand[0] == 'addRental':
            # TODO: more work needed here
            #self.addRental(lastCommand[1], lastCommand[2], lastCommand[3])
            self.redoRental(lastCommand[1])
        elif lastCommand[0] == 'returnMovie':
            self.returnMovie(lastCommand[1], lastCommand[3])
        self.redoList.pop()
| [
"razialexis43@gmail.com"
] | razialexis43@gmail.com |
7efde29ab90be6c730c10581a2d41f6270435e08 | 365581d00a61aa5903ec374ba663bed8e2793ab6 | /exhibitors/search_indexes.py | 4b10965d52367e9fbff80abbf5f395221941b37e | [
"MIT"
] | permissive | betoesquivel/fil2014 | 5171c92d73a96dd481884db33f320cfb2effddfb | 4c2e9188769096391bb206b76ed1ab8bd2ff66a1 | refs/heads/master | 2020-05-04T13:13:32.848991 | 2014-08-25T14:13:41 | 2014-08-25T14:13:41 | 20,413,882 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | from haystack import indexes
from exhibitors.models import Book
class BookIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for the Book model.

    ``text`` is the primary full-text document (rendered from the search
    template); ``priority`` mirrors ``Book.priority`` for sorting/filtering.
    """
    # Primary document field, populated from the index template for Book.
    text = indexes.CharField(document=True, use_template=True)
    # Integer copy of the model's 'priority' attribute.
    priority = indexes.IntegerField(model_attr='priority')
    def get_model(self):
        """Return the Django model class this index covers."""
        return Book
    def index_queryset(self, using=None):
        """Used when the entire index for model is updated."""
        # NOTE(review): date-based filtering was disabled; every Book is indexed.
        #return self.get_model().objects.filter(priority__lte=datetime.datetime.now())
        return self.get_model().objects.all()
| [
"ppbeto94@gmail.com"
] | ppbeto94@gmail.com |
29243555fe038a4a170cef7a594a58bdf9507195 | 17b2f9b13ac0c6600bf1dc27794a879b11897fdb | /rl/milp_model/slave_level_models/chiller3_evap_4nc.py | 9dafdee7a11fecf40fc7d2843f836ea57c829d78 | [] | no_license | zchiam002/vecmc_codes_zhonglin | d17aa8dd0723891b61606ee4ffe9cf332f1f84a9 | fa71e7a5013b1f408717bed034c6d1eafc5a8a9e | refs/heads/master | 2020-11-30T10:27:50.933317 | 2020-08-19T03:09:55 | 2020-08-19T03:09:55 | 230,373,260 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,910 | py | ## This is a chiller model, formulated as an input to a quadratic program
##This function is to check the unit type, make sure that all units defined in this file are of the same type
##There are only 4 types of units
##layers
##process
##utility
##utility_mt
def checktype_chiller3_evap_4nc (unit_type):
    """Return the unit type of the models defined in this file.

    The argument is accepted for interface uniformity but ignored: every
    unit in this file is of type 'utility' (one of the four recognised
    kinds: layers, process, utility, utility_mt).
    """
    return 'utility'
def chiller3_evap_4nc (ch3_4nc_mdv, utilitylist, streams, cons_eqns, cons_eqns_terms):
    '''Evaporator side of chiller 3, piecewise-linearised in 4 (or n) steps.

    Built on the Gordon-Ng universal chiller model; the COP is held constant
    over ``ch3_4nc_steps`` equally spaced capacity intervals, each emitted as
    one MILP "unit" plus its streams and constraints.

    :param ch3_4nc_mdv: master decision variables (parameters at the slave
        level), dataframe with a 'Value' column:
        [0] evaporator return temperature [K]
        [1] condenser/cooling-tower inlet temperature
        [2] total flowrate through all evaporators of all chillers
        [3] number of piecewise-linear pieces
    :param utilitylist: dataframe holding unit definitions for the MILP script
    :param streams: dataframe holding stream (connection) definitions
    :param cons_eqns: dataframe of explicitly stated additional constraints
    :param cons_eqns_terms: dataframe of the terms of those constraints
    :return: (utilitylist, streams, cons_eqns, cons_eqns_terms), each extended
        with this unit's rows
    '''
    from chiller3_evap_4nc_compute import chiller3_evap_4nc_compute
    import pandas as pd
    import numpy as np

    def _append_row(df, row):
        # pandas >= 2.0 removed DataFrame.append; concatenate a one-row frame
        # instead.  Dict insertion order supplies the column order, which
        # matches the column order used by the original script.
        return pd.concat([df, pd.DataFrame([row])], ignore_index=True)

    ##Processing list of master decision variables
    ch3_4nc_evap_ret_temp = ch3_4nc_mdv['Value'][0]
    ch3_4nc_ctin = ch3_4nc_mdv['Value'][1]
    ch3_4nc_tenwkflow = ch3_4nc_mdv['Value'][2]     ##Total flowrate through all evaporators of all chillers
    ch3_4nc_steps = ch3_4nc_mdv['Value'][3]         ##The number of piecewise linear pieces

    ##Defined constants (Gordon-Ng coefficients and physical data)
    ch3_4nc_rated_cap = 7330
    ch3_4nc_b0 = 1.35049420632748
    ch3_4nc_b1 = -134.853705222833
    ch3_4nc_b2 = 0.00430128306723068
    ch3_4nc_qc_coeff = 1.10348067074030
    ch3_4nc_cp = 4.2                        ##Water heat capacity [kJ/(kg K)]
    ch3_4nc_min_flow = 0.5 * 820.4531779    ##Minimum evaporator flowrate

    ##Calling a compute file to process dependent values (piecewise bounds,
    ##gradients and intercepts); values passed in a numpy array for easy handling.
    ch3_4nc_dc = np.zeros((10, 1))          ##Row 9 intentionally left unused
    ch3_4nc_dc[0, 0] = ch3_4nc_evap_ret_temp
    ch3_4nc_dc[1, 0] = ch3_4nc_ctin
    ch3_4nc_dc[2, 0] = ch3_4nc_rated_cap
    ch3_4nc_dc[3, 0] = ch3_4nc_b0
    ch3_4nc_dc[4, 0] = ch3_4nc_b1
    ch3_4nc_dc[5, 0] = ch3_4nc_b2
    ch3_4nc_dc[6, 0] = ch3_4nc_qc_coeff
    ch3_4nc_dc[7, 0] = ch3_4nc_tenwkflow
    ch3_4nc_dc[8, 0] = ch3_4nc_steps

    ch3_4nc_ret_vals = chiller3_evap_4nc_compute(ch3_4nc_dc)

    ##Unit definition --- one evaporator unit per piecewise step
    ##Energy balance: Qe = m_perc*m_total*Cp*(Tevap_in - Tevap_out)
    qe_coeff = ch3_4nc_tenwkflow * ch3_4nc_cp * 998.2 * (ch3_4nc_evap_ret_temp - 273.15 - 1) / 3600
    for i in range(0, int(ch3_4nc_steps)):
        power_coeff = (ch3_4nc_ret_vals['grad'][i] * ch3_4nc_tenwkflow * 4.2
                       * (ch3_4nc_evap_ret_temp - 273.15 - 1) * 998.2 / 3600)
        ud = {
            'Name': 'ch3_4nc_' + str(i + 1) + '_evap',
            'Variable1': 'm_perc',    ##Percentage of flowrate from the entire evaporator network
            'Variable2': 't_out',     ##Chilled water setpoint temperature
            'Fmin_v1': 0,
            'Fmax_v1': 1,             ##Maximum percentage is 100%
            'Fmin_v2': 0,             ##Minimum supply temperature of the chiller is 1 deg
            'Fmax_v2': 1,             ##Maximum supply temperature is the return temperature
            'Coeff_v1_2': 0,
            'Coeff_v1_1': qe_coeff,
            'Coeff_v2_2': 0,
            'Coeff_v2_1': 0,
            'Coeff_v1_v2': -qe_coeff,
            'Coeff_cst': 0,
            'Fmin': ch3_4nc_ret_vals['lb'][i] * ch3_4nc_rated_cap,
            'Fmax': ch3_4nc_ret_vals['ub'][i] * ch3_4nc_rated_cap,
            'Cost_v1_2': 0,
            'Cost_v1_1': 0,
            'Cost_v2_2': 0,
            'Cost_v2_1': 0,
            'Cost_v1_v2': 0,
            'Cost_cst': 0,
            'Cinv_v1_2': 0,
            'Cinv_v1_1': 0,
            'Cinv_v2_2': 0,
            'Cinv_v2_1': 0,
            'Cinv_v1_v2': 0,
            'Cinv_cst': 0,
            'Power_v1_2': 0,
            'Power_v1_1': power_coeff,
            'Power_v2_2': 0,
            'Power_v2_1': 0,
            'Power_v1_v2': -power_coeff,
            'Power_cst': ch3_4nc_ret_vals['int'][i],
            'Impact_v1_2': 0,
            'Impact_v1_1': 0,
            'Impact_v2_2': 0,
            'Impact_v2_1': 0,
            'Impact_v1_v2': 0,
            'Impact_cst': 0,
        }
        utilitylist = _append_row(utilitylist, ud)

    ##Stream definition --- temperature and flow stream for each step
    for i in range(0, int(ch3_4nc_steps)):
        ##Stream --- temperature (actual T = 274.15 + m_perc*t_out*(Tret - 274.15))
        stream = {
            'Parent': 'ch3_4nc_' + str(i + 1) + '_evap',
            'Type': 'temp_chil',
            'Name': 'ch3_4nc_' + str(i + 1) + '_evap_tout',
            'Layer': 'chil2sp1_temp',
            'Stream_coeff_v1_2': 0,
            'Stream_coeff_v1_1': (273.15 + 1),
            'Stream_coeff_v2_2': 0,
            'Stream_coeff_v2_1': 0,
            'Stream_coeff_v1_v2': (ch3_4nc_evap_ret_temp - 273.15 - 1),
            'Stream_coeff_cst': 0,
            'InOut': 'out',
        }
        streams = _append_row(streams, stream)

        ##Stream --- flowrate (m_perc scaled by the total network flow)
        stream = {
            'Parent': 'ch3_4nc_' + str(i + 1) + '_evap',
            'Type': 'flow',
            'Name': 'ch3_4nc_' + str(i + 1) + '_evap_mfout',
            'Layer': 'ch3_2_ch3evapnwk_flow',
            'Stream_coeff_v1_2': 0,
            'Stream_coeff_v1_1': ch3_4nc_tenwkflow,
            'Stream_coeff_v2_2': 0,
            'Stream_coeff_v2_1': 0,
            'Stream_coeff_v1_v2': 0,
            'Stream_coeff_cst': 0,
            'InOut': 'out',
        }
        streams = _append_row(streams, stream)

    ##Constraint definition
    ##At most one piecewise step may be active at a time
    cons_eqns = _append_row(cons_eqns, {
        'Name': 'totaluse_ch3_e_4nc',
        'Type': 'unit_binary',
        'Sign': 'less_than_equal_to',
        'RHS_value': 1,
    })
    ##If the chiller runs, its flow must exceed the minimum flowrate
    cons_eqns = _append_row(cons_eqns, {
        'Name': 'ch3_e_4nc_flow_min',
        'Type': 'stream_limit_modified',
        'Sign': 'greater_than_equal_to',
        'RHS_value': 0,
    })

    ##Constraint terms --- one pair per piecewise step
    for i in range(0, int(ch3_4nc_steps)):
        ##Binary-use term: sums to at most 1 over all steps
        cons_eqns_terms = _append_row(cons_eqns_terms, {
            'Parent_unit': 'ch3_4nc_' + str(i + 1) + '_evap',
            'Parent_eqn': 'totaluse_ch3_e_4nc',
            'Parent_stream': '-',    ##Only applicable for stream_limit types
            'Coefficient': 1,
            'Coeff_v1_2': 0,         ##Only applicable for stream_limit_modified types
            'Coeff_v1_1': 0,
            'Coeff_v2_2': 0,
            'Coeff_v2_1': 0,
            'Coeff_v1_v2': 0,
            'Coeff_cst': 0,
        })
        ##Minimum-flow term: m_perc*m_total - min_flow >= 0 when active
        cons_eqns_terms = _append_row(cons_eqns_terms, {
            'Parent_unit': 'ch3_4nc_' + str(i + 1) + '_evap',
            'Parent_eqn': 'ch3_e_4nc_flow_min',
            'Parent_stream': '-',
            'Coefficient': 0,
            'Coeff_v1_2': 0,
            'Coeff_v1_1': ch3_4nc_tenwkflow,
            'Coeff_v2_2': 0,
            'Coeff_v2_1': 0,
            'Coeff_v1_v2': 0,
            'Coeff_cst': -ch3_4nc_min_flow,
        })

    return utilitylist, streams, cons_eqns, cons_eqns_terms
"czlin90@gmail.com"
] | czlin90@gmail.com |
15b0f03273beda208f60ff2a90b3618a11954c31 | c34fd27daeafba1c7b6137aa5080e6f7b63f41ff | /Eso.API.Discovery/discovery-env/Lib/site-packages/aniso8601/builders/python.py | a2205e9c4ea3c96a49db2c66880998d8cc52179b | [
"MIT"
] | permissive | afgbeveridge/EsotericLanguagesToolkit | 5705ca491aa2da22ba85b635480854b6fe5eb41d | 05f391f5c03c9fc7dd60f7f4ef89e480315dc1bc | refs/heads/master | 2023-06-20T16:33:34.368992 | 2021-07-28T22:32:26 | 2021-07-28T22:32:26 | 285,102,073 | 1 | 0 | null | 2021-03-25T01:53:25 | 2020-08-04T21:17:58 | Python | UTF-8 | Python | false | false | 22,600 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2021, Brandon Nielsen
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
import datetime
from aniso8601.builders import (BaseTimeBuilder, DateTuple, DatetimeTuple,
Limit, TimeTuple, TupleBuilder, cast,
range_check)
from aniso8601.exceptions import (DayOutOfBoundsError,
HoursOutOfBoundsError, ISOFormatError,
LeapSecondError, MidnightBoundsError,
MinutesOutOfBoundsError,
MonthOutOfBoundsError,
SecondsOutOfBoundsError,
WeekOutOfBoundsError, YearOutOfBoundsError)
from aniso8601.utcoffset import UTCOffset
from collections import namedtuple
from functools import partial
# Nominal calendar conversion factors (365-day year, 30-day month) used when
# distributing duration components down to days.
DAYS_PER_YEAR = 365
DAYS_PER_MONTH = 30
DAYS_PER_WEEK = 7
HOURS_PER_DAY = 24
MINUTES_PER_HOUR = 60
MINUTES_PER_DAY = MINUTES_PER_HOUR * HOURS_PER_DAY
SECONDS_PER_MINUTE = 60
SECONDS_PER_DAY = MINUTES_PER_DAY * SECONDS_PER_MINUTE
# Microsecond conversion factors: fractional components are carried exactly
# as integer microseconds instead of floats.
MICROSECONDS_PER_SECOND = int(1e6)
MICROSECONDS_PER_MINUTE = 60 * MICROSECONDS_PER_SECOND
MICROSECONDS_PER_HOUR = 60 * MICROSECONDS_PER_MINUTE
MICROSECONDS_PER_DAY = 24 * MICROSECONDS_PER_HOUR
MICROSECONDS_PER_WEEK = 7 * MICROSECONDS_PER_DAY
MICROSECONDS_PER_MONTH = DAYS_PER_MONTH * MICROSECONDS_PER_DAY
MICROSECONDS_PER_YEAR = DAYS_PER_YEAR * MICROSECONDS_PER_DAY
# Largest day count a datetime.timedelta can represent; used as the upper
# bound in duration range checks.
TIMEDELTA_MAX_DAYS = datetime.timedelta.max.days
# A number split into an integer part plus an integer microsecond remainder.
FractionalComponent = namedtuple('FractionalComponent', ['principal', 'microsecondremainder'])
def year_range_check(valuestr, limit):
    """Range check a year string, padding truncated years to four digits.

    Truncated representations such as '19' denote the range 1900-1999;
    they are parsed as the first year of that range (1900).
    """
    if len(valuestr) >= 4:
        return range_check(valuestr, limit)

    # Pad zeros on the right to form a complete year, e.g. '19' -> '1900'
    return range_check(valuestr.ljust(4, '0'), limit)
def fractional_range_check(conversion, valuestr, limit):
    """Cast and range check a possibly-fractional component string.

    Strings containing a '.' become a FractionalComponent whose remainder is
    scaled by *conversion*; plain integers are cast with int().  The numeric
    value (float or int) is then checked against limit.min/limit.max, raising
    limit.rangeexception when out of bounds.  Returns None for None input.
    """
    if valuestr is None:
        return None

    if '.' in valuestr:
        value = cast(valuestr,
                     partial(_cast_to_fractional_component, conversion),
                     thrownmessage=limit.casterrorstring)
        checkvalue = float(valuestr)
    else:
        value = cast(valuestr, int, thrownmessage=limit.casterrorstring)
        checkvalue = int(valuestr)

    if limit.min is not None and checkvalue < limit.min:
        raise limit.rangeexception(limit.rangeerrorstring)

    if limit.max is not None and checkvalue > limit.max:
        raise limit.rangeexception(limit.rangeerrorstring)

    return value
def _cast_to_fractional_component(conversion, floatstr):
    """Split 'X.Y' into FractionalComponent(int(X), remainder).

    The fractional digits are scaled by *conversion* (a microsecond factor)
    using integer arithmetic, so no floating point error is introduced.
    """
    wholestr, fracstr = floatstr.split('.')

    remainder = (int(fracstr) * conversion) // (10 ** len(fracstr))

    return FractionalComponent(int(wholestr), remainder)
class PythonTimeBuilder(BaseTimeBuilder):
    """TimeBuilder producing native Python datetime objects.

    Each build_* classmethod range checks its string components and returns
    the corresponding :mod:`datetime` type.  Fractional components are kept
    exact by carrying them as integer microseconds (FractionalComponent).
    """
    #0000 (1 BC) is not representable as a Python date
    DATE_YYYY_LIMIT = Limit('Invalid year string.',
                            datetime.MINYEAR, datetime.MAXYEAR, YearOutOfBoundsError,
                            'Year must be between {0}..{1}.'
                            .format(datetime.MINYEAR, datetime.MAXYEAR),
                            year_range_check)
    TIME_HH_LIMIT = Limit('Invalid hour string.',
                          0, 24, HoursOutOfBoundsError,
                          'Hour must be between 0..24 with '
                          '24 representing midnight.',
                          partial(fractional_range_check, MICROSECONDS_PER_HOUR))
    TIME_MM_LIMIT = Limit('Invalid minute string.',
                          0, 59, MinutesOutOfBoundsError,
                          'Minute must be between 0..59.',
                          partial(fractional_range_check, MICROSECONDS_PER_MINUTE))
    TIME_SS_LIMIT = Limit('Invalid second string.',
                          0, 60, SecondsOutOfBoundsError,
                          'Second must be between 0..60 with '
                          '60 representing a leap second.',
                          partial(fractional_range_check, MICROSECONDS_PER_SECOND))
    #Durations have no fixed bounds; only the cast is checked here, overflow
    #of timedelta is checked later in range_check_duration
    DURATION_PNY_LIMIT = Limit('Invalid year duration string.',
                               None, None, YearOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_YEAR))
    DURATION_PNM_LIMIT = Limit('Invalid month duration string.',
                               None, None, MonthOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_MONTH))
    DURATION_PNW_LIMIT = Limit('Invalid week duration string.',
                               None, None, WeekOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_WEEK))
    DURATION_PND_LIMIT = Limit('Invalid day duration string.',
                               None, None, DayOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_DAY))
    DURATION_TNH_LIMIT = Limit('Invalid hour duration string.',
                               None, None, HoursOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_HOUR))
    DURATION_TNM_LIMIT = Limit('Invalid minute duration string.',
                               None, None, MinutesOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_MINUTE))
    DURATION_TNS_LIMIT = Limit('Invalid second duration string.',
                               None, None, SecondsOutOfBoundsError,
                               None,
                               partial(fractional_range_check, MICROSECONDS_PER_SECOND))
    #NOTE(review): this mutates BaseTimeBuilder.DATE_RANGE_DICT in place (no
    #copy is taken) -- confirm no other builder relies on the original 'YYYY'.
    DATE_RANGE_DICT = BaseTimeBuilder.DATE_RANGE_DICT
    DATE_RANGE_DICT['YYYY'] = DATE_YYYY_LIMIT
    TIME_RANGE_DICT = {'hh': TIME_HH_LIMIT, 'mm': TIME_MM_LIMIT, 'ss': TIME_SS_LIMIT}
    DURATION_RANGE_DICT = {'PnY': DURATION_PNY_LIMIT,
                           'PnM': DURATION_PNM_LIMIT,
                           'PnW': DURATION_PNW_LIMIT,
                           'PnD': DURATION_PND_LIMIT,
                           'TnH': DURATION_TNH_LIMIT,
                           'TnM': DURATION_TNM_LIMIT,
                           'TnS': DURATION_TNS_LIMIT}
    @classmethod
    def build_date(cls, YYYY=None, MM=None, DD=None, Www=None, D=None,
                   DDD=None):
        """Build a datetime.date from calendar, week, or ordinal components."""
        YYYY, MM, DD, Www, D, DDD = cls.range_check_date(YYYY, MM, DD, Www, D, DDD)
        #Missing month/day default to the first of the period
        if MM is None:
            MM = 1
        if DD is None:
            DD = 1
        if DDD is not None:
            return PythonTimeBuilder._build_ordinal_date(YYYY, DDD)
        if Www is not None:
            return PythonTimeBuilder._build_week_date(YYYY, Www,
                                                      isoday=D)
        return datetime.date(YYYY, MM, DD)
    @classmethod
    def build_time(cls, hh=None, mm=None, ss=None, tz=None):
        """Build a datetime.time, optionally timezone aware."""
        #Builds a time from the given parts, handling fractional arguments
        #where necessary
        hours = 0
        minutes = 0
        seconds = 0
        microseconds = 0
        hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz)
        if type(hh) is FractionalComponent:
            hours = hh.principal
            microseconds = hh.microsecondremainder
        elif hh is not None:
            hours = hh
        if type(mm) is FractionalComponent:
            minutes = mm.principal
            microseconds = mm.microsecondremainder
        elif mm is not None:
            minutes = mm
        if type(ss) is FractionalComponent:
            seconds = ss.principal
            microseconds = ss.microsecondremainder
        elif ss is not None:
            seconds = ss
        hours, minutes, seconds, microseconds = PythonTimeBuilder._distribute_microseconds(microseconds, (hours, minutes, seconds), (MICROSECONDS_PER_HOUR, MICROSECONDS_PER_MINUTE, MICROSECONDS_PER_SECOND))
        #Move midnight into range
        if hours == 24:
            hours = 0
        #Datetimes don't handle fractional components, so we use a timedelta
        if tz is not None:
            return (datetime.datetime(1, 1, 1,
                                      hour=hours,
                                      minute=minutes,
                                      tzinfo=cls._build_object(tz))
                    + datetime.timedelta(seconds=seconds,
                                         microseconds=microseconds)
                    ).timetz()
        return (datetime.datetime(1, 1, 1,
                                  hour=hours,
                                  minute=minutes)
                + datetime.timedelta(seconds=seconds,
                                     microseconds=microseconds)
                ).time()
    @classmethod
    def build_datetime(cls, date, time):
        """Combine a date tuple and a time tuple into a datetime.datetime."""
        return datetime.datetime.combine(cls._build_object(date),
                                         cls._build_object(time))
    @classmethod
    def build_duration(cls, PnY=None, PnM=None, PnW=None, PnD=None, TnH=None,
                       TnM=None, TnS=None):
        """Build a datetime.timedelta from ISO 8601 duration components."""
        #PnY and PnM will be distributed to PnD, microsecond remainder to TnS
        PnY, PnM, PnW, PnD, TnH, TnM, TnS = cls.range_check_duration(PnY, PnM, PnW, PnD, TnH, TnM, TnS)
        seconds = TnS.principal
        microseconds = TnS.microsecondremainder
        return datetime.timedelta(days=PnD,
                                  seconds=seconds,
                                  microseconds=microseconds,
                                  minutes=TnM,
                                  hours=TnH,
                                  weeks=PnW)
    @classmethod
    def build_interval(cls, start=None, end=None, duration=None):
        """Build an interval as a (start, end) pair of built objects.

        Exactly one of the three combinations is expected:
        <start>/<end>, <duration>/<end>, or <start>/<duration>.
        """
        start, end, duration = cls.range_check_interval(start, end, duration)
        if start is not None and end is not None:
            #<start>/<end>
            startobject = cls._build_object(start)
            endobject = cls._build_object(end)
            return (startobject, endobject)
        durationobject = cls._build_object(duration)
        #Determine if datetime promotion is required
        datetimerequired = (duration.TnH is not None
                            or duration.TnM is not None
                            or duration.TnS is not None
                            or durationobject.seconds != 0
                            or durationobject.microseconds != 0)
        if end is not None:
            #<duration>/<end>
            endobject = cls._build_object(end)
            #Range check
            if type(end) is DateTuple and datetimerequired is True:
                #<end> is a date, and <duration> requires datetime resolution
                return (endobject,
                        cls.build_datetime(end, TupleBuilder.build_time())
                        - durationobject)
            return (endobject,
                    endobject
                    - durationobject)
        #<start>/<duration>
        startobject = cls._build_object(start)
        #Range check
        if type(start) is DateTuple and datetimerequired is True:
            #<start> is a date, and <duration> requires datetime resolution
            return (startobject,
                    cls.build_datetime(start, TupleBuilder.build_time())
                    + durationobject)
        return (startobject,
                startobject
                + durationobject)
    @classmethod
    def build_repeating_interval(cls, R=None, Rnn=None, interval=None):
        """Build a generator over a repeating interval.

        R=True yields an unbounded generator; otherwise Rnn iterations.
        When only an end is given, the generator steps backwards.
        """
        startobject = None
        endobject = None
        R, Rnn, interval = cls.range_check_repeating_interval(R, Rnn, interval)
        if interval.start is not None:
            startobject = cls._build_object(interval.start)
        if interval.end is not None:
            endobject = cls._build_object(interval.end)
        if interval.duration is not None:
            durationobject = cls._build_object(interval.duration)
        else:
            durationobject = endobject - startobject
        if R is True:
            if startobject is not None:
                return cls._date_generator_unbounded(startobject,
                                                     durationobject)
            return cls._date_generator_unbounded(endobject,
                                                 -durationobject)
        iterations = int(Rnn)
        if startobject is not None:
            return cls._date_generator(startobject, durationobject, iterations)
        return cls._date_generator(endobject, -durationobject, iterations)
    @classmethod
    def build_timezone(cls, negative=None, Z=None, hh=None, mm=None, name=''):
        """Build a UTCOffset tzinfo from sign, 'Z' flag, and hh/mm strings."""
        negative, Z, hh, mm, name = cls.range_check_timezone(negative, Z, hh, mm, name)
        if Z is True:
            #Z -> UTC
            return UTCOffset(name='UTC', minutes=0)
        tzhour = int(hh)
        if mm is not None:
            tzminute = int(mm)
        else:
            tzminute = 0
        if negative is True:
            return UTCOffset(name=name, minutes=-(tzhour * 60 + tzminute))
        return UTCOffset(name=name, minutes=tzhour * 60 + tzminute)
    @classmethod
    def range_check_duration(cls, PnY=None, PnM=None, PnW=None, PnD=None,
                             TnH=None, TnM=None, TnS=None, rangedict=None):
        """Range check duration components against timedelta's maximum size.

        Years and months are converted to nominal days; the collected
        microsecond remainder is redistributed across the larger units.
        Returns (None, None, weeks, totaldays, hours, minutes,
        FractionalComponent(seconds, microseconds)).
        """
        years = 0
        months = 0
        days = 0
        weeks = 0
        hours = 0
        minutes = 0
        seconds = 0
        microseconds = 0
        PnY, PnM, PnW, PnD, TnH, TnM, TnS = BaseTimeBuilder.range_check_duration(PnY, PnM, PnW, PnD, TnH, TnM, TnS, rangedict=cls.DURATION_RANGE_DICT)
        if PnY is not None:
            if type(PnY) is FractionalComponent:
                years = PnY.principal
                microseconds = PnY.microsecondremainder
            else:
                years = PnY
            if years * DAYS_PER_YEAR > TIMEDELTA_MAX_DAYS:
                raise YearOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if PnM is not None:
            if type(PnM) is FractionalComponent:
                months = PnM.principal
                microseconds = PnM.microsecondremainder
            else:
                months = PnM
            if months * DAYS_PER_MONTH > TIMEDELTA_MAX_DAYS:
                raise MonthOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if PnW is not None:
            if type(PnW) is FractionalComponent:
                weeks = PnW.principal
                microseconds = PnW.microsecondremainder
            else:
                weeks = PnW
            if weeks * DAYS_PER_WEEK > TIMEDELTA_MAX_DAYS:
                raise WeekOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if PnD is not None:
            if type(PnD) is FractionalComponent:
                days = PnD.principal
                microseconds = PnD.microsecondremainder
            else:
                days = PnD
            if days > TIMEDELTA_MAX_DAYS:
                raise DayOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if TnH is not None:
            if type(TnH) is FractionalComponent:
                hours = TnH.principal
                microseconds = TnH.microsecondremainder
            else:
                hours = TnH
            if hours // HOURS_PER_DAY > TIMEDELTA_MAX_DAYS:
                raise HoursOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if TnM is not None:
            if type(TnM) is FractionalComponent:
                minutes = TnM.principal
                microseconds = TnM.microsecondremainder
            else:
                minutes = TnM
            if minutes // MINUTES_PER_DAY > TIMEDELTA_MAX_DAYS:
                raise MinutesOutOfBoundsError('Duration exceeds maximum timedelta size.')
        if TnS is not None:
            if type(TnS) is FractionalComponent:
                seconds = TnS.principal
                microseconds = TnS.microsecondremainder
            else:
                seconds = TnS
            if seconds // SECONDS_PER_DAY > TIMEDELTA_MAX_DAYS:
                raise SecondsOutOfBoundsError('Duration exceeds maximum timedelta size.')
        years, months, weeks, days, hours, minutes, seconds, microseconds = PythonTimeBuilder._distribute_microseconds(microseconds, (years, months, weeks, days, hours, minutes, seconds), (MICROSECONDS_PER_YEAR, MICROSECONDS_PER_MONTH, MICROSECONDS_PER_WEEK, MICROSECONDS_PER_DAY, MICROSECONDS_PER_HOUR, MICROSECONDS_PER_MINUTE, MICROSECONDS_PER_SECOND))
        #Note that weeks can be handled without conversion to days
        totaldays = years * DAYS_PER_YEAR + months * DAYS_PER_MONTH + days
        #Check against timedelta limits
        if totaldays + weeks * DAYS_PER_WEEK + hours // HOURS_PER_DAY + minutes // MINUTES_PER_DAY + seconds // SECONDS_PER_DAY > TIMEDELTA_MAX_DAYS:
            raise DayOutOfBoundsError('Duration exceeds maximum timedelta size.')
        return (None, None, weeks, totaldays, hours, minutes, FractionalComponent(seconds, microseconds))
    @classmethod
    def range_check_interval(cls, start=None, end=None, duration=None):
        """Range check an interval; handles the concise end format and
        verifies that duration arithmetic stays within datetime bounds."""
        #Handles concise format, range checks any potential durations
        if start is not None and end is not None:
            #<start>/<end>
            #Handle concise format
            if cls._is_interval_end_concise(end) is True:
                end = cls._combine_concise_interval_tuples(start, end)
            return (start, end, duration)
        durationobject = cls._build_object(duration)
        if end is not None:
            #<duration>/<end>
            endobject = cls._build_object(end)
            #Range check
            if type(end) is DateTuple:
                enddatetime = cls.build_datetime(end, TupleBuilder.build_time())
                if enddatetime - datetime.datetime.min < durationobject:
                    raise YearOutOfBoundsError('Interval end less than minimium date.')
            else:
                mindatetime = datetime.datetime.min
                if end.time.tz is not None:
                    mindatetime = mindatetime.replace(tzinfo=endobject.tzinfo)
                if endobject - mindatetime < durationobject:
                    raise YearOutOfBoundsError('Interval end less than minimium date.')
        else:
            #<start>/<duration>
            startobject = cls._build_object(start)
            #Range check
            if type(start) is DateTuple:
                startdatetime = cls.build_datetime(start, TupleBuilder.build_time())
                if datetime.datetime.max - startdatetime < durationobject:
                    raise YearOutOfBoundsError('Interval end greater than maximum date.')
            else:
                maxdatetime = datetime.datetime.max
                if start.time.tz is not None:
                    maxdatetime = maxdatetime.replace(tzinfo=startobject.tzinfo)
                if maxdatetime - startobject < durationobject:
                    raise YearOutOfBoundsError('Interval end greater than maximum date.')
        return (start, end, duration)
    @staticmethod
    def _build_week_date(isoyear, isoweek, isoday=None):
        """Convert an ISO week date (year, week[, day]) to a datetime.date."""
        if isoday is None:
            return (PythonTimeBuilder._iso_year_start(isoyear)
                    + datetime.timedelta(weeks=isoweek - 1))
        return (PythonTimeBuilder._iso_year_start(isoyear)
                + datetime.timedelta(weeks=isoweek - 1, days=isoday - 1))
    @staticmethod
    def _build_ordinal_date(isoyear, isoday):
        """Convert an ordinal date (year, day-of-year) to a datetime.date."""
        #Day of year to a date
        #https://stackoverflow.com/questions/2427555/python-question-year-and-day-of-year-to-date
        builtdate = (datetime.date(isoyear, 1, 1)
                     + datetime.timedelta(days=isoday - 1))
        return builtdate
    @staticmethod
    def _iso_year_start(isoyear):
        """Return the Gregorian date of the first day of the given ISO year."""
        #Given an ISO year, returns the equivalent of the start of the year
        #on the Gregorian calendar (which is used by Python)
        #Stolen from:
        #http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
        #Determine the location of the 4th of January, the first week of
        #the ISO year is the week containing the 4th of January
        #http://en.wikipedia.org/wiki/ISO_week_date
        fourth_jan = datetime.date(isoyear, 1, 4)
        #Note the conversion from ISO day (1 - 7) and Python day (0 - 6)
        delta = datetime.timedelta(days=fourth_jan.isoweekday() - 1)
        #Return the start of the year
        return fourth_jan - delta
    @staticmethod
    def _date_generator(startdate, timedelta, iterations):
        """Yield startdate, startdate+timedelta, ... for *iterations* steps."""
        currentdate = startdate
        currentiteration = 0
        while currentiteration < iterations:
            yield currentdate
            #Update the values
            currentdate += timedelta
            currentiteration += 1
    @staticmethod
    def _date_generator_unbounded(startdate, timedelta):
        """Yield startdate, startdate+timedelta, ... without bound."""
        currentdate = startdate
        while True:
            yield currentdate
            #Update the value
            currentdate += timedelta
    @staticmethod
    def _distribute_microseconds(todistribute, recipients, reductions):
        """Distribute a microsecond total across larger units.

        Given a number of microseconds as int, a tuple of ints length n
        to distribute to, and a tuple of ints length n to divide todistribute
        by (from largest to smallest), returns a tuple of length n + 1, with
        todistribute divided across recipients using the reductions, with
        the final remainder returned as the final tuple member.
        """
        results = []
        remainder = todistribute
        for index, reduction in enumerate(reductions):
            additional, remainder = divmod(remainder, reduction)
            results.append(recipients[index] + additional)
        #Always return the remaining microseconds
        results.append(remainder)
        return tuple(results)
| [
"tony.beveridge@outlook.co.nz"
] | tony.beveridge@outlook.co.nz |
508ab1d2b665646b30ba5c01e963cb06091f02b8 | 44b1779a5148fe2c273046d6f6dc5da89d6cfd75 | /networkx_to_neo4j/__init__.py | 01a7c39ecab90b7b36f8bb0eb0eb9610274af2fb | [] | no_license | leokster/networkx_to_neo4j | 11faeec9e0e0a20f534867a68776429f865161ac | 613c8356e85a257c7f80dcf0775203d6cf7cfb3b | refs/heads/master | 2022-12-03T17:24:21.940100 | 2020-08-07T15:29:45 | 2020-08-07T15:29:45 | 285,862,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,032 | py | from neo4j import GraphDatabase
import neo4j
import numpy as np
import networkx as nx
import progressbar
def write_networkx_to_neo4j(tx:neo4j.Session, graph:nx.DiGraph, label_field:str = "labels", type_field:str = "labels", graph_uid:str = None):
    '''
    Upload a networkx graph to neo4j, one MERGE/CREATE statement per node and edge,
    showing a progress bar while doing so.

    :param tx: neo4j Session object
    :param graph: a networkx DiGraph object, containing the graph which should be uploaded
    :param label_field: name of the attribute of networkx nodes, which should be taken as the node label in neo4j
    :param type_field: name of the attribute of networkx edges, which should be taken as the edge type in neo4j
    :param graph_uid: a unique identifier which will be added as attribute graph_uid to each node and edge;
        defaults to the current Unix timestamp so separate uploads stay distinguishable
    '''
    if graph_uid is None:
        # Relies on the module-level ``import time``; the original referenced
        # ``time`` without importing it, so the default path raised NameError.
        graph_uid = int(time.time())
    done = 0
    with progressbar.ProgressBar(max_value=len(graph.nodes) + len(graph.edges)) as bar:
        # Nodes must exist before edges can MATCH them, so upload them first.
        for node in graph.nodes:
            done += 1
            bar.update(done)
            __add_node_to_neo4j(tx, node, graph.nodes[node], graph_uid, label_field)
        for u, v, data in graph.edges.data():
            done += 1
            bar.update(done)
            # neo4j drivers cannot serialise numpy integers, so coerce node ids.
            if isinstance(u, np.int64):
                u = int(u)
            if isinstance(v, np.int64):
                v = int(v)
            __add_edge_to_neo4j(tx, (u, v), data, graph_uid, type_field)
def __add_node_to_neo4j(tx:neo4j.Session, node, attr, graph_uid, label_field):
    """Merge a single networkx node (plus its attributes) into neo4j."""
    # 'contraction' entries (presumably left behind by nx.contracted_nodes)
    # hold nested data that cannot be stored as a neo4j property -- drop them.
    if "contraction" in attr:
        del attr["contraction"]
    label = attr.get(label_field, "DUMMY_LABEL")
    clauses = ["MERGE (n:{}{{graph_uid:$graph_uid, node_id:$node_id}})".format(label)]
    for key in attr:
        if key != label_field:
            clauses.append("SET n.{0}=${0}".format(key))
    clauses.append("SET n.node_id=$node_id")
    # ``attr`` is mutated on purpose so it doubles as the query parameter dict.
    attr["graph_uid"] = graph_uid
    attr["node_id"] = node
    tx.run(" ".join(clauses), **attr)
def __add_edge_to_neo4j(tx:neo4j.Session, edge, attr, graph_uid, type_field):
    """Create a single directed edge ``edge[0] -> edge[1]`` in neo4j."""
    # 'node_id' is reserved for node matching below; never copy it onto an edge.
    if "node_id" in attr:
        del attr["node_id"]
    edge_type = attr.get(type_field, "DUMMY_TYPE")
    clauses = ["MATCH (n1{graph_uid:$graph_uid, node_id:$n1}), (n2{graph_uid:$graph_uid, node_id:$n2})"]
    clauses.append("CREATE (n1)-[e:{}{{graph_uid:$graph_uid}}]->(n2)".format(edge_type))
    for key in attr:
        if key != type_field:
            clauses.append("SET e.{0}=${0}".format(key))
    # ``attr`` is mutated on purpose so it doubles as the query parameter dict.
    attr["graph_uid"] = graph_uid
    attr["n1"] = edge[0]
    attr["n2"] = edge[1]
    tx.run(" ".join(clauses), **attr)
if __name__ == "__main__":
    # Demo: upload a random 20-node graph to a local neo4j instance.
    graph = nx.gn_graph(20)
    # networkx >= 2.0 signature is set_node_attributes(G, values, name):
    # the original passed (G, "labels", "MY_NODE"), which created an attribute
    # *named* "MY_NODE" with value "labels" -- so the "labels"/"types" fields
    # read by write_networkx_to_neo4j were never actually set.
    nx.set_node_attributes(graph, "MY_NODE", "labels")
    nx.set_edge_attributes(graph, "MY_EDGE", "types")
    driver = GraphDatabase.driver(uri="bolt://xxx.xxx.xxx.xxx:7687", auth=("neo4j_user", "neo4j_pw"), max_connection_lifetime = 1000)
    with driver.session() as session:
        session.write_transaction(write_networkx_to_neo4j, graph, label_field="labels", type_field="types", graph_uid="I'm a uniqe graph")
| [
"rohnert@student.ethz.ch"
] | rohnert@student.ethz.ch |
65be1dacf17113aa4f8049d8080929bf3d549da8 | f831f2574c3cb58c7917f6cec60beb2893cf3195 | /settings_config/urls.py | 5cb0abf3807f710eafef39db16ee8f715aa86de9 | [] | no_license | niyara-muradova/driver-front | 8fb49138588ce4d80324325198d1848ed57787bb | 8fbf1872e1f20da8857796d94b9a255b7e421786 | refs/heads/master | 2022-11-26T20:45:31.720044 | 2020-08-12T12:35:32 | 2020-08-12T12:35:32 | 287,009,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | from django.urls import path
from settings_config import views
# URL routing table for the settings_config app; the view callables are
# defined in settings_config/views.py (imported above as ``views``).
urlpatterns = [
    path('configs/', views.conf_choose, name='conf-choose'),
    path('devices/', views.devices, name='devices')
]
"18planeta@gmail.com"
] | 18planeta@gmail.com |
2825b9df49cbd304d9af26404a4298c52ad3dae4 | 276b41398db43c33314e822667592dc37cd11795 | /python/phevaluator/__init__.py | 8d574de81a338a83bebab2a9fd2c3e47608df9ff | [
"Apache-2.0"
] | permissive | HenryRLee/PokerHandEvaluator | f7dcaab12453a88f8e19dec66e7c15e4113e6a36 | 4715359b497a62ab00071a4754557da6dd49f241 | refs/heads/master | 2023-07-06T18:09:44.216288 | 2023-07-05T11:44:42 | 2023-07-05T11:44:42 | 53,184,524 | 269 | 90 | Apache-2.0 | 2023-09-03T12:27:15 | 2016-03-05T05:05:24 | C | UTF-8 | Python | false | false | 436 | py | """Package for evaluating a poker hand."""
from . import hash as hash_ # FIXME: `hash` collides to built-in function
from . import tables
from .card import Card
from .evaluator import _evaluate_cards, evaluate_cards
from .evaluator_omaha import _evaluate_omaha_cards, evaluate_omaha_cards
from .utils import sample_cards
__all__ = ["tables", "Card", "evaluate_cards", "evaluate_omaha_cards", "sample_cards"]
__docformat__ = "google"
| [
"noreply@github.com"
] | HenryRLee.noreply@github.com |
56168049cd4f49194e331486fb1189f3acaab815 | b65900cceb163b255473606ba920f528fe99327b | /maximum-subarray/maximum-subarray.py | 8b62f9b4dad43d2b709882186be759337be5f95c | [] | no_license | xieziwei99/leetcode-python | 01b57ad128076d35d41c6a3e54d9133d423be2b5 | d622a770ad0667b6e0ab637881044d9b85a8b13b | refs/heads/master | 2020-06-21T17:52:03.932505 | 2019-10-18T11:07:09 | 2019-10-18T11:07:09 | 197,519,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,830 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@Description: 最大子串和问题
@author: xieziwei99
@Create Date: 2019/8/7
"""
from typing import List
class Solution:
    """Maximum subarray sum, solved two ways: Kadane's algorithm and divide & conquer."""
    @staticmethod
    def max_sub_array(nums: List[int]) -> int:
        """Kadane's algorithm, O(n) time, O(1) extra space."""
        if not [i for i in nums if i > 0]:  # holds when the list comprehension is empty, i.e. no element of nums is positive
            return max(nums)  # all elements <= 0: the answer is the single largest element
        temp = 0  # best sum of a subarray ending at the current index
        the_max = nums[0]
        for i in range(len(nums)):
            temp += nums[i]
            if temp <= 0:
                temp = 0  # a non-positive running sum can never help a later subarray; restart
            elif temp > the_max:
                the_max = temp
        return the_max
    def max_sub_array2(self, nums: List[int]) -> int:
        """Divide-and-conquer variant, O(n log n) time."""
        if not [i for i in nums if i > 0]:  # holds when no element of nums is positive
            return max(nums)
        return self._max_sub_array(nums, 0, len(nums) - 1)
    def _max_sub_array(self, nums: List[int], left: int, right: int) -> int:
        """Best subarray sum within nums[left..right]; assumes some positive element exists."""
        if left >= right:
            return nums[left] if nums[left] >= 0 else 0
        mid = (left + right) // 2
        # NOTE(review): the names are swapped -- max_left recurses on the RIGHT
        # half and max_right on the left -- but the symmetric max() below makes
        # the mix-up harmless.
        max_left = self._max_sub_array(nums, mid + 1, right)
        max_right = self._max_sub_array(nums, left, mid - 1)
        # Best (possibly empty) suffix ending at mid-1 plus best prefix
        # starting at mid covers every subarray that crosses the midpoint.
        max_mid_left, max_mid_right, temp = 0, 0, 0
        for i in range(left, mid)[::-1]:  # mid-1, mid-2, ... , left
            temp += nums[i]
            if temp > max_mid_left:
                max_mid_left = temp
        temp = 0
        for i in range(mid, right + 1):  # mid, mid+1, ... , right
            temp += nums[i]
            if temp > max_mid_right:
                max_mid_right = temp
        return max(max_left, max_right, (max_mid_left + max_mid_right))
def main():
    """Run both solvers on a sample array and print their (identical) answers."""
    sample = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
    print(Solution.max_sub_array(sample))
    print(Solution().max_sub_array2(sample))
if __name__ == '__main__':
    main()
| [
"xieziwei@bupt.edu.cn"
] | xieziwei@bupt.edu.cn |
4b7fd8aded02d8dc84c33f818cd258b0d071bf9a | 24718ad7f66a6cb6e69861e62bda069a511a34f9 | /main.py | 1a76cd7c52ef29b0d58a31c6d9c8235a5b38667f | [] | no_license | kmiao49/Scraper-Project | 5f7e69502ee6e347a7d3ed3067048d311bfa264a | 10f20d2b6755a5f42ef7f3c5c293242348f4d865 | refs/heads/master | 2023-01-21T13:40:27.086634 | 2020-12-04T01:05:48 | 2020-12-04T01:05:48 | 318,057,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | #https://www.youtube.com/watch?v=ng2o98k983k&ab_channel=CoreySchafer
from bs4 import BeautifulSoup
import requests
import lxml
# Download the blog's front page and parse it with BeautifulSoup's lxml backend.
source = requests.get('https://coreyms.com/').text
soup = BeautifulSoup(source, 'lxml')
# The commented-out lines below are the tutorial's step-by-step extraction of
# the first <article>: headline, summary paragraph, and embedded video URL.
# article = soup.find('article')
# This parses out the first article in the html script using the 'find' function
# print(article.prettify())
# This prints out the article in question
# headline = article.h2.a.text
## The headline 'Python Tutorial: Zip Files - Creating and Extracting Zip Archives' is found under an h2 tag -> a tag -> and we want to pull out the text
# print(headline)
# summary = article.find('div', class_='entry-content').p.text
## This pulls out the summary text, which we use the find function to locate (along with class_ denominator to find the specific content in question)
# print(summary)
# vid_src = article.find('iframe', class_='youtube-player')['src']
# this allows us to look for the video source i.e. youtube URL...the ['src'] acts as a dictionary to pull the source from 'article'
# print(vid_src)
"kevmiao35@gmail.com"
] | kevmiao35@gmail.com |
730c23a294cb645522e4991c66ff37f52146cf2c | 0ac3702a17958ca7fef01644a018fe445871946b | /setup.py | 8817d3ed58785f6f7f07fda95eb7d5547fa5ddcf | [
"MIT"
] | permissive | ydcjeff/clifold | c58bbfe337cb7a05115b460ec9849e62adf7a55d | 7a98c25480ad67b5662c2d052735b6d3a5a5a331 | refs/heads/master | 2021-08-29T05:09:05.551938 | 2020-04-06T09:02:49 | 2020-04-06T09:02:49 | 253,270,231 | 3 | 0 | MIT | 2021-08-23T06:32:48 | 2020-04-05T15:42:44 | Python | UTF-8 | Python | false | false | 1,563 | py | """Setup file for clifold python module"""
from setuptools import find_packages, setup
from os import path
from clifold.clifold_main import __version__
# ``__version__`` is imported above from clifold.clifold_main as a callable;
# calling it yields the actual version string.
__version__ = __version__()
here = path.abspath(path.dirname(__file__))
# Use the README as the PyPI long description.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
# Extra dependencies needed only when running the test suite.
tests_require = [
    "pytest",
    "flake8"
]
extras = {
    "test": tests_require
}
setup(
    name="clifold",
    version=__version__,
    author="Jeff Yang",
    author_email="ydc.jeff@gmail.com",
    description="🚀 A CLI tool for scaffolding any Python Projects 🚀",
    long_description=long_description,
    long_description_content_type="text/markdown",
    keywords="scaffold python project cli",
    license="MIT",
    url="https://github.com/ydcjeff/clifold",
    packages=find_packages(),
    entry_points={
        "console_scripts": [
            "clif=clifold.clifold_main:cli"  # installs the ``clif`` command
        ],
    },
    install_requires=[
        "requests"
    ],
    extras_require=extras,
    tests_require=tests_require,
    python_requires=">=3.6.0",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Topic :: Utilities"
    ],
)
| [
"ydcjeff@outlook.com"
] | ydcjeff@outlook.com |
de05035213fb44b529fc7b0d220d6d7bbb7be20b | 6d6035a4c53747f7f07e27c070915eda11e40051 | /pelorus/wsgi.py | eae67edd8eefbacec9a95a92979a4bbca523ac68 | [] | no_license | rwtennant/pelorus | cb0b64ded59354078ce5c705edd444595e2aac5e | b3a5e3fa823da22bdb8960f0e60f4f9a6398f80a | refs/heads/master | 2022-12-10T17:09:17.396670 | 2020-09-14T18:53:02 | 2020-09-14T18:53:02 | 284,767,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
WSGI config for pelorus project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default the settings module so WSGI servers can import this file directly.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pelorus.settings')
# The module-level WSGI callable that application servers look up.
application = get_wsgi_application()
| [
"rwtennant@yahoo.com"
] | rwtennant@yahoo.com |
562189a4e1b12ddef35fde6af9de64619a14e4c2 | c8b649f91df14e082108290e0ff62510b5d4006d | /Main/signals.py | b7d075909e8f993296f98e59c49f5633fcde1aeb | [] | no_license | Emmastro/freemarketBackend | d7e20aab13af68757dccf20f7b44dcd4de385ffc | 13cf2f5e45d5af778f334d30b1dadf2c8a8558b5 | refs/heads/master | 2022-12-14T22:51:40.420688 | 2020-04-19T08:45:54 | 2020-04-19T08:45:54 | 248,330,401 | 1 | 0 | null | 2022-12-08T03:50:03 | 2020-03-18T20:00:35 | JavaScript | UTF-8 | Python | false | false | 227 | py | from Accounts.models import Reader
from django.core.signals import request_started
from django.dispatch import receiver
@receiver(request_started)
def count_connection(sender, environ, **kwargs):
    """Signal handler run whenever Django begins processing a request.

    The original decorator passed ``sender=Delegate``, but ``Delegate`` is not
    imported anywhere in this module (only ``Reader`` is), so importing this
    module raised ``NameError``.  ``request_started`` is emitted by the request
    handler class rather than a model, so no sender filter is applied here.
    """
    # TODO: implement actual connection counting.
    pass
| [
"emmamurairi@gmail.com"
] | emmamurairi@gmail.com |
55ef072d9d3d47d8603357377794fa880d8688c0 | e57d7785276053332c633b57f6925c90ad660580 | /sdk/logz/azure-mgmt-logz/azure/mgmt/logz/aio/operations/_tag_rules_operations.py | 8b601798141f717456f67ee5e23423c1c3b0e2ff | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | adriananeci/azure-sdk-for-python | 0d560308497616a563b6afecbb494a88535da4c5 | b2bdfe659210998d6d479e73b133b6c51eb2c009 | refs/heads/main | 2023-08-18T11:12:21.271042 | 2021-09-10T18:48:44 | 2021-09-10T18:48:44 | 405,684,423 | 1 | 0 | MIT | 2021-09-12T15:51:51 | 2021-09-12T15:51:50 | null | UTF-8 | Python | false | false | 15,827 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
# NOTE: AutoRest-generated code (see the file header); manual edits will be
# lost when the client is regenerated.
class TagRulesOperations:
    """TagRulesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~microsoft_logz.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name: str,
        monitor_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.MonitoringTagRulesListResponse"]:
        """List the tag rules for a given monitor resource.
        List the tag rules for a given monitor resource.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param monitor_name: Monitor resource name.
        :type monitor_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either MonitoringTagRulesListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~microsoft_logz.models.MonitoringTagRulesListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MonitoringTagRulesListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-10-01"
        accept = "application/json"
        # Builds either the first-page request or the follow-up request for a
        # server-provided next_link URL.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page and returns (next_link, async iterator of items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('MonitoringTagRulesListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches one page, mapping/raising errors for non-200 responses.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/tagRules'}  # type: ignore
    async def create_or_update(
        self,
        resource_group_name: str,
        monitor_name: str,
        rule_set_name: str,
        body: Optional["_models.MonitoringTagRules"] = None,
        **kwargs: Any
    ) -> "_models.MonitoringTagRules":
        """Create or update a tag rule set for a given monitor resource.
        Create or update a tag rule set for a given monitor resource.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param monitor_name: Monitor resource name.
        :type monitor_name: str
        :param rule_set_name:
        :type rule_set_name: str
        :param body:
        :type body: ~microsoft_logz.models.MonitoringTagRules
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MonitoringTagRules, or the result of cls(response)
        :rtype: ~microsoft_logz.models.MonitoringTagRules
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MonitoringTagRules"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-10-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
            'ruleSetName': self._serialize.url("rule_set_name", rule_set_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # The request body is optional; serialize it only when provided.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if body is not None:
            body_content = self._serialize.body(body, 'MonitoringTagRules')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('MonitoringTagRules', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/tagRules/{ruleSetName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        monitor_name: str,
        rule_set_name: str,
        **kwargs: Any
    ) -> "_models.MonitoringTagRules":
        """Get a tag rule set for a given monitor resource.
        Get a tag rule set for a given monitor resource.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param monitor_name: Monitor resource name.
        :type monitor_name: str
        :param rule_set_name:
        :type rule_set_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: MonitoringTagRules, or the result of cls(response)
        :rtype: ~microsoft_logz.models.MonitoringTagRules
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MonitoringTagRules"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-10-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
            'ruleSetName': self._serialize.url("rule_set_name", rule_set_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('MonitoringTagRules', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/tagRules/{ruleSetName}'}  # type: ignore
    async def delete(
        self,
        resource_group_name: str,
        monitor_name: str,
        rule_set_name: str,
        **kwargs: Any
    ) -> None:
        """Delete a tag rule set for a given monitor resource.
        Delete a tag rule set for a given monitor resource.
        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param monitor_name: Monitor resource name.
        :type monitor_name: str
        :param rule_set_name:
        :type rule_set_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-10-01"
        accept = "application/json"
        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
            'ruleSetName': self._serialize.url("rule_set_name", rule_set_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # A 202 Accepted response carries a 'location' header; expose it to the caller.
        response_headers = {}
        if response.status_code == 202:
            response_headers['location']=self._deserialize('str', response.headers.get('location'))
        if cls:
            return cls(pipeline_response, None, response_headers)
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logz/monitors/{monitorName}/tagRules/{ruleSetName}'}  # type: ignore
| [
"noreply@github.com"
] | adriananeci.noreply@github.com |
c88481f7914ffa25fb6778923dac0b9bf1b1a436 | b2643444ade5d54529a2e81398e23ba18eddceaf | /examples/pytorch/bgrl/model.py | a6ae03ca8b4a62a7e252835a5e167eabed1bd20e | [
"Apache-2.0"
] | permissive | CuiDachao/dgl | 49a3644180201deae61c59fe25a1cc05e1090883 | 1947d87dd77eabe5893e277a52ecf0f9eb2f1063 | refs/heads/master | 2022-09-10T06:51:45.344189 | 2022-08-23T04:08:32 | 2022-08-23T04:08:32 | 257,187,311 | 0 | 0 | Apache-2.0 | 2020-04-20T06:07:35 | 2020-04-20T06:07:33 | null | UTF-8 | Python | false | false | 8,023 | py | import dgl
import copy
import torch
from torch import nn
from torch.nn.init import ones_, zeros_
from torch.nn import BatchNorm1d, Parameter
from dgl.nn.pytorch.conv import GraphConv, SAGEConv
class LayerNorm(nn.Module):
    """Layer normalisation with optional per-graph statistics.

    With ``batch=None`` the input is normalised with a single mean/std over all
    elements of ``x``; with a ``batch`` assignment vector, statistics are
    computed separately per graph (one scalar mean/var per graph).
    """
    def __init__(self, in_channels, eps=1e-5, affine=True):
        super().__init__()
        self.in_channels = in_channels  # feature dimension, used for the affine parameters
        self.eps = eps  # numerical-stability constant added to the std / var
        if affine:
            # Learnable elementwise scale and shift, as in standard LayerNorm.
            self.weight = Parameter(torch.Tensor(in_channels))
            self.bias = Parameter(torch.Tensor(in_channels))
        else:
            self.register_parameter('weight', None)
            self.register_parameter('bias', None)
        self.reset_parameters()
    def reset_parameters(self):
        """Reset the affine transform to identity (weight=1, bias=0)."""
        ones_(self.weight)
        zeros_(self.bias)
    def forward(self, x, batch=None):
        """Normalise ``x``; ``batch`` maps each row of ``x`` to its graph index."""
        device = x.device
        if batch is None:
            # Single global mean/std over every element of x.
            x = x - x.mean()
            out = x / (x.std(unbiased=False) + self.eps)
        else:
            batch_size = int(batch.max()) + 1
            # One boolean node mask per graph in the batch.
            batch_idx = [batch == i for i in range(batch_size)]
            # norm[i] = (#nodes in graph i, clamped to >= 1) * feature dim,
            # i.e. the number of scalar elements contributing to graph i's stats.
            norm = torch.tensor([i.sum() for i in batch_idx], dtype=x.dtype).clamp_(min=1).to(device)
            norm = norm.mul_(x.size(-1)).view(-1, 1)
            tmp_list = [x[i] for i in batch_idx]
            # Per-graph scalar mean over all elements (sum over nodes, then features).
            mean = torch.concat([i.sum(0).unsqueeze(0) for i in tmp_list], dim=0).sum(dim=-1, keepdim=True).to(device)
            mean = mean / norm
            x = x - mean.index_select(0, batch.long())
            # NOTE(review): var is computed from tmp_list, which was built from the
            # UNcentered x -- so this is E[x^2], not E[(x-mean)^2]. It matches the
            # upstream reference code but differs from the textbook variance; also
            # note eps is added before sqrt here, but after std in the batch=None
            # branch. Confirm both are intentional before changing.
            var = torch.concat([(i * i).sum(0).unsqueeze(0) for i in tmp_list], dim=0).sum(dim=-1, keepdim=True).to(device)
            var = var / norm
            out = x / (var + self.eps).sqrt().index_select(0, batch.long())
        if self.weight is not None and self.bias is not None:
            out = out * self.weight + self.bias
        return out
    def __repr__(self):
        return f'{self.__class__.__name__}({self.in_channels})'
class MLP_Predictor(nn.Module):
    r"""One-hidden-layer MLP used as the prediction head.

    Maps an online-network projection to a prediction of the target projection.

    Args:
        input_size (int): Size of input features.
        output_size (int): Size of output features.
        hidden_size (int, optional): Size of the hidden layer. (default: :obj:`512`)
    """
    def __init__(self, input_size, output_size, hidden_size=512):
        super().__init__()
        layers = [
            nn.Linear(input_size, hidden_size, bias=True),
            nn.PReLU(1),
            nn.Linear(hidden_size, output_size, bias=True),
        ]
        self.net = nn.Sequential(*layers)
        self.reset_parameters()
    def forward(self, x):
        """Apply the MLP to ``x``."""
        return self.net(x)
    def reset_parameters(self):
        """Re-initialise every linear layer (PyTorch default: kaiming uniform)."""
        for module in self.modules():
            if isinstance(module, nn.Linear):
                module.reset_parameters()
class GCN(nn.Module):
    """GCN encoder: a stack of GraphConv -> BatchNorm1d -> PReLU per layer size pair."""
    def __init__(self, layer_sizes, batch_norm_mm=0.99):
        super(GCN, self).__init__()
        self.layers = nn.ModuleList()
        for dim_in, dim_out in zip(layer_sizes, layer_sizes[1:]):
            self.layers.extend([
                GraphConv(dim_in, dim_out),
                BatchNorm1d(dim_out, momentum=batch_norm_mm),
                nn.PReLU(),
            ])
    def forward(self, g):
        """Encode node features ``g.ndata['feat']`` of graph ``g``."""
        h = g.ndata['feat']
        for layer in self.layers:
            # GraphConv needs the graph; BatchNorm/PReLU act on features alone.
            h = layer(g, h) if isinstance(layer, GraphConv) else layer(h)
        return h
    def reset_parameters(self):
        """Re-initialise every sub-layer that supports it."""
        for layer in self.layers:
            if hasattr(layer, 'reset_parameters'):
                layer.reset_parameters()
class GraphSAGE_GCN(nn.Module):
    """Three-layer GraphSAGE (mean aggregator) encoder with skip connections.

    ``layer_sizes`` must be a 3-tuple ``(input_size, hidden_size,
    embedding_size)``.  Each stage is SAGEConv -> LayerNorm -> PReLU; the raw
    input is linearly projected and added as a skip connection before stages
    2 and 3.
    """
    def __init__(self, layer_sizes):
        super().__init__()
        input_size, hidden_size, embedding_size = layer_sizes
        self.convs = nn.ModuleList([
            SAGEConv(input_size, hidden_size, 'mean'),
            SAGEConv(hidden_size, hidden_size, 'mean'),
            SAGEConv(hidden_size, embedding_size, 'mean')
        ])
        # Linear projections of the raw input used as skip connections.
        self.skip_lins = nn.ModuleList([
            nn.Linear(input_size, hidden_size, bias=False),
            nn.Linear(input_size, hidden_size, bias=False),
        ])
        self.layer_norms = nn.ModuleList([
            LayerNorm(hidden_size),
            LayerNorm(hidden_size),
            LayerNorm(embedding_size),
        ])
        self.activations = nn.ModuleList([
            nn.PReLU(),
            nn.PReLU(),
            nn.PReLU(),
        ])
    def forward(self, g):
        """Encode node features of graph ``g``; honours a per-node 'batch' field if present."""
        x = g.ndata['feat']
        # 'batch' (graph assignment per node) enables per-graph LayerNorm statistics.
        if 'batch' in g.ndata.keys():
            batch = g.ndata['batch']
        else:
            batch = None
        h1 = self.convs[0](g, x)
        h1 = self.layer_norms[0](h1, batch)
        h1 = self.activations[0](h1)
        x_skip_1 = self.skip_lins[0](x)
        h2 = self.convs[1](g, h1 + x_skip_1)
        h2 = self.layer_norms[1](h2, batch)
        h2 = self.activations[1](h2)
        x_skip_2 = self.skip_lins[1](x)
        # Final stage sees both intermediate representations plus the input skip.
        ret = self.convs[2](g, h1 + h2 + x_skip_2)
        ret = self.layer_norms[2](ret, batch)
        ret = self.activations[2](ret)
        return ret
    def reset_parameters(self):
        """Re-initialise all sub-modules (PReLU weights back to their 0.25 default)."""
        for m in self.convs:
            m.reset_parameters()
        for m in self.skip_lins:
            m.reset_parameters()
        for m in self.activations:
            m.weight.data.fill_(0.25)
        for m in self.layer_norms:
            m.reset_parameters()
class BGRL(nn.Module):
    r"""BGRL architecture for Graph representation learning.

    Args:
        encoder (torch.nn.Module): Encoder network to be duplicated and used in both online and target networks.
        predictor (torch.nn.Module): Predictor network used to predict the target projection from the online projection.

    .. note::
        `encoder` must have a `reset_parameters` method, as the weights of the target network will be initialized
        differently from the online network.
    """
    def __init__(self, encoder, predictor):
        super(BGRL, self).__init__()
        # Online branch: trained by gradient descent.
        self.online_encoder = encoder
        self.predictor = predictor
        # Target branch: a copy with freshly re-initialised weights...
        self.target_encoder = copy.deepcopy(encoder)
        self.target_encoder.reset_parameters()
        # ...updated only via the momentum rule, never by gradients.
        for p in self.target_encoder.parameters():
            p.requires_grad = False
    def trainable_parameters(self):
        r"""Returns the parameters that will be updated via an optimizer."""
        return [*self.online_encoder.parameters(), *self.predictor.parameters()]
    @torch.no_grad()
    def update_target_network(self, mm):
        r"""Performs a momentum update of the target network's weights.

        Args:
            mm (float): Momentum used in moving average update.
        """
        pairs = zip(self.online_encoder.parameters(), self.target_encoder.parameters())
        for online_p, target_p in pairs:
            target_p.data.mul_(mm).add_(online_p.data, alpha=1. - mm)
    def forward(self, online_x, target_x):
        """Return (online prediction, detached target projection)."""
        online_q = self.predictor(self.online_encoder(online_x))
        with torch.no_grad():
            target_y = self.target_encoder(target_x).detach()
        return online_q, target_y
def compute_representations(net, dataset, device):
    r"""Pre-compute node representations for the whole dataset.

    Args:
        net: Encoder module; called once per graph in eval mode.
        dataset: Sequence of DGL graphs carrying a ``'label'`` node feature.
        device: Device the graphs are moved to before encoding.

    Returns:
        [torch.Tensor, torch.Tensor]: Concatenated representations and labels.
    """
    net.eval()
    # A single-graph dataset gets self-loops added; multi-graph datasets
    # are encoded as-is (matching the original transductive/inductive split).
    if len(dataset) == 1:
        graphs = [dgl.add_self_loop(dataset[0])]
    else:
        graphs = dataset
    reps, labels = [], []
    with torch.no_grad():
        for g in graphs:
            g = g.to(device)
            reps.append(net(g))
            labels.append(g.ndata['label'])
    return [torch.cat(reps, dim=0), torch.cat(labels, dim=0)]
| [
"noreply@github.com"
] | CuiDachao.noreply@github.com |
fd46cde2226a90b53793bb9f7121bb66dbeb6c8e | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnwretch.py | fe95b845f09eef13e6cbc0d05e65b9e23f9a92cb | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 2,136 | py | ii = [('BentJDO2.py', 3), ('CookGHP3.py', 3), ('MarrFDI.py', 2), ('CoolWHM2.py', 8), ('KembFFF.py', 2), ('GodwWSL2.py', 48), ('ChanWS.py', 2), ('SadlMLP.py', 22), ('FerrSDO3.py', 19), ('WilbRLW.py', 8), ('WilbRLW4.py', 9), ('RennJIT.py', 1), ('AubePRP2.py', 3), ('CookGHP.py', 5), ('MartHSI2.py', 2), ('LeakWTI2.py', 3), ('KembFJ1.py', 16), ('WilkJMC3.py', 1), ('WilbRLW5.py', 2), ('PettTHE.py', 1), ('MarrFDI3.py', 1), ('PeckJNG.py', 3), ('KnowJMM.py', 3), ('BailJD2.py', 22), ('AubePRP.py', 1), ('ChalTPW2.py', 3), ('WilbRLW2.py', 6), ('ClarGE2.py', 4), ('GellWPT2.py', 1), ('CarlTFR.py', 18), ('LyttELD.py', 4), ('CoopJBT2.py', 6), ('TalfTAC.py', 7), ('GrimSLE.py', 1), ('RoscTTI3.py', 8), ('AinsWRR3.py', 7), ('CookGHP2.py', 3), ('KiddJAE.py', 5), ('AdamHMM.py', 1), ('BailJD1.py', 31), ('RoscTTI2.py', 4), ('CoolWHM.py', 7), ('MarrFDI2.py', 1), ('CrokTPS.py', 3), ('ClarGE.py', 13), ('IrviWVD.py', 7), ('GilmCRS.py', 3), ('DaltJMA.py', 4), ('DibdTRL2.py', 2), ('AinsWRR.py', 4), ('CrocDNL.py', 3), ('MedwTAI.py', 9), ('LandWPA2.py', 1), ('WadeJEB.py', 8), ('FerrSDO2.py', 16), ('TalfTIT.py', 6), ('GodwWLN.py', 12), ('SoutRD2.py', 2), ('LeakWTI4.py', 4), ('LeakWTI.py', 14), ('MedwTAI2.py', 13), ('SoutRD.py', 1), ('DickCSG.py', 2), ('WheeJPT.py', 1), ('HowiWRL2.py', 1), ('BailJD3.py', 19), ('HogaGMM.py', 12), ('MartHRW.py', 9), ('MackCNH.py', 19), ('FitzRNS4.py', 22), ('CoolWHM3.py', 7), ('DequTKM.py', 2), ('FitzRNS.py', 4), ('BentJRP.py', 7), ('EdgeMHT.py', 4), ('BowrJMM.py', 6), 
('LyttELD3.py', 12), ('FerrSDO.py', 3), ('RoscTTI.py', 7), ('ThomGLG.py', 19), ('KembFJ2.py', 16), ('LewiMJW.py', 10), ('MackCNH2.py', 2), ('JacoWHI2.py', 2), ('HaliTBC.py', 1), ('WilbRLW3.py', 7), ('AinsWRR2.py', 7), ('JacoWHI.py', 2), ('ClarGE3.py', 8), ('RogeSIP.py', 8), ('MartHRW2.py', 5), ('DibdTRL.py', 8), ('FitzRNS2.py', 10), ('HogaGMM2.py', 3), ('MartHSI.py', 5), ('EvarJSP.py', 12), ('NortSTC.py', 8), ('SadlMLP2.py', 5), ('BowrJMM2.py', 7), ('BowrJMM3.py', 3), ('BeckWRE.py', 4), ('TaylIF.py', 12), ('WordWYR.py', 2), ('DibdTBR.py', 1), ('ChalTPW.py', 7), ('ThomWEC.py', 7), ('KeigTSS.py', 3), ('ClarGE4.py', 11), ('HowiWRL.py', 8)] | [
"varunwachaspati@gmail.com"
] | varunwachaspati@gmail.com |
22181b9fe8464921c05932796d73ede088aef55e | e82102580a5bd76e97ed607da7180faf9928cf7b | /barati/customers/views_cluster/save_main_preferences.py | 7d3a3cd1956fe2e4caf75ee317701323bfa60ada | [
"Apache-2.0"
] | permissive | aditi73/barati | 393d02de0e292a0e5a73c988944486396cb0ece1 | 09e1a0a1342aa8e9cf1e97f073f4a6472c5af415 | refs/heads/master | 2021-01-12T10:42:44.322211 | 2016-06-11T12:02:10 | 2016-06-11T12:02:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,586 | py | from django.shortcuts import render
from django.template import RequestContext
from django.views.generic import View
from django.http import HttpResponse
from customers import models as m
import sys, json, datetime
class Save_Main_Preferences(View):
    """Django (Python 2) view that saves a user's main wedding preferences
    (date, location, sublocation) from a POST request.

    NOTE(review): the class-level try/except below only catches exceptions
    raised while the class body itself is executed (i.e. while the ``def``
    statements run) -- it does NOT catch errors raised inside the methods at
    request time. Exceptions in post() will propagate to Django's handler.
    """
    try:
        def __init__(self):
            # No template is rendered by this view; it only returns JSON.
            #self.template_name = ''
            pass
        def get_context_data(self, **kwargs):
            """Return an (empty) template context; kept for View symmetry."""
            context = {}
            return context
        def change_date_format_for_db(self, unformatted_date):
            """Convert a 'DD-Mon-YYYY' string (e.g. '01-Jan-2016') to the
            'YYYY-MM-DD' format expected by the database.

            Returns None when the input is empty/None.
            """
            formatted_date = None
            if unformatted_date:
                formatted_date = datetime.datetime.strptime(unformatted_date, '%d-%b-%Y').strftime('%Y-%m-%d')
            return formatted_date
        def post(self, request, **kwargs):
            """Upsert the logged-in user's Main_Preferences row and return a
            JSON-encoded status string."""
            # Resolve the app-level user id from the authenticated username.
            user_id = m.Users.objects.get(username= request.user.username).id
            date = self.change_date_format_for_db(request.POST.get('main_preference_date'))
            location = request.POST.get('main_preference_location')
            sublocation = request.POST.get('main_preference_sublocation')
            main_preferences = m.Main_Preferences.objects.update_or_create( \
                #Filter on the basis of the user_id
                user_id=user_id, \
                #Create a new entry if new values or update if updated values
                defaults={'date':date, 'location':location, 'sublocation':sublocation}, \
            )
            message = "success_main_preferences_saved"
            return HttpResponse(json.dumps(message))
    except Exception as general_exception:
        # Only reached if defining the class body fails (see NOTE above).
        print general_exception
        print sys.exc_traceback.tb_lineno
| [
"iabhaygupta90@gmail.com"
] | iabhaygupta90@gmail.com |
35c040b07aa7bd0f4a380d0b4d587ddaf57a31e5 | 1a3234471bdd3aae422c8f205a4450b1bca5c1b7 | /test/functional/p2p_feefilter.py | 37c3ceff7dbe77eaa1d402d723849a82320085db | [
"MIT"
] | permissive | Ultracoins/Ultracoin | 09f172f5c629c30aeb52c13a340b29991cbcaeda | 08357fed7fd6a97816f846dac1628d247c4730da | refs/heads/main | 2023-08-04T17:43:46.869853 | 2021-09-14T19:40:34 | 2021-09-14T19:40:34 | 403,222,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,189 | py | #!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of feefilter messages."""
from decimal import Decimal
from test_framework.messages import MSG_TX, MSG_WTX, msg_feefilter
from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import UltracoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
class FeefilterConn(P2PInterface):
    """P2P connection stub that records whether a feefilter message arrived."""
    feefilter_received = False

    def on_feefilter(self, message):
        # Any feefilter from the node flips the flag; it is never cleared.
        self.feefilter_received = True

    def assert_feefilter_received(self, recv: bool):
        """Assert whether a feefilter message has been seen so far."""
        with p2p_lock:
            assert_equal(self.feefilter_received, recv)
class TestP2PConn(P2PInterface):
    """P2P connection stub collecting the (w)txids announced to us via inv."""

    def __init__(self):
        super().__init__()
        self.txinvs = []

    def on_inv(self, message):
        # Record every transaction announcement as a 64-char hex string;
        # non-transaction inventory entries are ignored.
        self.txinvs.extend(
            '{:064x}'.format(item.hash)
            for item in message.inv
            if item.type in (MSG_TX, MSG_WTX)
        )

    def wait_for_invs_to_match(self, invs_expected):
        """Block until exactly the expected invs (order-insensitive) arrive."""
        invs_expected.sort()
        self.wait_until(lambda: invs_expected == sorted(self.txinvs))

    def clear_invs(self):
        with p2p_lock:
            self.txinvs = []
class FeeFilterTest(UltracoinTestFramework):
    """Functional test: nodes honor BIP 133 feefilter messages, and peers
    with forcerelay permission are exempted from receiving them."""
    def set_test_params(self):
        self.num_nodes = 2
        # We lower the various required feerates for this test
        # to catch a corner-case where feefilter used to slightly undercut
        # mempool and wallet feerate calculation based on GetFee
        # rounding down 3 places, leading to stranded transactions.
        # See issue #16499
        # grant noban permission to all peers to speed up tx relay / mempool sync
        self.extra_args = [[
            "-minrelaytxfee=0.00000100",
            "-mintxfee=0.00000100",
            "-whitelist=noban@127.0.0.1",
        ]] * self.num_nodes
    def run_test(self):
        self.test_feefilter_forcerelay()
        self.test_feefilter()
    def test_feefilter_forcerelay(self):
        """Only peers WITHOUT forcerelay permission should be sent a feefilter."""
        self.log.info('Check that peers without forcerelay permission (default) get a feefilter message')
        self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(True)
        self.log.info('Check that peers with forcerelay permission do not get a feefilter message')
        self.restart_node(0, extra_args=['-whitelist=forcerelay@127.0.0.1'])
        self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(False)
        # Restart to disconnect peers and load default extra_args
        self.restart_node(0)
        self.connect_nodes(1, 0)
    def test_feefilter(self):
        """Txs below our announced fee filter must not be inv'ed to us;
        raising/clearing the filter re-enables relay."""
        node1 = self.nodes[1]
        node0 = self.nodes[0]
        miniwallet = MiniWallet(node1)
        # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
        miniwallet.generate(5)
        node1.generate(100)
        conn = self.nodes[0].add_p2p_connection(TestP2PConn())
        self.log.info("Test txs paying 0.2 sat/byte are received by test connection")
        txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000200'), from_node=node1)['wtxid'] for _ in range(3)]
        conn.wait_for_invs_to_match(txids)
        conn.clear_invs()
        # Set a fee filter of 0.15 sat/byte on test connection
        conn.send_and_ping(msg_feefilter(150))
        self.log.info("Test txs paying 0.15 sat/byte are received by test connection")
        txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000150'), from_node=node1)['wtxid'] for _ in range(3)]
        conn.wait_for_invs_to_match(txids)
        conn.clear_invs()
        self.log.info("Test txs paying 0.1 sat/byte are no longer received by test connection")
        txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000100'), from_node=node1)['wtxid'] for _ in range(3)]
        self.sync_mempools()  # must be sure node 0 has received all txs
        # Send one transaction from node0 that should be received, so that we
        # we can sync the test on receipt (if node1's txs were relayed, they'd
        # be received by the time this node0 tx is received). This is
        # unfortunately reliant on the current relay behavior where we batch up
        # to 35 entries in an inv, which means that when this next transaction
        # is eligible for relay, the prior transactions from node1 are eligible
        # as well.
        txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node0)['wtxid'] for _ in range(1)]
        conn.wait_for_invs_to_match(txids)
        conn.clear_invs()
        self.sync_mempools()  # must be sure node 1 has received all txs
        self.log.info("Remove fee filter and check txs are received again")
        conn.send_and_ping(msg_feefilter(0))
        txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node1)['wtxid'] for _ in range(3)]
        conn.wait_for_invs_to_match(txids)
        conn.clear_invs()
if __name__ == '__main__':
    # Standard functional-test entry point: run the test when invoked directly.
    FeeFilterTest().main()
| [
"vfpro7777@gmail.com"
] | vfpro7777@gmail.com |
8e10622d95da116d71a46fefd79306ca49995f9b | 220f5e95c84b1d0288777f276cfabb079e9fe824 | /python/gigasecond/gigasecond.py | e13d812e72e0a930e1e600c5f295a5eeaba4536b | [] | no_license | EwenFin/exercism_solutions | 3c1f2213a0d3989655c6f1918e76a97a742fb852 | c0f18201bd0603d074031ce3c51570f4321dbcb9 | refs/heads/master | 2020-03-20T05:53:41.079276 | 2018-06-13T16:16:19 | 2018-06-13T16:16:19 | 137,230,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | import datetime
def add_gigasecond(birth_date):
    """Return the moment exactly one gigasecond (10**9 seconds) after
    *birth_date* (a datetime.datetime or datetime.date supporting timedelta
    addition)."""
    return birth_date + datetime.timedelta(seconds=10 ** 9)
| [
"ewen.carr@ros.gov.uk"
] | ewen.carr@ros.gov.uk |
3d663d37b03f7370f829e314ff592048da8baadc | 2360cee220fa1d4df735e663c2324f6716800a4c | /allauth/facebook/migrations/0002_auto__add_facebookaccesstoken__add_unique_facebookaccesstoken_app_acco.py | b1a780d35611b2a2d0a1fcb44c5feb7d8c27a289 | [
"MIT"
] | permissive | sachingupta006/django-allauth | 709036a6a20f03fb7fb1d9ee555822526847e658 | 04a510f6b873cb3a54feca59cdd0c3e3ff9b9b5e | refs/heads/master | 2021-01-17T22:11:38.164739 | 2012-06-09T16:58:47 | 2012-06-09T16:58:47 | 3,551,445 | 5 | 5 | null | null | null | null | UTF-8 | Python | false | false | 7,146 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``FacebookAccessToken`` model with a
    unique (app, account) constraint to the ``facebook`` app."""
    def forwards(self, orm):
        """Apply the migration: create the table, then the unique index."""
        # Adding model 'FacebookAccessToken'
        db.create_table('facebook_facebookaccesstoken', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('app', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['facebook.FacebookApp'])),
            ('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['facebook.FacebookAccount'])),
            ('access_token', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ))
        db.send_create_signal('facebook', ['FacebookAccessToken'])
        # Adding unique constraint on 'FacebookAccessToken', fields ['app', 'account']
        db.create_unique('facebook_facebookaccesstoken', ['app_id', 'account_id'])
    def backwards(self, orm):
        """Revert the migration: drop constraint first, then the table
        (reverse order of forwards())."""
        # Removing unique constraint on 'FacebookAccessToken', fields ['app', 'account']
        db.delete_unique('facebook_facebookaccesstoken', ['app_id', 'account_id'])
        # Deleting model 'FacebookAccessToken'
        db.delete_table('facebook_facebookaccesstoken')
    # Frozen ORM snapshot generated by South at migration-creation time.
    # Do not edit by hand; it reflects the models as they existed then.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'facebook.facebookaccesstoken': {
            'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'FacebookAccessToken'},
            'access_token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['facebook.FacebookAccount']"}),
            'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['facebook.FacebookApp']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'facebook.facebookaccount': {
            'Meta': {'object_name': 'FacebookAccount', '_ormbases': ['socialaccount.SocialAccount']},
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'social_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'socialaccount_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['socialaccount.SocialAccount']", 'unique': 'True', 'primary_key': 'True'})
        },
        'facebook.facebookapp': {
            'Meta': {'object_name': 'FacebookApp'},
            'api_key': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'application_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'application_secret': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'socialaccount.socialaccount': {
            'Meta': {'object_name': 'SocialAccount'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['facebook']
| [
"raymond.penners@intenct.nl"
] | raymond.penners@intenct.nl |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.