index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
11,500 | dac16d8a11f95f8ec91a22d2fd9baca5d611a8a1 |
def solution(maze):
    """Count the steps a left-hand wall follower takes from (0, 0) to
    (N-1, N-1) in a square grid maze.

    maze[r][c] == 1 marks a wall; anything else is walkable.
    Headings cycle clockwise: 0=up, 1=right, 2=down, 3=left.
    """
    size = len(maze)
    # (row delta, col delta) per heading, clockwise starting at "up".
    deltas = ((-1, 0), (0, 1), (1, 0), (0, -1))
    row, col, heading = 0, 0, 1
    steps = 0
    while (row, col) != (size - 1, size - 1):
        # Try the cell on our left first, then rotate clockwise until open.
        heading = (heading + 3) % 4
        for _ in range(4):
            next_row = row + deltas[heading][0]
            next_col = col + deltas[heading][1]
            inside = 0 <= next_row < size and 0 <= next_col < size
            if inside and maze[next_row][next_col] != 1:
                row, col = next_row, next_col
                steps += 1
                break
            heading = (heading + 1) % 4
    return steps
# Smoke tests: print the step count for sample mazes (extra cases kept
# commented out by the original author).
print(solution([[0, 1, 0, 1], [0, 1, 0, 0], [0, 0, 0, 0], [1, 0, 1, 0]]))
# print(solution([[0, 1, 0, 0, 0, 0], [0, 1, 0, 1, 1, 0], [0, 1, 0, 0, 1, 0], [0, 1, 1, 1, 1, 0], [0, 1, 0, 0, 0, 0], [0, 0, 0, 1, 1, 0]]))
# print(solution([[0, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 0]]))
# print(solution([[0, 0, 0, 0, 0, 0], [1, 1, 1, 0, 1, 1], [0, 0, 0, 0, 0, 0], [1, 0, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0], [1, 1, 0, 1, 1, 0]]))
11,501 | 4af41560a916cd709a888dc108e2d92537007f1d | from django.shortcuts import render, redirect, HttpResponse
from django.views.generic import View
from .forms import *
from django.contrib.auth import login, authenticate, logout as dj_logout
from .models import *
from app.models import *
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import JsonResponse, HttpResponse, Http404
import os
from django.conf import settings
class ProfileView(View):
    # NOTE(review): this class is shadowed by the second ProfileView defined
    # later in this module, so this definition is dead code.
    def get(self, request):
        """Render the profile template without any context."""
        return render(request, 'profile/user-profile.html')
class SeekerSignUpView(View):
    """Registration view for job seekers."""
    def get(self, request):
        # Instantiate the form explicitly instead of relying on the template
        # engine calling the bare form class for us.
        form = SeekerSignUpForm()
        return render(request, 'register/seeker_signup.html', {'form': form})
    def post(self, request):
        form = SeekerSignUpForm(request.POST or None)
        if form.is_valid():
            user = form.save(commit=False)
            user.save()
            return redirect('login-user')
        # Invalid submission: re-render with the bound form and a hint.
        return render(request, 'register/seeker_signup.html', {'form': form,
                      'status': "Password not match or Password must be consist of number and character and lenght greater than 8"})
class OwnerSignUpView(View):
    """Registration view for business owners."""
    def get(self, request):
        # Instantiate the form explicitly instead of relying on the template
        # engine calling the bare form class for us.
        form = OwnerSignUpForm()
        return render(request, 'register/owner_signup.html', {'form': form})
    def post(self, request):
        form = OwnerSignUpForm(request.POST or None)
        if form.is_valid():
            user = form.save(commit=False)
            user.save()
            return redirect('login-user')
        # Invalid submission: re-render with the bound form and a hint.
        return render(request, 'register/owner_signup.html', {'form': form,
                      'status': "Password not match or Password must be consist of number and character and lenght greater than 8"})
class LoginView(View):
    """Email/password login view for all user types."""
    def get(self, request):
        # Instantiate the form explicitly instead of passing the bare class.
        form = LoginForm()
        return render(request, 'register/user-login.html', {'form': form})
    def post(self, request):
        form = LoginForm(request.POST or None)
        if form.is_valid():
            email = request.POST['email']
            password = request.POST['password']
            # NOTE(review): authenticate() is keyed by email here - assumes a
            # custom auth backend accepting an `email` kwarg; confirm config.
            user = authenticate(email=email, password=password)
            if user:
                login(request, user)
                return redirect('home')
        # Invalid form or failed authentication both fall through here.
        return render(request, 'register/user-login.html',
                      {'form': form, 'status': "Your password or username is incorrect"})
def user_logout(request):
    """Log the current user out and redirect to the login page."""
    dj_logout(request)
    return redirect('login-user')
class ProfileView(View):
    """Display and update the signed-in user's profile.

    NOTE(review): this class shadows the earlier ProfileView in this module;
    URL resolution uses this later definition.
    """
    def get(self, request):
        user = request.user
        if user.is_authenticated:
            try:
                profile = Profile.objects.get(user=user)
            except Profile.DoesNotExist:
                # First visit: create an empty profile so the form below
                # always has an instance to pre-populate from.
                profile = Profile.objects.create(user=user, pro_photo='', resume='', first_name='', last_name='',
                                                 degree_name='',
                                                 graduate_year='', father_name='', mother_name='',
                                                 gender='', religion='', marital_status='', nationality='',
                                                 phone_number='', date_of_birth='', address='', job_name='',
                                                 keywords='', salary_range='', job_type='')
                profile.save()
            # Pre-fill the edit form with the stored profile values.
            form = UserProfileForm(initial={
                'pro_photo': profile.pro_photo,
                'resume': profile.resume,
                'first_name': profile.first_name,
                'last_name': profile.last_name,
                'degree_name': profile.degree_name,
                'graduate_year': profile.graduate_year,
                'father_name': profile.father_name,
                'mother_name': profile.mother_name,
                'gender': profile.gender,
                'religion': profile.religion,
                'marital_status': profile.marital_status,
                'nationality': profile.nationality,
                'phone_number': profile.phone_number,
                'date_of_birth': profile.date_of_birth,
                'address': profile.address,
                'job_name': profile.job_name,
                'keywords': profile.keywords,
                'salary_range': profile.salary_range,
                'job_type': profile.job_type
            })
            return render(request, 'profile/user-profile.html', {'profile': profile, 'form': form})
        else:
            return redirect('login-user')
    def post(self, request):
        form = UserProfileForm(request.POST, request.FILES)
        if form.is_valid():
            profile = Profile.objects.get(user=request.user)
            # BUG FIX: the original indexed request.FILES directly, which
            # raises KeyError when no new file is uploaded; keep the stored
            # file in that case.
            if 'pro_photo' in request.FILES:
                profile.pro_photo = request.FILES['pro_photo']
            if 'resume' in request.FILES:
                profile.resume = request.FILES['resume']
            profile.first_name = request.POST['first_name']
            profile.last_name = request.POST['last_name']
            profile.father_name = request.POST['father_name']
            profile.mother_name = request.POST['mother_name']
            profile.degree_name = request.POST['degree_name']
            profile.graduate_year = request.POST['graduate_year']
            profile.gender = request.POST['gender']
            profile.religion = request.POST['religion']
            profile.marital_status = request.POST['marital_status']
            profile.nationality = request.POST['nationality']
            profile.phone_number = request.POST['phone_number']
            profile.date_of_birth = request.POST['date_of_birth']
            profile.address = request.POST['address']
            profile.job_name = request.POST['job_name']
            profile.keywords = request.POST['keywords']
            profile.salary_range = request.POST['salary_range']
            profile.job_type = request.POST['job_type']
            profile.save()
            return redirect('profile')
        else:
            return HttpResponse("Error")
class AllUserProfile(View):
    """Paginated list of seeker profiles for the candidate browser."""
    def get(self, request):
        candidates = Profile.objects.filter(user__is_seeker=True)
        job_types = ['Part Time', 'Full Time', 'Other']
        categories = Category.objects.all()
        page = request.GET.get('page', 1)
        pagination = Paginator(candidates, per_page=10)
        try:
            candidates = pagination.page(page)
        except PageNotAnInteger:
            candidates = pagination.page(1)
        except EmptyPage:
            # BUG FIX: the original called the unbound Paginator.page on the
            # class (TypeError); clamp to the last page of this paginator.
            candidates = pagination.page(pagination.num_pages)
        return render(request, 'profile/browse-candidates.html',
                      {'candidates': candidates, 'job_types': job_types, 'categories': categories})
def searchCandidate(request):
    """AJAX search over profiles by job name, job type or keywords."""
    if request.method == "POST":
        keyword = request.POST.get('keyword', '').lower()
        # Queryset union via |; icontains is already case-insensitive.
        search_items = Profile.objects.filter(job_name__icontains=keyword) | Profile.objects.filter(
            job_type__icontains=keyword) | Profile.objects.filter(keywords__icontains=keyword)
        return JsonResponse({'search_cand': list(search_items.values()), 'status': "OK"})
    # BUG FIX: the original returned None for non-POST requests, which makes
    # Django raise "view didn't return an HttpResponse".
    return JsonResponse({'search_cand': [], 'status': "ERROR"}, status=405)
class UserProfileDetail(View):
    """Detail page for a single candidate profile."""
    def get(self, request, id):
        # NOTE(review): Profile.objects.get raises DoesNotExist (HTTP 500)
        # for unknown ids; get_object_or_404 would return a 404 instead.
        candidate = Profile.objects.get(pk=id)
        return render(request, 'profile/userprofile-to-other.html', {'candidate': candidate})
def download(request, path):
    """Serve a file from MEDIA_ROOT for inline download.

    ``path`` comes from the URL and is untrusted: the resolved location is
    verified to still live under MEDIA_ROOT before opening, preventing
    ``../`` path traversal.
    """
    media_root = os.path.abspath(settings.MEDIA_ROOT)
    file_path = os.path.abspath(os.path.join(media_root, path))
    # SECURITY: reject any path that escapes the media directory.
    if os.path.commonpath([media_root, file_path]) != media_root:
        raise Http404
    if os.path.exists(file_path):
        with open(file_path, 'rb') as fh:
            # NOTE(review): the content type is hard-coded (kept for backward
            # compatibility); mimetypes.guess_type would be more accurate.
            response = HttpResponse(fh.read(), content_type="application/vnd.ms-excel")
            response['Content-Disposition'] = 'inline; filename=' + os.path.basename(file_path)
            return response
    raise Http404
class AllOwnerProfile(View):
    """Paginated list of owner profiles for the company browser."""
    def get(self, request):
        companies = Profile.objects.filter(user__is_owner=True)
        job_types = ['Part Time', 'Full Time', 'Other']
        categories = Category.objects.all()
        page = request.GET.get('page', 1)
        pagination = Paginator(companies, per_page=10)
        try:
            companies = pagination.page(page)
        except PageNotAnInteger:
            companies = pagination.page(1)
        except EmptyPage:
            # BUG FIX: the original called the unbound Paginator.page on the
            # class (TypeError); clamp to the last page of this paginator.
            companies = pagination.page(pagination.num_pages)
        return render(request, 'profile/browse-company.html',
                      {'companies': companies, 'job_types': job_types, 'categories': categories})
def searchCompany(request):
    """AJAX search over profiles by job name, job type or keywords."""
    if request.method == "POST":
        keyword = request.POST.get('keyword', '').lower()
        # Queryset union via |; icontains is already case-insensitive.
        search_items = Profile.objects.filter(job_name__icontains=keyword) | Profile.objects.filter(
            job_type__icontains=keyword) | Profile.objects.filter(keywords__icontains=keyword)
        return JsonResponse({'search_cand': list(search_items.values()), 'status': "OK"})
    # BUG FIX: the original returned None for non-POST requests, which makes
    # Django raise "view didn't return an HttpResponse".
    return JsonResponse({'search_cand': [], 'status': "ERROR"}, status=405)
class CompanyDetail(View):
    """Detail page for a single company profile."""
    def get(self, request, id):
        # NOTE(review): renders the candidate template and context key -
        # looks like a copy-paste from UserProfileDetail; confirm intended.
        candidate = Profile.objects.get(pk=id)
        return render(request, 'profile/userprofile-to-other.html', {'candidate': candidate})
|
11,502 | 59028563e4bc3e698179dceb0bf67190cda6643a | # Generated by Django 2.2.5 on 2020-02-21 12:04
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a slug field to the Product model.

    preserve_default=False: the default of 1 only back-fills existing rows
    during this migration and is not kept on the model definition.
    """
    dependencies = [
        ('home', '0007_auto_20200220_0729'),
    ]
    operations = [
        migrations.AddField(
            model_name='product',
            name='slug',
            field=models.SlugField(default=1),
            preserve_default=False,
        ),
    ]
|
11,503 | 682d6e0d9866afe5defb3c0fec8a78ef0167d044 | # -*- coding: utf-8 -*-
# Read a duration in seconds from stdin and print it as H:M:S
# (no zero padding, matching the original output format).
total = int(input())
horas, resto = divmod(total, 3600)
minutos, segundos = divmod(resto, 60)
print('{0:d}:{1:d}:{2:d}'.format(horas, minutos, segundos))
|
11,504 | 1bd356d2666da63fca9e62178c37e9b03d006dd5 | import sys,os
import math
from optparse import OptionParser
# define options
# Python 2 driver: runs matbudget_log_reader.exe over every phi step of a
# tower's log files and collects the results into one text file.
parser = OptionParser()
parser.add_option("-t", "--tower", dest="tower", default=28, help="tower number")
parser.add_option("-b", "--border", dest="border", default=0, help="tower border (0 = center, -1 = low eta, 1 = high eta")
parser.add_option("-p", "--phistep", dest="phistep", default=1, help="phi step")
parser.add_option("-d", "--dir", dest="dir", default="phi1", help="file directory")
(options, args) = parser.parse_args()
PHISTEP=int(options.phistep)
TOWERNUM=int(options.tower)
# Border suffix: "hi"/"lo" for the high/low-eta edge, empty for center.
TBORNAME=""
if int(options.border)==1:
    TBORNAME="hi"
elif int(options.border)==-1:
    TBORNAME="lo"
# Negative tower numbers are encoded with an "m" prefix in file names.
TOWERNAME=str(abs(TOWERNUM))
if TOWERNUM<0:
    TOWERNAME="m"+TOWERNAME
output = open("log_results_tower"+TOWERNAME+TBORNAME+"_phi"+str(PHISTEP)+".txt",'w')
# NOTE(review): 360/PHISTEP relies on Python 2 integer division; under
# Python 3 this would need 360//PHISTEP.
for PHINUM in range(0,360/PHISTEP):
    #phi spans 0 to 2pi
    phival = (PHISTEP*PHINUM)*math.pi/180
    cmd = "./matbudget_log_reader.exe "+options.dir+"/detailedInfo_tower"+TOWERNAME+TBORNAME+"_phi"+str(PHISTEP)+"_"+str(PHINUM)+".log "+options.dir+"/log_matbudget_tower"+TOWERNAME+TBORNAME+"_phi"+str(PHISTEP)+"_"+str(PHINUM)+".txt"
    print cmd
    pipe = os.popen(cmd)
    result = pipe.read().rstrip()
    # pipe.close() returns None on success; skip rows whose reader failed.
    exitcode = pipe.close()
    if exitcode!=None: continue
    result = str(TOWERNUM)+"\t"+str(phival)+"\t"+result
    output.write(result+"\n")
output.close()
|
11,505 | a57b641b0771c551351ce4984848775f30be467a | from flask import render_template, request, Blueprint, flash, redirect, url_for, make_response, current_app
from src.models import SignIn, User
from src import db
from src.users.forms import SignInForm, ContactForm
from src.services import business_url_return, EmailService, create_cookie, grab_cookie
from src.users.decorators import verify_business
main = Blueprint('main', __name__)
@main.route("/")
@main.route("/home")
def home():
    """Render the landing page."""
    return render_template('home.html')
@main.route("/about")
def about():
    """Render the static About page."""
    return render_template('about.html', title='About')
@main.route("/contact", methods=['GET', 'POST'])
def contact():
    """Contact form; emails the site admin on a valid submission.

    NOTE(review): the email is only sent when FLASK_ENV == 'production',
    but the success page is rendered either way - confirm this is intended
    (presumably to avoid sending mail from dev environments).
    """
    form = ContactForm()
    if form.validate_on_submit():
        if current_app.config["FLASK_ENV"] == 'production':
            body_html = render_template(
                'mail/user/contact_email.html',
                name=form.name.data,
                message=form.message.data,
                email=form.email.data
            )
            body_text = render_template(
                'mail/user/contact_email.txt',
                name=form.name.data,
                message=form.message.data,
                email=form.email.data
            )
            EmailService.email('admin@c-sign.in', 'New message.', body_html, body_text)
        return render_template('success.html', type='email')
    return render_template('contact.html', title='Contact Us', form=form)
@main.route("/signin/<string:business_name>", methods=['GET', 'POST'])
@verify_business
def sign_in(business_name):
    """
    Main sign in function.
    Will search the database for existing business taken from the url, if not found will return 404.
    If user has previously signed in within the last 4 hours they will not need to sign in again as a
    cookie will store their previous login.
    :param business_name:
    :return:
    """
    req = request  # NOTE(review): unused local - candidate for removal.
    business = User.query.filter_by(business_url=business_name).first_or_404()
    logo = url_for('static', filename='profile_pics/' + business.logo)
    form = SignInForm()
    if form.validate_on_submit():
        # Record the guest's sign-in against this business.
        new_sign_in = SignIn(
            first_name=form.first_name.data,
            last_name=form.last_name.data,
            email=form.email.data,
            phone=form.phone_number.data,
            symptoms=form.symptoms.data,
            signup=form.sign_up.data,
            user_id=business
        )
        db.session.add(new_sign_in)
        db.session.commit()
        if business.menu_url is not None:
            bu = business_url_return(business.menu_url)
            res = make_response(redirect(bu))
            # Set sign in to 4 hours so guest can re-scan the code
            res.set_cookie(business_name, 'signed_in', secure=True, max_age=60 * 60 * 4)
            if not request.cookies.get('csign'):
                create_cookie(res, form)
            return res
        else:
            flash('You have been signed in!', 'success')
            return render_template('signin.html', logo=logo, business_name=business_name, form=form)
    elif request.method == 'GET':
        # NOTE(review): every GET is redirected straight to the menu URL, so
        # the sign-in page below is only reachable on an invalid POST; also
        # business.menu_url may be None here - confirm business_url_return
        # handles that.
        bu = business_url_return(business.menu_url)
        res = make_response(redirect(bu))
        return res
    if request.cookies.get('csign'):
        # pull cookie information
        grab_cookie(form, request)
    if request.cookies.get(business_name):
        # Guest already signed in within the cookie lifetime.
        if business.menu_url is not None:
            bu = business_url_return(business.menu_url)
            res = make_response(redirect(bu))
            return res
        return render_template('signin.html', logo=logo, business_name=business.business_name, form=form)
    return render_template('signin.html', logo=logo, business_name=business.business_name, form=form)
|
11,506 | 47b81fa78bf88e0eadf627e0c92a848715af73d1 | # Generated by Django 2.2.4 on 2019-08-18 14:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make RecipeIngredient.quantity and RecipeIngredient.unit nullable."""
    dependencies = [
        ('foods', '0010_auto_20190818_1434'),
    ]
    operations = [
        migrations.AlterField(
            model_name='recipeingredient',
            name='quantity',
            field=models.CharField(max_length=255, null=True),
        ),
        migrations.AlterField(
            model_name='recipeingredient',
            name='unit',
            field=models.CharField(max_length=255, null=True),
        ),
    ]
|
11,507 | 93d23f282427712ec4f662f9686bb46ed44f71df | from models.simulation.Simulate import simulate_money_line
from models.atp_tennis.TennisMatchMoneyLineSklearnModels import bet_func
import pandas as pd
if __name__ == '__main__':
    # test edge cases
    # Fixed model probabilities and outcomes for a deterministic simulation.
    prob_pos = [0.6, 0.6, 0.6, 0.6, 0.6, 0.6]
    actual = [1, 1, 0, 1, 0, 1]
    parameters = {
        'max_loss_percent': 0.05
    }
    test_data = pd.DataFrame(data =
                             {'max_price1': [100, 100, 100, 100, 100, 100],
                              'max_price2': [-100, -100, -100, -100, -100, -100],
                              'field': [1,2,3,4,5,6],
                              'field2': ['something', 'else', 'id', '','as', '']
                              })
    test_return, num_bets = simulate_money_line(lambda j: prob_pos[j], lambda j: actual[j], lambda _: None,
                                                bet_func(0), test_data, parameters,
                                                'max_price', 5, sampling=0, shuffle=True, verbose=False)
    print('Final test return:', test_return, ' Num bets:', num_bets)
    print('---------------------------------------------------------')
    # test general cases
    print('Test data:', test_data)
    test_data.sort_values(by=['field2'], kind='mergesort', inplace=True)
    print('Sorted test data:', test_data)
    # NOTE(review): reset_index is not inplace here and the returned frame is
    # discarded, so test_data keeps its sorted index - confirm intended.
    test_data.reset_index(drop=True)
    print('Test data reset index:', test_data)
    print('Sorted row 0:', test_data['field'][0])
    # more tests
|
11,508 | 558e62159759c15224a0a404ce157b7f2910afad | ###
## Reads a pattern csv file and returns the most common grid passwords
##
## D.E. Budzitowski 150876
###
import sys,csv,collections,Image
def main(argv):
## Error Check
if len(argv) is not 1 or (int(argv[0]) is not 3 and int(argv[0]) is not 4):
exit("incorrect args")
## Path to file
s = str(argv[0]) + 'x' + str(argv[0])
rootPath = "./analysis/" + s + "/"
path = rootPath + s + "_data.csv"
## Open csv
file = open(path)
reader = csv.DictReader(file)
pattern_image = {}
patternList = []
for row in reader:
if(row['pattern'] is not 'X' and row['type'] is not 'r'):
patternList.append(row['pattern'])
pattern_image[row['pattern']] = row['image']
counter = collections.Counter(patternList)
i = 1
for patt in counter.most_common(5):
print patt
print pattern_image[patt[0]]
im = Image.open(pattern_image[patt[0]])
im.show()
path = "./analysis/common/" + s + '-' + str(i) + ".bmp"
im.save(path)
i += 1
if(__name__ == "__main__"):
main(sys.argv[1:])
|
11,509 | e962aeb4ba59e49a30b07812de5ac36392d763c1 | #!/usr/bin/python3
import random
from typing import List
from player import Player
from games import snake, hangman, skribble
class Main(object):
    """Console game launcher: pick a game, register players, run rounds
    and announce a winner."""
    def __init__(self):
        self.players: List = []
        # Available games, shown in this order in the menu.
        self.games = [
            skribble.Skribble(),
            hangman.Hangman(),
            snake.Snake()
        ]
    def set_game(self):
        """Print the game menu and return the game chosen by index."""
        print('Select a game:')
        for i, game in enumerate(self.games):
            print('[%i] ' % (i, ) + game.get_info(key='name'))
        choice = int(input('Spiel: '))
        return self.games[choice]
    def set_players(self, player_min: int, player_max: int):
        """
        Prompt for names until player_max is reached or an empty name is
        entered (only accepted once at least player_min players exist).
        :param player_min: minimum required players
        :param player_max: maximum required players
        """
        name: str = ' '
        print('Please give between %i and %i names for your players'
              % (player_min, player_max))
        while (name != '') and (len(self.players) < player_max):
            name = input('Players {}: '.format(len(self.players)+1))
            if name != '':
                self.players.append(Player(name))
            elif len(self.players) < player_min:
                # Too few players yet: force another prompt iteration.
                name = ' '
        print()
        print('{} players registered.'.format(len(self.players)))
        print()
    def get_winner(self):
        """
        Print every player's stats and return the player with most points.
        """
        winner: Player = Player('none')
        points_winner = 0
        for player in self.players:
            for key, value in player.get_stats().items():
                print('{}: {}'.format(key, value))
                if key == 'points':
                    if value >= points_winner:
                        # BUG FIX: the original never updated points_winner,
                        # so the last player iterated always "won".
                        points_winner = value
                        winner = player
        print()
        print('The winner is: ' + winner.get_name())
        return winner
    def main(self):
        """Run one full game session."""
        print('Welcome to Lahoumy.')
        game = self.set_game()
        print()
        self.set_players(game.get_info(key='player_min'),
                         game.get_info(key='player_max'))
        # BUG FIX: computed after set_players(); the original evaluated this
        # while self.players was still empty, so multiplayer was always False.
        multiplayer: bool = len(self.players) > 1
        game.start()
        # get random player to start
        player = self.players[random.randint(0, len(self.players)-1)]
        while game.get_running():
            # game round
            if multiplayer:
                print('{} plays now'.format(player.get_name()))
            # play one game round
            won = game.play(self.players)
            # add points to the players
            player.add_round(1)
            if multiplayer:
                player_new: Player = game.get_winner_round(player)
                if player_new is not None:
                    player.add_point(1)
                    player_new.add_point(2)
                    # the round winner starts the next round
                    player = player_new
            else:
                # in single player
                if won:
                    player.add_point(1)
                if input('New round? (y)') != 'y':
                    break
            print()
        print()
        game.end()
        self.get_winner()
# Script entry point.
if __name__ == '__main__':
    Main().main()
|
11,510 | 71c186fbe3a9784352f856c4b92a5c333d25f946 | from util import input_rows
def largest_value(instructions):
    """Evaluate register instructions (Advent of Code 2017, day 8).

    Each instruction looks like "b inc 5 if a > 1"; all registers start at
    0. Returns (largest register value at the end, largest value any
    register ever held during execution).

    BUG FIX: the original exec'd assignments into the function's locals()
    dict, which is CPython-implementation-specific and breaks under the
    PEP 667 locals() semantics of Python 3.13+. Registers now live in an
    explicit dict used as the exec namespace.
    """
    def _translate(instruction):
        # "b inc 5 if a > 1" -> "if a > 1: b += 5"
        instruction = instruction.replace('inc', '+=').replace('dec', '-=')
        left, right = instruction.split(' if ')
        return f'if {right}: {left}'
    registers = {instruction.split()[0]: 0 for instruction in instructions}
    largest_ever = 0
    for instruction in instructions:
        # Trusted puzzle input only - exec is part of the puzzle's design.
        exec(_translate(instruction), {}, registers)
        largest_ever = max(largest_ever, max(registers.values()))
    largest_at_end = max(registers.values()) if registers else 0
    return largest_at_end, largest_ever
def _largest_int(values):
ints = [value for value in values if isinstance(value, int)]
return max(ints) if ints else 0
def register_names(instructions):
    """Collect the register names: the first token of each instruction."""
    names = set()
    for line in instructions:
        names.add(line.split()[0])
    return names
def translate(instruction):
    """Rewrite "b inc 5 if a > 1" as the Python statement "if a > 1: b += 5"."""
    rewritten = instruction.replace('inc', '+=').replace('dec', '-=')
    action, condition = rewritten.split(' if ')
    return 'if ' + condition + ': ' + action
# Entry point: feed day-8 puzzle input through the evaluator.
if __name__ == '__main__':
    print(largest_value(input_rows(8)))
|
11,511 | 7dc822da0f55722da70ecd1b2b6432f9cd99ab6b | import json
# Load and print person records from a local JSON file.
with open('persons.json', 'r') as f:
    # Expected shape: a JSON array of objects with at least "gender" and
    # "name" keys - TODO confirm against persons.json.
    my_dict = json.load(f)
for distro in my_dict:
    print(distro['gender'])
    print(distro['name'])
|
11,512 | 91a4c0453cc58b567892c1daac6dc9efec9b5b3a | #---------------------------------------------------------------------------
# Twitter Streaming Bot - Copyright 2017, Leo Brack, All rights reserved.
#---------------------------------------------------------------------------
# This code tracks keyword mentions by streaming real time data from the
# the twitter API.
# The number of tweets per second are then streamed to the plotly interface
# allowing for real time insight into keyword mentions
# To view plot visit: https://plot.ly/~lbrack1/4
#
# --------------------------------------------------------------------------
import tweepy,time,json,datetime
import plotly
import plotly.plotly as py
import plotly.tools as tls
import plotly.graph_objs as go
import MySQLdb
# Import twitter API keys and words to track from "authentication.py"
from authentication import authentication
from authentication import configure
# Global variables to keep track of tweets
num_tweets = 0
num_points = 0
start_time = time.time()
elapsed_time = 0
total_time = 0
x = []
y = []
#---------------------------------------------------------------------------
# DATABASE FUNCTIONS
#---------------------------------------------------------------------------
# This function takes the 'created_at', 'text', 'screen_name' and 'tweet_id' and stores it
# into a MySQL database
# This function takes the 'created_at', 'text', 'screen_name' and 'tweet_id' and stores it
# into a MySQL database
def store_data(created_at, text, screen_name, tweet_id):
    """Insert one tweet row into the crypto_db.twitter table.

    SECURITY(review): database credentials are hard-coded here; move them to
    configuration or environment variables.
    """
    db=MySQLdb.connect(host='localhost', user='leobrack', passwd='password', db='crypto_db', charset="utf8mb4")
    cursor = db.cursor()
    # Parameterised query: MySQLdb escapes the values (no SQL injection).
    insert_query = "INSERT INTO twitter (tweet_id, screen_name, created_at, text) VALUES (%s, %s, %s, %s)"
    #try:
    cursor.execute(insert_query, (tweet_id, screen_name, created_at, text))
    db.commit()
    cursor.close()
    db.close()
    #except:
    #print "Unexpected error when saving tweet to database", sys.exc_info()[0]
    #return
#---------------------------------------------------------------------------
# Tweeter Streamer Class
#---------------------------------------------------------------------------
# Define class to handle incoming tweets
class MyListener(tweepy.StreamListener):
def __init__(self, api = None, update_time = 5):
self.api = api
self.update_time = update_time
# This function is called when we recieve a tweet
def on_data(self, data):
try:
# Get time stamp for tweet
global start_time
elapsed_time = time.time() - start_time
#Load data from json format
tweet = json.loads(data)
text = tweet['text']
screen_name = tweet['user']['screen_name']
tweet_id = tweet['id']
created_at = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
#Filter tweet, uncomment line below for filtering
#self.filter_tweet(tweet)
#Define counter as global variable
global num_tweets
num_tweets += 1 #Add 1 to count as we have a new tweet!
store_data(created_at, text, screen_name, tweet_id)
# Convert UTF-8 to ASCII and print details to screen
#print '@%s: %s\n' % (tweet['user']['screen_name'], tweet['text'].encode('ascii', 'ignore'))
# Save data to file
#self.output.write(data)
# Print stats and plot every "update_time"
# If there is no tweet recieved within this time block the plot will not update
# Instead it will update next time it recieves a tweet
if elapsed_time - self.update_time() > 0:
# Keep track of the number of points in data array
global num_points
num_points = num_points + 1
# Calculate tweets per second
tweets_per_sec = num_tweets/elapsed_time
x.append(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
y.append(tweets_per_sec)
# Send data to the plotly server to plot
s.write(dict(x=x, y=y))
self.print_stats(num_tweets,elapsed_time)
start_time = time.time()
num_tweets = 0
# Keep a maximum of x points on graph
if (num_points > 10):
x.pop(0) # Remove first data point
y.pop(0)
return True
except Exception, e:
print e
pass
# Function to filter out retweets
def filter_tweet(self,tweet):
if 'RT' in tweet['text']:
print 'Retweet'
return
else:
pass
# Function to print statistics
def print_stats(self,counter,elapsed_time):
print "-----------------------"
print "Elapsed Time: ", elapsed_time
print "Number of tweets: ", counter
print "Tweets/second: %.2f" % (counter/elapsed_time)
# Error handling
def on_error(self, status):
print(status)
return True
#---------------------------------------------------------------------------
# Main
#---------------------------------------------------------------------------
if __name__ == '__main__':
    #---------------------------------------------------------------------------
    # PLOTLY SET UP
    #---------------------------------------------------------------------------
    stream_ids = tls.get_credentials_file()['stream_ids']
    # Get stream id from stream id list
    stream_id = stream_ids[0]
    # Make instance of stream id object
    stream_1 = go.Stream(
        token=stream_id, # link stream id to 'token' key
        maxpoints=10 # keep a max of 10 pts on screen
    )
    # Initialize trace of streaming plot by embedding the unique stream_id
    trace1 = go.Scatter(
        x=[],
        y=[],
        mode='lines+markers',
        stream=stream_1 # (!) embed stream id, 1 per trace
    )
    data = go.Data([trace1])
    # Add title to layout object
    layout = go.Layout(title='Frequency (tweets/sec) of tweets mentioning bitcoin', xaxis={'title':'Time'}, yaxis={'title':'Mentions per Second'})
    # Make a figure object
    fig = go.Figure(data=data, layout=layout)
    # Send fig to Plotly, initialize streaming plot, open new tab
    py.iplot(fig, filename='Bitcoin Mentions')
    # Provide the stream link object the same token that's associated with the trace we wish to stream to
    s = py.Stream(stream_id)
    # Open a connection
    s.open()
    #---------------------------------------------------------------------------
    # STREAMER SET UP
    #---------------------------------------------------------------------------
    auth = authentication()
    config = configure()
    # Import words to track as variable 'track'
    track = config.gettrack_1()
    # BUG FIX: the original assigned the bound method itself
    # (config.getupdate_time) instead of calling it; the duplicated
    # track/update_time assignments further down were also removed.
    update_time = config.getupdate_time()
    consumer_key = auth.getconsumer_key()
    consumer_secret = auth.getconsumer_secret()
    access_token = auth.getaccess_token()
    access_token_secret = auth.getaccess_token_secret()
    # tweepy handles accessing the API
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.secure = True
    auth.set_access_token(access_token, access_token_secret)
    api = tweepy.API(auth)
    twitter_stream = tweepy.Stream(auth, MyListener(api, update_time))
    # Start streaming tweets from twitter
    twitter_stream.filter(track = track)
    #192.168.0.104
|
11,513 | b76de5a900151a387b058faca063cbca48730196 | __author__ = 'Devesh Bajpai'
'''
https://codeforces.com/problemset/problem/1358/B
Solution: Carefully observing the question, we can see that we don't need to summon the grannies
in batches, rather can do them all at once as long as their constraint of enough grannies present
in the courtyard is satisfied. This would produce the same result as summoning them in batches as
explained in the question. We sort the array and iterate in descending order. This way, for any
value which is less that or equal to its index (+1 since question works on 1 - indexed), that is the
breakage point and all the grannies before that can be summoned. The result returned is i + 2,
since i + 1 takes care of 1-indexed quantities of grannies and that extra one is for the host.
Also arr[i] <= (i + 1) can be understood as ith granny needing i + 1 grannies in the yard, which
excludes herself but can include the host, therefore i + 1 - 1 + 1 = i + 1.
'''
def solve(n, arr):
    """Return the maximum number of people (grannies + host) in the yard.

    Sort ascending and scan from the largest requirement down: the first
    index i with arr[i] <= i + 1 means grannies 0..i can all be summoned
    in a single batch, giving i + 1 grannies plus the host = i + 2 people.
    If no granny can be satisfied, only the host (1) remains.
    """
    arr.sort()
    # range() instead of xrange() keeps this working on both Python 2 and 3
    # with identical results.
    for i in range(n - 1, -1, -1):
        if arr[i] <= (i + 1):
            return i + 2
    return 1
# Python 2 entry point: read t test cases (n, then n space-separated
# requirements) and print one answer per case.
if __name__ == "__main__":
    t = int(raw_input())
    results = list()
    for _ in xrange(0, t):
        n = int(raw_input())
        arr = map(int, raw_input().split(" "))
        results.append(solve(n, arr))
    for result in results:
        print result
|
11,514 | 77f1fce7a8ff9511df3eea6d50c43e42feb9d218 | """
Example of a map-reduce type operation where a list of A_i is mapped to a list of B_i
and then reduced to produce C.
The B_i are stored in intermediate values, and we wish to delete them after C has been computed.
In this example we use check_valid="shallow" so the deletion of B_i does not cause the
re-computation of C, but at the same time a change in any A_i will.
Run:
redun run map_reduce.py main
# re run to verify that all jobs were cached
redun run map_reduce.py main
# modify a value in the list ["A_1", "A_2", "A_3"]
# Verify that the computation is re-triggered
redun run map_reduce.py main
"""
from typing import List
from redun import task, File
from redun import functools
redun_namespace = "redun.examples.memoization_patterns"
@task()
def generate_a_values() -> List[str]:
    """
    Generate the list of A_i input strings.
    A change in this list invalidates the cache and re-triggers the
    downstream computation of C.
    """
    return ["A_1", "A_2", "A_3"]
@task()
def map_a_to_b(a_val: str) -> File:
    """
    Map "A_i" to "B_i" and store the resulting string in a file.
    Deletion of these files should *not* re-trigger the computation of C.
    """
    # Create B_i from A_i (reuses A_i's trailing index digit).
    b_val = f"B_{a_val[-1]}"
    # Create a file to store the resulting b
    file_path = f"./b_file_{b_val[-1]}"
    b_file = File(file_path)
    b_file.write(b_val)
    return b_file
@task()
def delete_b_files(intermediate_files: List[File]) -> None:
    """
    Delete every File in the given list from storage.
    """
    for f in intermediate_files:
        f.remove()
@task()
def reduce_b_files_into_c(files: List[File]) -> str:
    """
    Load each file in the list and join their contents with spaces.
    (Annotation fixed from the literal ``[List[File]]`` to ``List[File]``.)
    """
    b_strings = [file.read() for file in files]
    # Concatenate
    c_val = " ".join(b_strings)
    return c_val
@task()
def use_c(c_val: str) -> str:
    """
    Print the reduced string and pass it through unchanged.
    """
    print(c_val)
    return c_val
@task(check_valid='shallow')
def map_reduce_no_caching(a_values: List[str]) -> str:
    """
    Map each A_i to a B_i file, reduce the files into C, then delete the
    intermediates. check_valid='shallow' means deleting the B files does
    not invalidate the cached C, while a change in a_values does.
    """
    # Maps the values of A to a list of file promises that will be evaluated later
    b_val_file_promises = [map_a_to_b(a) for a in a_values]
    # Promises to reduce the values from the files to generate a string c
    c = reduce_b_files_into_c(b_val_file_promises)
    # Indicates that c should be evaluated before delete_b_files. This way b_files are used
    # before deletion
    # NOTE(review): delete_b_files is passed uncalled here - verify this
    # matches redun's functools.seq semantics.
    c_value = functools.seq((c, delete_b_files))[0]
    return c_value
@task()
def main() -> str:
    """Entry point: map the A values to B files, reduce to C, print it."""
    a_values = generate_a_values()
    c = map_reduce_no_caching(a_values)
    return use_c(c)
|
11,515 | 4b3066c508bd43cb2dae68cc6ed1aec83486a959 | # Create your views here.
from django.contrib.auth import login, authenticate
from django.shortcuts import render, redirect
from django.views import generic
from .forms import SignUpForm
from .models import Blog, PostComment, Post
def index(request):
    """View function for the home page of the site: renders the static
    index template with no extra context."""
    return render(request, 'index.html')
def redirectSearch(request):
    """Render the search page."""
    return render(request,"search/search.html")
def signup(request):
    """Register a new user and log them in immediately on success."""
    if request.method != 'POST':
        # Plain GET: show an unbound registration form.
        return render(request, 'signup.html', {'form': SignUpForm()})
    form = SignUpForm(request.POST)
    if form.is_valid():
        form.save()
        authenticated = authenticate(
            username=form.cleaned_data.get('username'),
            password=form.cleaned_data.get('password1'),
        )
        login(request, authenticated)
        return redirect('index')
    # Invalid submission: re-render with the bound form and its errors.
    return render(request, 'signup.html', {'form': form})
class BlogListView(generic.ListView):
    """
    Generic class-based view for a list of all blogs.
    """
    model = Blog
    paginate_by = 5
class PostListView(generic.ListView):
    # Paginated list of all posts.
    model = Post
    paginate_by = 15
class PostCommentListView(generic.ListView):
    # Paginated list of all post comments.
    model = PostComment
    paginate_by = 15
from django.shortcuts import get_object_or_404
class BlogListbyAuthorView(generic.ListView):
    """
    Generic class-based view for a list of blogs posted by a particular BlogAuthor.
    """
    model = Blog
    paginate_by = 5
    template_name = 'blog/blog_list_by_author.html'
    def get_queryset(self):
        """
        Return list of Blog objects created by BlogAuthor (author id specified in URL)
        """
        author = self.kwargs['author']
        # NOTE(review): this fetches a Blog and then filters Blog.author by
        # that Blog instance; filtering by `author` directly looks intended.
        target_author = get_object_or_404(Blog, author=author)
        return Blog.objects.filter(author=target_author)
    def get_context_data(self, **kwargs):
        """
        Add BlogAuthor to context so they can be displayed in the template
        """
        # Call the base implementation first to get a context
        context = super(BlogListbyAuthorView, self).get_context_data(**kwargs)
        # NOTE(review): this view's URL supplies 'author', not 'pk' - confirm
        # self.kwargs actually contains 'pk' or this raises KeyError.
        context['blogger'] = get_object_or_404(Blog, pk=self.kwargs['pk'])
        return context
class BlogDetailView(generic.DetailView):
    """
    Generic class-based detail view for a blog.
    """
    model = Blog
    paginate_by = 15
    template_name = 'blog/blog_detail.html'
    context_object_name = 'post_list'
    def get_context_data(self, **kwargs):
        """Expose the blog's posts as 'post_list' in the template context."""
        context = super(BlogDetailView,self).get_context_data(**kwargs)
        id = self.kwargs['pk']
        target = get_object_or_404(Blog, pk=id)
        context['post_list'] = Post.objects.filter(blog = target)
        return context
    def return_blog(self):
        """Return the Blog identified by the 'pk' URL kwarg."""
        id = self.kwargs['pk']
        blog = get_object_or_404(Blog, pk=id)
        return blog
class PostDetailView(generic.DetailView):
    """
    Generic class-based detail view for a post.
    """
    model = Post
    paginate_by = 15
    template_name = 'blog/post_detail.html'
    context_object_name = 'comment_list'
    def get_context_data(self, **kwargs):
        """Expose the post's comments as 'comment_list' in the context."""
        context = super(PostDetailView,self).get_context_data(**kwargs)
        id = self.kwargs['pk']
        target = get_object_or_404(Post, pk=id)
        context['comment_list'] = PostComment.objects.filter(post = target)
        return context
    def return_post(self):
        """Return the Post identified by the 'pk' URL kwarg."""
        id = self.kwargs['pk']
        post = get_object_or_404(Post, pk=id)
        return post
    def dispatch(self, request, *args, **kwargs):
        """Gate access: a post bound to a group is only visible to
        superusers and members of that group; others get an error page."""
        id = self.kwargs['pk']
        post = get_object_or_404(Post, pk=id)
        if post.group != None:
            if self.request.user.is_superuser or self.request.user.groups.filter(name=post.group).exists():
                return super(PostDetailView, self).dispatch(request, *args, **kwargs)
            else:
                return render(request,'blog/auth_error.html')
        else:
            return super(PostDetailView, self).dispatch(request, *args, **kwargs)
class BloggerListView(generic.ListView):
    """
    Generic class-based view for a list of bloggers.
    """
    # TODO: the list view still needs filtering
    # (original Polish comment: "trzeba przefiltrowac listview")
    model = Blog
    paginate_by = 5
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.edit import CreateView
from django.urls import reverse
class PostCommentCreate(LoginRequiredMixin, CreateView):
    """
    Form for adding a comment to a post. Requires login.
    """
    model = PostComment
    fields = ['content', ]
    def get_context_data(self, **kwargs):
        """
        Add associated post to form template so can display its title in HTML.
        """
        # Call the base implementation first to get a context
        context = super(PostCommentCreate, self).get_context_data(**kwargs)
        # Get the post from id and add it to the context
        context['post'] = get_object_or_404(Post, pk=self.kwargs['pk'])
        return context
    def form_valid(self, form):
        """
        Add author and associated post to form data before setting it as valid (so it is saved to model)
        """
        # Add logged-in user as author of comment
        form.instance.author = self.request.user
        # Associate comment with post based on passed id
        form.instance.post = get_object_or_404(Post, pk=self.kwargs['pk'])
        # Call super-class form validation behaviour
        return super(PostCommentCreate, self).form_valid(form)
    def get_success_url(self):
        """
        After posting comment return to associated post.
        """
        return reverse('post-detail', kwargs={'pk': self.kwargs['pk'], })
class PostCreate(LoginRequiredMixin, CreateView):
    """
    Form for adding a post to a blog. Requires login.
    """
    model = Post
    fields = ['name','content','group', ]
    def get_context_data(self, **kwargs):
        """
        Add associated blog to form template so can display its title in HTML.
        """
        # Call the base implementation first to get a context
        context = super(PostCreate, self).get_context_data(**kwargs)
        # Get the blog from id and add it to the context
        context['blog'] = get_object_or_404(Blog, pk=self.kwargs['pk'])
        return context
    def form_valid(self, form):
        """
        Add associated blog to form data before setting it as valid (so it is saved to model)
        """
        # Associate post with blog based on passed id.
        # NOTE(review): unlike PostCommentCreate, no author is set here;
        # confirm Post has no author field or that it is optional.
        form.instance.blog = get_object_or_404(Blog, pk=self.kwargs['pk'])
        # Call super-class form validation behaviour
        return super(PostCreate, self).form_valid(form)
    def get_success_url(self):
        """
        After creating the post return to the associated blog.
        """
        return reverse('blog-detail', kwargs={'pk': self.kwargs['pk'], })
class BlogCreate(LoginRequiredMixin, CreateView):
    """
    Form for creating a new blog. Requires login.

    The logged-in user is recorded as the blog's author; on success the
    browser is redirected to the blog list.
    """
    model = Blog
    fields = ['name','description', ]
    def form_valid(self, form):
        # Record the logged-in user as the author before saving.
        form.instance.author = self.request.user
        return super(BlogCreate, self).form_valid(form)
    def get_success_url(self):
        return reverse('blogs')
|
11,516 | 67395c63422ca82a8608100ef201ea05207429f2 | import os, sys
import warnings
warnings.filterwarnings("ignore")
import tensorflow as tf
sys.path.append('/'.join(os.path.abspath(__file__).split('/')))
from ornstein_auto_encoder import logging_daily
from ornstein_auto_encoder import configuration
from ornstein_auto_encoder.utils import argv_parse
from ornstein_auto_encoder.training import training
from ornstein_auto_encoder.extract_identity_feature import extract_identity_feature
if __name__ == '__main__':
    # Parse CLI arguments and set up daily logging.
    argdict = argv_parse(sys.argv)
    logger = logging_daily.logging_daily(argdict['log_info'][0])
    logger.reset_logging()
    log = logger.get_logging()
    log.setLevel(logging_daily.logging.INFO)
    if tf.__version__.startswith('2'):
        gpus = tf.config.experimental.get_visible_devices(device_type='GPU')
        # set_memory_growth takes a single device, not a list; the old code
        # passed the whole list and the blanket try/except silently swallowed
        # the resulting TypeError, so memory growth was never enabled.
        for gpu in gpus:
            try:
                tf.config.experimental.set_memory_growth(gpu, True)
            except (ValueError, RuntimeError):
                # Invalid device, or runtime already initialized.
                pass
    else:
        # TF1 path: allow_growth on a session config.
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
    log.info('Argument input')
    for argname, arg in argdict.items():
        log.info(' {}:{}'.format(argname,arg))
    # Per-phase argument dictionaries for the three training stages.
    argdict_identity = {'path_info':argdict['path_info'], 'network_info':argdict['network_identity_info'], 'batch_size':['100']}
    argdict_within_unit = {'path_info':argdict['path_info'], 'network_info':argdict['network_within_unit_info']}
    argdict_total = {'path_info':argdict['path_info'], 'network_info':argdict['network_total_info']}
    # Alternating optimization: identity encoder, then within-unit encoder.
    for nrepeat in range(3):
        log.info('----------------------------------------------------------------------------------------')
        log.info(' %d th alternating ' % nrepeat)
        log.info('----------------------------------------------------------------------------------------')
        log.info(' %dth identity encoder update' % nrepeat)
        log.info('----------------------------------------------------------------------------------------')
        # 1) Update the identity encoder
        training(argdict_identity, log, nrepeat=nrepeat)
        extract_identity_feature(argdict_identity, log)
        # 2) Update the within-unit variation encoder
        log.info('----------------------------------------------------------------------------------------')
        log.info(' %dth within-unit variation encoder update' % nrepeat)
        log.info('----------------------------------------------------------------------------------------')
        training(argdict_within_unit, log, nrepeat=nrepeat)
        log.info('----------------------------------------------------------------------------------------')
    # Fine-tune the full network jointly.
    log.info('----------------------------------------------------------------------------------------')
    log.info(' Fine-tuning')
    log.info('----------------------------------------------------------------------------------------')
    training(argdict_total, log, nrepeat=nrepeat)
    log.info('----------------------------------------------------------------------------------------')
    log.info('Finished!')
    log.info('----------------------------------------------------------------------------------------')
11,517 | a4334c7ae30f58379fe0da5c3ce3cecf164de0af | """All the logic for commands does in this class"""
import os
import unittest
from server_file import Server
class Tester(unittest.TestCase):
    """This class test with given inputs to perform unittest.

    NOTE(review): these tests mutate and depend on real filesystem state
    (folders under the current working directory) and on execution order
    within each test; run them in a clean working directory.

    Methods
    -----------
    test_create_folder(self):
        tests the given input with user input and checks for all success and failure cases
        of creating folder.
    test_change_folder(self):
        tests the given input with user input and checks for all success and failure cases
        of changing folder.
    test_read_from_file(self):
        tests the given input with user input and checks for all success and failure cases
        of reading from a file.
    test_write_file(self):
        tests the given input with user input and checks for all success and failure cases
        of writing into text file.
    """
    def test_create_folder(self):
        """tests the given input with user input and checks for all success and failure cases
        of creating folder."""
        test = Server()
        # Second identical request must fail: the folder exists after the first.
        # NOTE(review): assumes 'oook' does not exist before the test runs.
        inputs = [['create_folder','oook'],['create_folder','oook']]
        response = ['folder created','Folder already exists. Try with another folder name']
        res = []
        for val in inputs:
            res.append(test.create_folder(val))
        self.assertListEqual(res, response)
    def test_change_folder(self):
        """ tests the given input with user input and checks for all success and failure cases
        of changing folder."""
        test = Server()
        test.user_name = 'andy'
        test.cur_dir = os.getcwd()
        test.root_dir = os.getcwd()
        # Descend three levels, fail on a missing folder, then climb with '..'
        # until the final '..' would leave root_dir ('access denied').
        inputs = [['change_folder', 'andy'], ['change_folder', 'name'],
                  ['change_folder', 'name'],['change_folder', 'name'],['change_folder', '..'],
                  ['change_folder', '..'],['change_folder', '..'],['change_folder', '..'] ]
        path = os.path.join(os.getcwd(), test.user_name)
        path1 = os.path.join(path, 'name')
        path2 = os.path.join(path1, 'name')
        path0 = os.path.join(path2, 'name')
        # Parent paths for each '..' step.
        path3 = os.path.normpath(path2 + os.sep + os.pardir)
        path4 = os.path.normpath(path3 + os.sep + os.pardir)
        path5 = os.path.normpath(path4 + os.sep + os.pardir)
        paths = [path, path1, path2, path0, path3, path4, path5]
        response = ['Directory is changed to {}'.format(paths[0]),
                    'Directory is changed to {}'.format(paths[1]),
                    'Directory is changed to {}'.format(paths[2]),
                    'folder is not found',
                    'Directory is changed to {}'.format(paths[4]),
                    'Directory is changed to {}'.format(paths[5]),
                    'Directory is changed to {}'.format(paths[6]), 'access denied']
        #print(response)
        res = []
        for val in inputs:
            res.append(test.change_folder(val))
        self.assertListEqual(res, response)
    def test_read_from_file(self):
        """tests the given input with user input and checks for all success and failure cases
        of reading from a file."""
        test = Server()
        test.cur_dir = os.getcwd()
        # Second read of the same file reports end-of-content; None filename
        # is rejected; a fresh read cycle starts again from the top.
        inputs = [['read_file', 'test_file.txt'],
                  ['read_file', 'test_file.txt'],
                  ['read_file', None],
                  ['read_file', 'test_file.txt'] ]
        response = ['Hello, this is a test file.',
                    'Message:The file is read completely. Nothing more to read from this file',
                    'name of the file should be given',
                    'Hello, this is a test file.']
        res = []
        for val in inputs:
            res.append(test.read_from_file(val))
        # print("****************************************")
        # print(res)
        self.assertListEqual(res, response)
    def test_write_file(self):
        """tests the given input with user input and checks for all success and failure cases
        of writing into text file."""
        test = Server()
        test.cur_dir = os.getcwd()
        # Write to an existing file, create a new one, and erase by writing
        # without content.
        inputs = [['write_file', 'test_file1.txt', 'Hello world'],
                  ['write_file', 'test_file2.txt', 'Hello world'],
                  ['write_file', 'test_file1.txt']]
        response = ['written successfully',
                    'file created and written successfully',
                    'contents erased successfully']
        res = []
        for val in inputs:
            res.append(test.write_file(val))
        self.assertListEqual(res, response)
# Run the test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
|
11,518 | 7af1be3b636addb839c864850db3909023296c40 | from mbuild import recipes
import mbuild as mb
from foyer import Forcefield
import sys
sys.path.append('../../../../../../')
from mosdef_slitpore.utils import charmm_writer as mf_charmm
# Residue names and force-field files for real and "fake" SPC/E water.
Water_res_name = 'H2O'
Fake_water_res_name = 'h2o'
FF_file = '../../../../../../mosdef_slitpore/ffxml/pore-spce.xml'
FF_file_fake_water = '../../../../../../mosdef_slitpore/ffxml/FF_Fake_SPCE.xml'
# Build and minimize a single water molecule of each type.
water = mb.load('O', smiles=True)
water.name = Water_res_name
print('water.name = '+str(water.name))
water.energy_minimize(forcefield = FF_file , steps=10**9)
Fake_water = mb.load('O', smiles=True)
Fake_water.name = Fake_water_res_name
Fake_water.energy_minimize(forcefield = FF_file_fake_water , steps=10**9)
# Map each residue name to its force-field file; BOT/TOP are the graphene sheets.
FF_Graphene_pore_w_solvent_fake_water_Dict = {'H2O' : FF_file, 'h2o' : FF_file_fake_water , 'BOT': FF_file, 'TOP': FF_file}
residues_Graphene_pore_w_solvent_fake_water_List = [Fake_water.name, water.name, 'BOT', 'TOP']
# Residues whose bonds/angles are fixed, and the rigid graphene residues.
Fix_bonds_angles_fake_water_residues = [ water.name, Fake_water.name]
Fix_Graphene_residue = [ 'BOT', 'TOP']
#**************************************************************
# builds water reservoir (start)
#**************************************************************
# 80/20 mix of real/fake water at 600 kg/m^3 in a 6x6x6 nm box.
box_reservior_w_fake_water = mb.fill_box(compound=[water,Fake_water],density=600,
                                         box=[6,6,6], compound_ratio=[0.8,0.2])
#**************************************************************
# builds water reservoir (end)
#**************************************************************
#**************************************************************
# builds filled graphene slit for 16 Ang or 1.6nm (start)
#**************************************************************
# Create graphene system
# Geometry parameters (nm); 411 total waters, 5 of them "fake".
pore_width_nm = 1.6
No_sheets = 3
sheet_spacing = 0.335
water_spacing_from_walls = 0.2
Total_waters_fake_water = 411
#for GOMC, currently we need to add the space at the end of the simulation
# this does not matter as we are using PBC's
empty_graphene_pore =recipes.GraphenePore(
    pore_width=sheet_spacing ,
    pore_length=3.0,
    pore_depth=3.0,
    n_sheets=No_sheets,
    slit_pore_dim=2
)
empty_graphene_pore_shifted = empty_graphene_pore
n_fake_waters = 5
n_waters = Total_waters_fake_water - n_fake_waters
z_shift= 0
#note the default spacing of 0.2 automatically accounted for in the water box packing (i.e. adding 0.2 nm for 1 wall is really 0.4 nm)
water_between_pores = mb.fill_box(compound=[water,Fake_water], n_compounds= [n_waters, n_fake_waters] , box=[3.0, 3.0, pore_width_nm - water_spacing_from_walls*1])
# Position the water slab inside the slit and recenter both sub-systems in x/y.
water_between_pores.translate([0, 0, sheet_spacing*(2*No_sheets-1) + water_spacing_from_walls])
water_between_pores.translate([ -water_between_pores.center[0], -water_between_pores.center[1], z_shift])
empty_graphene_pore.translate([ -empty_graphene_pore.center[0], -empty_graphene_pore.center[1], z_shift])
filled_pore = empty_graphene_pore
filled_pore.add(water_between_pores, inherit_periodicity=False)
filled_pore.translate([ -filled_pore.center[0], -filled_pore.center[1], 0])
# z box length = graphene stack thickness + pore width.
filled_pore.periodicity[2] = sheet_spacing*(2*No_sheets-1)+pore_width_nm
# Write CHARMM-style PSF/PDB and GOMC force-field files for both boxes.
mf_charmm.charmm_psf_psb_FF(filled_pore,
                            'filled_pore_fake_water_3x3x1.6nm_3-layer',
                            structure_1 =box_reservior_w_fake_water ,
                            filename_1 = 'GOMC_reservior_fake_water_box',
                            FF_filename ="GOMC_pore_fake_water_FF" ,
                            forcefield_files= FF_Graphene_pore_w_solvent_fake_water_Dict ,
                            residues= residues_Graphene_pore_w_solvent_fake_water_List ,
                            Bead_to_atom_name_dict = None,
                            fix_residue = Fix_Graphene_residue,
                            fix_res_bonds_angles = Fix_bonds_angles_fake_water_residues,
                            reorder_res_in_pdb_psf = False
                            )
#**************************************************************
# builds filled graphene slit for 16 Ang or 1.6nm (end)
#**************************************************************
|
11,519 | c9011a4b75b0e39da3986e34ca61bcc593afcd7a | def birthdayCakeCandles(ar):
ar.sort()
max=ar[len(ar)-1]
x=0
for i in ar:
if i==max:
x=x+1
print(x)
# Read the candle count and the space-separated heights from stdin,
# then report how many candles are tallest.
candle_count = int(input())
candles = list(map(int, input().rstrip().split(' ')))
birthdayCakeCandles(candles)
11,520 | fc872b19018d6d27309d18c19dd572035830598f | import markus
# Announce start-up, then hand control to the markus entry point.
print('start main')
markus.main()
11,521 | ddae115e5fd2f0c0938ab5cd41761b35c6980bfd | '''
Package initializer: re-exports names from the default module.
'''
from .default import *
|
11,522 | cce84b3ad27139883cc6497ff5e097d5c679572c | # Generated by Django 2.2.11 on 2020-03-16 15:57
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the `shellies` table that
    # tracks Shelly devices (id, name, MAC, IP, firmware state) with an
    # auto-updated change timestamp. Do not edit by hand once applied;
    # create a follow-up migration instead.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Shellies',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('shelly_id', models.CharField(max_length=100)),
                ('shelly_name', models.CharField(blank=True, max_length=200, null=True)),
                ('shelly_mac', models.CharField(blank=True, max_length=50, null=True)),
                ('shelly_ip', models.CharField(blank=True, max_length=50, null=True)),
                ('shelly_new_fw', models.BooleanField(default=False)),
                ('shelly_fw_version', models.CharField(max_length=100)),
                ('last_change_ts', models.DateTimeField(auto_now=True)),
            ],
            options={
                'db_table': 'shellies',
                'managed': True,
            },
        ),
    ]
|
# Points Django at the AppConfig for this app.
# NOTE(review): default_app_config is deprecated since Django 3.2 when the
# app defines a single AppConfig — confirm the project's Django version.
default_app_config = 'course_admin.apps.CourseadminConfig'
11,524 | ed5e6a388ebff262652a074404fee33e80aec1f8 | from .models import (
User, UserProfile,
Education,
WorkExperience,
Specialization,
SecurityQuestion,
Answer,
KeyVal,
EmployeeBasicDetails,
EmployeeProfessionalDetails,
EmployeeEmploymentDetails,
EmployeeWorkDetails,
EmployeeAvailability,
Resume,
BankAccounts,
TestClass
)
from .mixins import UserCheckEmailPhoneApiMixin , UserCheckIdproofApiMixin , UserCheckProviderSocialMediaApiMixin
from .constants import ROLES
from core.serializers import Base64ImageField, Base64FileField
from google_places.models import google_places, Place
from rest_framework import serializers
from rest_framework_jwt.serializers import JSONWebTokenSerializer
from rest_framework_jwt.settings import api_settings
from rest_framework_jwt.compat import get_username_field, Serializer
from phonenumber_field.serializerfields import PhoneNumberField
from fcm_django.models import FCMDevice
from rest_framework.validators import UniqueValidator
from django.core.validators import MinLengthValidator
from common.models import (
Expertise, Experience, HourlyBudget, HighestQualification, Role, Bank
)
from common.serializers import HighestQualificationSerializer,ProgramsSerializer, RoleSerializer, BankSerializer
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
from datetime import datetime, date
from django.core.validators import RegexValidator
from django.db.models import Q
import json
from djmoney.money import Money
from rest_framework.reverse import reverse
from boto.s3.connection import S3Connection
from saffron_backend.settings import *
from django.core.files.storage import default_storage
import urllib
from rest_framework.response import Response
#Temporary Comment changes related to Dowlla Payment
from projects.task import create_customer
# NOTE(review): despite the name, this pattern does NOT allow spaces, and the
# error message says "alphanumerics" although . , ' are also accepted —
# confirm the intended character set before changing either.
alphaSpaces = RegexValidator(r"^[a-zA-Z0-9.,']+$", 'Only alphanumerics are allowed.')
class MoneyField(serializers.Field):
    """
    DRF field that (de)serializes djmoney ``Money`` values as
    ``{'amount': <float>, 'currency': <str>}`` objects.

    Pass ``positive_only=False`` to allow negative amounts.
    """
    default_error_messages = {
        'positive_only': 'The amount must be positive.',
        'not_a_number': 'The amount must be a number.'
    }
    def __init__(self, *args, **kwargs):
        # Pop our custom option BEFORE delegating: the base Field.__init__
        # rejects unknown keyword arguments, so reading it after super()
        # (as the old code did) raised TypeError whenever a caller actually
        # passed positive_only=...
        self.positive_only = kwargs.pop('positive_only', True)
        super(MoneyField, self).__init__(*args, **kwargs)
    def to_representation(self, obj):
        """Render a Money instance as a plain dict."""
        data = {'amount': float(obj.amount),
                'currency': str(obj.currency),}
        return data
    def to_internal_value(self, data):
        """Parse a JSON string into a Money instance, validating sign."""
        # Local import: `decimal` was referenced in the except clause but
        # never imported, turning a bad amount into a NameError instead of
        # the intended 'not_a_number' validation failure.
        import decimal
        data = json.loads(data)
        amount = data.get('amount')
        currency = data.get('currency')
        try:
            obj = Money(amount, currency)
        except decimal.InvalidOperation:
            self.fail('not_a_number')
        if obj < Money('0', currency) and self.positive_only:
            self.fail('positive_only')
        return obj
class EmailPhoneJSONWebTokenSerializer(JSONWebTokenSerializer):
    """
    Auth field for login can be email or phone or username.
    We need check which field in data and use it
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Normalize email; for phone/user_name logins swap the default
        # username field for the one actually supplied.
        if 'email' in self.initial_data:
            self.initial_data['email'] = self.initial_data['email'].lower()
        elif 'phone' in self.initial_data:
            del self.fields[self.username_field]
            self.fields['phone'] = serializers.CharField()
        elif 'user_name' in self.initial_data:
            del self.fields[self.username_field]
            self.fields['user_name'] = serializers.CharField()
    @property
    def username_field(self):
        """
        Username field can be email or phone or username
        """
        if 'phone' in self.initial_data:
            return 'phone'
        if 'user_name' in self.initial_data:
            return 'user_name'
        return get_username_field()
class SocialJSONWebTokenSerializer(serializers.Serializer):
    """Placeholder serializer for social-login token issuing.

    Validation/token creation currently happens in the view; the
    commented-out validate() below is kept for reference.
    """
    pass
    # def validate(self, attrs):
    #     request = self.context['request']
    #     #print("aaaaaaaaaaaaaa",request.user)
    #     if request.user:
    #         if not request.user.is_active:
    #             msg = 'User account is disabled.'
    #             raise serializers.ValidationError(msg)
    #         payload = jwt_payload_handler(request.user)
    #         return {
    #             'token': jwt_encode_handler(payload),
    #             'user': request.user
    #         }
    #     else:
    #         msg = 'Unable to log in with provided credentials.'
    #         raise serializers.ValidationError(msg)
class EducationSerializer(serializers.ModelSerializer):
    """
    Serializer for employee educations
    """
    class Meta:
        model = Education
        fields = ('date_start', 'date_end', 'degree', 'school')
class WorkExperienceSerializer(serializers.ModelSerializer):
    """
    Serializer for employee work experience
    """
    class Meta:
        model = WorkExperience
        fields = ('date_start', 'date_end', 'company', 'position',
                  'description')
class EmployeeEmploymentDetailSerializer(serializers.ModelSerializer):
    """Serializer for an employee's current-employment record, exposing
    readable names for the related functional areas / roles / departments
    and a human-readable duration string."""
    userprofile_id = serializers.SerializerMethodField()
    functional_areas_name = serializers.SerializerMethodField()
    role_name = serializers.SerializerMethodField()
    departments_name = serializers.SerializerMethodField()
    duration = serializers.SerializerMethodField()
    class Meta:
        model = EmployeeEmploymentDetails
        fields = ('id', 'present', 'current_employer', 'date_start', 'date_end','duration','current_designation', 'functional_areas','functional_areas_name','role','role_name', 'departments','departments_name','job_role','userprofile_id','is_completed')
    def get_userprofile_id(self, obj):
        return obj.userprofile.pk
    def validate(self, attrs):
        """Require employer and dates; end date may be omitted only while
        'present' is true, and must follow the start date."""
        attrs = super().validate(attrs)
        if not attrs.get('current_employer'):
            raise serializers.ValidationError('Please set employer')
        if not attrs.get('date_start'):
            raise serializers.ValidationError('Please set start date')
        if not attrs.get('date_end') and attrs.get('present') == False:
            raise serializers.ValidationError('Please set end date')
        if attrs.get('date_start') and attrs.get('date_end'):
            if attrs.get('date_start') >= attrs.get('date_end'):
                raise serializers.ValidationError('Please select start date less than end date')
        return attrs
    def get_functional_areas_name(self, obj):
        if obj.functional_areas:
            functional_areas = [i.title for i in obj.functional_areas.all()]
            return functional_areas
    def get_role_name(self, obj):
        if obj.role:
            role = [i.title for i in obj.role.all()]
            return role
    def get_departments_name(self, obj):
        if obj.departments:
            departments = [i.title for i in obj.departments.all()]
            return departments
    def get_duration(self, obj):
        # "YYYY-YYYY" for finished jobs, "YYYY-Present" for ongoing ones;
        # returns None when no start date is set.
        if obj.date_start and obj.date_end:
            return str(obj.date_start.year) + '-' + str(obj.date_end.year)
        elif obj.date_start and obj.present:
            return str(obj.date_start.year) + '-' + 'Present'
class EmployeeWorkDetailSerializer(serializers.ModelSerializer):
    """Serializer for an employee's per-project work details; most fields
    are made mandatory in validate() even though the model may allow blanks."""
    userprofile_id = serializers.SerializerMethodField(allow_null=True,)
    role = serializers.PrimaryKeyRelatedField(
        queryset=Role.objects.all(), required=False, many=True
    )
    class Meta:
        model = EmployeeWorkDetails
        fields = ('id', 'client', 'project_title', 'from_date', 'to_date', 'project_location', 'role','employment_type','project_details', 'role_description', 'team_size', 'skill_used', 'userprofile_id','is_completed')
    def get_userprofile_id(self, obj):
        return obj.userprofile.pk
    def validate(self, attrs):
        """Require client, title, both dates, details and at least one role."""
        attrs = super().validate(attrs)
        if not attrs.get('client'):
            raise serializers.ValidationError('Please set client')
        if not attrs.get('project_title'):
            raise serializers.ValidationError('Please set project title')
        if not attrs.get('from_date'):
            raise serializers.ValidationError('Please set from date')
        if not attrs.get('to_date'):
            raise serializers.ValidationError('Please set to date')
        if not attrs.get('project_details'):
            raise serializers.ValidationError('Please set project details')
        if not attrs.get('role'):
            raise serializers.ValidationError('Please set role')
        return attrs
class UserProfileSerializer(serializers.ModelSerializer):
    """Full read/write serializer for a user profile, including nested
    educations/works, address geocoding via Google Places, and user-level
    email/user_name updates with uniqueness checks."""
    user_id = serializers.SerializerMethodField()
    """
    Email and phone are not required.
    We need check if they are are unique
    """
    email = serializers.EmailField(source='user.email', required=False, allow_null=True, allow_blank=True) #validators=[UniqueValidator(queryset=User.objects.all())]
    phone_number = PhoneNumberField(required=False, allow_null=True, allow_blank=True,validators=[UniqueValidator(queryset=UserProfile.objects.all())])
    user_name = serializers.CharField(source='user.user_name', required=False, allow_null=True, allow_blank=True,)#validators=[UniqueValidator(queryset=User.objects.all())]
    specializations = serializers.PrimaryKeyRelatedField(
        queryset=Specialization.objects.all(), required=False, many=True
    )
    skills = serializers.PrimaryKeyRelatedField(
        queryset=Specialization.objects.all(), required=False, many=True
    )
    educations = EducationSerializer(required=False, many=True)
    works = WorkExperienceSerializer(required=False, many=True)
    address = serializers.CharField(
        required=False, allow_null=True, allow_blank=True
    )
    photo = Base64ImageField(required=False, allow_null=True)
    photo_crop = serializers.CharField(source='get_photo_crop', read_only=True)
    photo_bounds = serializers.JSONField(required=False, allow_null=True)
    passport_photo = Base64ImageField(required=False, allow_null=True)
    driver_license_photo = Base64ImageField(required=False, allow_null=True)
    role = serializers.ChoiceField(choices=ROLES, required=False)
    employees = serializers.PrimaryKeyRelatedField(
        queryset=User.objects.all(), required=False, many=True
    )
    class Meta:
        model = UserProfile
        fields = ('id', 'first_name', 'last_name', 'phone_number', 'user_name', 'address',
                  'photo', 'photo_crop', 'photo_bounds', 'passport_photo',
                  'driver_license_photo', 'role', 'user_id', 'phone_number',
                  'email', 'title', 'summary', 'specializations', 'skills',
                  'educations', 'works', 'employees','user_name','temp_password','zip','ssn')
        depth = 1
    def get_user_id(self, obj):
        return obj.user.pk
    def validate_address(self, value):
        """
        Try validate address using google places API and save object
        """
        # It seems we want delete address
        if not value:
            return None
        # NOTE(review): this performs a network call to Google Places on
        # every validation of a non-empty address.
        results = google_places.text_search(query=value)
        # We use google places autocomplite but we need recheck data
        # (we do not believe the data from frontend) and get first result
        if len(results.places) > 0:
            p = results.places[0]
            try:
                place = Place.objects.get(api_id=p.id)
            except Place.DoesNotExist:
                place = Place()
                place.populate_from_api(p)
            return place
        raise serializers.ValidationError('Please enter correct address')
    def update(self, instance, validated_data):
        """Update the profile plus its nested/related objects.

        Handles, in order: nested educations/works, the base profile
        fields, user email and user_name (with manual uniqueness checks),
        temp-user conversion, employee-role bootstrap records, and a
        push notification to the user's devices.
        """
        educations_data = validated_data.pop('educations', None)
        works_data = validated_data.pop('works', None)
        email_data = None
        username_data = None
        is_tempuser = None
        # source='user.*' fields arrive nested under the 'user' key.
        if 'user' in validated_data:
            user = validated_data.pop('user')
            username_data = user.pop('user_name', None)
            email_data = user.pop('email', None)
            is_tempuser = user.pop('is_tempuser', False)
        instance = super().update(instance, validated_data)
        if educations_data is not None:
            s = EducationSerializer(data=educations_data, many=True)
            s.is_valid(raise_exception=True)
            s.save(userprofile=instance)
        if works_data is not None:
            s = WorkExperienceSerializer(data=works_data, many=True)
            s.is_valid(raise_exception=True)
            s.save(userprofile=instance)
        if email_data is not None:
            # Manual uniqueness check excluding this user and blank emails.
            email_obj =User.objects.filter(~Q(email=''),~Q(id=instance.user.id),email = email_data).values_list('id', flat=True)
            if email_obj.exists():
                raise serializers.ValidationError({"email": "This email is already existing."})
            else:
                instance.user.email = email_data.lower()
                instance.user.save()
        if username_data is not None:
            user_id =User.objects.filter(~Q(user_name=''),~Q(id=instance.user.id),user_name = username_data).values_list('id', flat=True)
            if user_id.exists():
                raise serializers.ValidationError({"user_name": "This username is already existing."})
            else:
                instance.user.user_name = username_data
                instance.user.save()
        # NOTE(review): is_tempuser is set to False (not None) whenever the
        # payload contains a 'user' key, so this branch runs for any
        # email/user_name update and clears temp-user state — confirm intended.
        if is_tempuser is not None:
            instance.user.is_tempuser = False
            instance.temp_password = None
            instance.save()
            instance.user.save()
        if validated_data.get('role')=='employee':
            # Bootstrap the employee-side detail records on first switch.
            EmployeeBasicDetails.objects.get_or_create(userprofile=instance)
            EmployeeAvailability.objects.get_or_create(userprofile=instance)
            Resume.objects.get_or_create(userprofile=instance)
        # send push notification
        devices = FCMDevice.objects.filter(
            user=instance.user, active=True
        )
        devices.send_message(
            title='Profile', body='You successfully updated your profile'
        )
        return instance
class SecurityAnswerSerializer(serializers.ModelSerializer):
    """
    Serializer for Security Questions answers
    """
    user = serializers.ReadOnlyField(source='user_id')
    # NOTE(review): KeyVal is a model, not a serializer field — this class
    # attribute looks unused/accidental; confirm before removing.
    keyval = KeyVal()
    class Meta:
        model = Answer
        fields = ('security_question', 'user', 'response_text', 'vals')
    def create(self, validated_data):
        """Create or update the user's answer for the given question."""
        # NOTE(review): 'security_question' is a ModelSerializer relation, so
        # validated_data may already hold a SecurityQuestion instance rather
        # than an id — verify this .get(id=...) lookup against actual payloads.
        question_obj = SecurityQuestion.objects.get(id=validated_data.pop('security_question'))
        answer, created = Answer.objects.update_or_create(
            security_question=question_obj,
            response_text=validated_data.pop('response_text'),
            user=validated_data.pop('user')
        )
        return answer
class CreateUserSerializer(
    UserCheckEmailPhoneApiMixin,
    serializers.ModelSerializer
):
    """
    Email and phone are not required.
    We need check if they are set and they are unique
    """
    phone = PhoneNumberField(required=False)
    user_name = serializers.CharField(required=False, max_length=20, allow_null=True, allow_blank=True, validators=[MinLengthValidator(5)])
    class Meta:
        model = User
        fields = ('email', 'phone','user_name', 'security_question', 'password')
        extra_kwargs = {'password': {'write_only': True, 'required': False}}
    def validate(self, attrs):
        """Require a password, and a security question when registering
        with a user_name."""
        attrs = super().validate(attrs)
        if not attrs.get('password'):
            raise serializers.ValidationError('Please set password')
        if attrs.get('user_name') and not attrs.get('security_question'):
            raise serializers.ValidationError('Please select security question')
        return attrs
    def create(self, validated_data):
        # Superuser path; note 'is_superuser' is not an accepted input field,
        # so this only triggers when injected programmatically.
        if validated_data.get('is_superuser') is True:
            return User.objects.create_superuser(**validated_data)
        if validated_data.get('email'):
            validated_data['email']=validated_data['email'].lower()
        return User.objects.create_user(**validated_data)
class AuthLazyUserConvertSerializer(serializers.ModelSerializer):
    """
    Serializer for lazy users convert
    """
    password = serializers.CharField()
    class Meta:
        model = User
        fields = ('email', 'password')
class AuthLazyUserTokenSerializer(serializers.Serializer):
    """
    Empty serializer for AuthLazyUserTokenViewSet. We don't need to send any data.
    """
    pass
class CurrentUserRolesSerializer(serializers.Serializer):
    """
    Serializer for current user roles
    """
    name = serializers.CharField()
    key = serializers.CharField()
    is_primary = serializers.BooleanField()
class UserProfileShortDataSerializer(serializers.ModelSerializer):
    """
    Serializer for user profile published information
    """
    email = serializers.EmailField(source='user.email', read_only=True)
    # photo_crop = serializers.CharField(source='get_photo_crop', read_only=True)
    photo_crop = serializers.SerializerMethodField()
    address = serializers.CharField(
        required=False, allow_null=True, allow_blank=True
    )
    user_id = serializers.IntegerField(source='user.pk', read_only=True)
    class Meta:
        model = UserProfile
        fields = ('id', 'first_name', 'last_name', 'email', 'phone_number',
                  'photo_crop', 'user_id','address')
        depth = 1
    def get_photo_crop(self, instance):
        """Return a 15-minute presigned S3 URL for the profile photo,
        or an empty string when no photo is stored."""
        # NOTE(review): a new S3 connection is opened per serialized object;
        # consider a module-level connection if this shows up in profiling.
        url = ""
        if instance.photo.name:
            conn = S3Connection(AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY,host=AWS_HOST)
            #connecting to aws with specific endpoint reason
            bucket = conn.get_bucket(AWS_S3_BUCKET_NAME)
            #getting bucket from AWS
            key = bucket.get_key(instance.photo.name, validate=True)
            if key:
                # 900 seconds = 15 minutes of URL validity.
                url = key.generate_url(900)
        return "{}".format(url)
    # def get_address(self, instance):
    #     print("aaaaaaaaaaaaaaaaaa",instance)
    #     #google_address = Place.objects.get(id =user_obj.userprofile.address)
    #     #user_address = google_address.formatted_address
class KeyValSerializer(serializers.ModelSerializer):
    """Serializer exposing a KeyVal's key/value pair."""
    class Meta:
        model = KeyVal
        fields = ('key', 'value')
class SocialMediaSerializer(serializers.ModelSerializer):
    """
    Serializer for creating users from a social-media login payload.

    Requires a name, a provider, a provider id, and at least one contact
    channel (email or phone).
    """
    provider = serializers.CharField(required=False)
    id = serializers.CharField(required=True, allow_null=True)
    email = serializers.EmailField(required=False)
    phone = PhoneNumberField(required=False)
    name = serializers.CharField(required=False)
    #photoUrl = serializers.CharField(required=False, allow_null=False)
    class Meta:
        model = User
        fields = ('email','phone','provider','id','name')
    def validate(self, attrs):
        if not attrs.get('name'):
            raise serializers.ValidationError('Please set name')
        if not attrs.get('provider'):
            raise serializers.ValidationError('Please set provider')
        if not attrs.get('id'):
            raise serializers.ValidationError('Please set provider Id')
        # Require at least one contact channel. The previous condition,
        # `not attrs.get('email') or attrs.get('phone')`, mis-grouped the
        # boolean and rejected every payload that *included* a phone.
        if not (attrs.get('email') or attrs.get('phone')):
            raise serializers.ValidationError('Please set either email or phone')
        return attrs
    def create(self,validated_data):
        return User.objects.create_user(**validated_data)
class SecurityQuestionSerializer(serializers.ModelSerializer):
    """
    Serializer for Security Questions information.
    """
    # Nested key/value rows attached to the question — presumably the
    # selectable options for choice-type questions; confirm against usage.
    vals = KeyValSerializer(required=False, many=True)

    class Meta:
        model = SecurityQuestion
        fields = ('id', 'order', 'title', 'question_type', 'vals', 'is_active')
class EmployeeBasicDetailSerializer(serializers.ModelSerializer):
    """
    Serializer for an employee's basic details, including the nested
    userprofile (name/photo) and user (email) fields they are stored on.

    Fixes over the original: the `total_experience` validation was
    duplicated; `validate` crashed with AttributeError when the nested
    'user' dict was absent; `update` saved the related profile once per
    changed attribute instead of once, and skipped `super().update()`
    entirely when no nested profile data was submitted.
    """
    userprofile_id = serializers.SerializerMethodField()
    first_name = serializers.CharField(source='userprofile.first_name', required=False, allow_null=True, allow_blank=True, validators=[alphaSpaces])
    last_name = serializers.CharField(source='userprofile.last_name', required=False, allow_null=True, allow_blank=True, validators=[alphaSpaces])
    photo = Base64ImageField(source='userprofile.photo', required=False, allow_null=True)
    photo_crop = serializers.CharField(source='userprofile.get_photo_crop', read_only=True)
    photo_bounds = serializers.JSONField(source='userprofile.photo_bounds', required=False, allow_null=True)
    email = serializers.EmailField(source='userprofile.user.email', required=False, allow_null=True, allow_blank=True)

    class Meta:
        model = EmployeeBasicDetails
        fields = ('id', 'first_name', 'last_name', 'photo', 'photo_crop', 'photo_bounds',
                  'date_of_birth', 'gender', 'marital_status', 'employee_status', 'hobbies', 'address_line1',
                  'address_line2', 'city', 'state', 'country', 'pin_code', 'contact_details', 'alternate_contact_details',
                  'userprofile_id', 'is_completed', 'total_experience', 'email')

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk

    def validate(self, attrs):
        """Require the identity and demographic fields (messages unchanged)."""
        attrs = super().validate(attrs)
        if attrs.get('userprofile'):
            userprofile = attrs.get('userprofile')
            if not userprofile.get('first_name'):
                raise serializers.ValidationError('Please set first name')
            if not userprofile.get('last_name'):
                raise serializers.ValidationError('Please set last name')
            # Guard against a missing 'user' dict (original assumed presence).
            if not (userprofile.get('user') or {}).get('email'):
                raise serializers.ValidationError('Please set email')
        if not attrs.get('date_of_birth'):
            raise serializers.ValidationError('Please set date of birth')
        if not attrs.get('gender'):
            raise serializers.ValidationError('Please set gender')
        if not attrs.get('marital_status'):
            raise serializers.ValidationError('Please set marital status')
        # The original repeated this check twice; once is sufficient.
        if not attrs.get('total_experience'):
            raise serializers.ValidationError('Please select total experience')
        return attrs

    def update(self, instance, validated_data):
        """
        Update the EmployeeBasicDetails row, then push nested
        userprofile / user values onto their own models (one save each).
        """
        nested = {}
        email = None
        if 'userprofile' in validated_data:
            userprofile = validated_data.pop('userprofile')
            user = userprofile.pop('user', None) or {}
            email = user.pop('email', None)
            for attr in ('first_name', 'last_name', 'photo', 'photo_crop', 'photo_bounds'):
                nested[attr] = userprofile.pop(attr, None)
        instance = super().update(instance, validated_data)
        profile = instance.userprofile
        dirty = False
        for attr, value in nested.items():
            if value is not None:
                setattr(profile, attr, value)
                dirty = True
        if dirty:
            profile.save()
        if email is not None:
            profile.user.email = email
            profile.user.save()
        return instance
class EmployeeProfessionalDetailSerializer(serializers.ModelSerializer):
    """
    Serializer for one education record: qualification, program,
    university and the period it covers.
    """
    userprofile_id = serializers.SerializerMethodField()
    highest_qualification_name = serializers.SerializerMethodField()
    programs_name = serializers.SerializerMethodField()
    university_name = serializers.SerializerMethodField()
    duration = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeProfessionalDetails
        fields = ('id', 'highest_qualification', 'highest_qualification_name', 'programs', 'programs_name', 'university', 'university_name', 'other_university', 'campus', 'other_campus', 'from_date', 'to_date', 'duration', 'present', 'userprofile_id', 'is_completed')

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk

    def validate(self, attrs):
        """Require a qualification and a coherent date range.

        An open-ended record must set present=True instead of to_date.
        """
        attrs = super().validate(attrs)
        if not attrs.get('highest_qualification'):
            raise serializers.ValidationError('Please set highest qualification')
        if not attrs.get('from_date'):
            raise serializers.ValidationError('Please set from date')
        if not attrs.get('to_date') and attrs.get('present') == False:
            raise serializers.ValidationError('Please set end date')
        if attrs.get('from_date') and attrs.get('to_date'):
            if attrs.get('from_date') > attrs.get('to_date'):
                raise serializers.ValidationError('Please select start date less than end date')
        return attrs

    def get_highest_qualification_name(self, obj):
        # Implicitly returns None when the relation is unset.
        if obj.highest_qualification:
            return obj.highest_qualification.title

    def get_programs_name(self, obj):
        if obj.programs:
            return obj.programs.title

    def get_university_name(self, obj):
        if obj.university:
            return obj.university.title

    def get_duration(self, obj):
        # "2015-2019" for a closed period, "2015-Present" for an ongoing one;
        # None when from_date is missing.
        if obj.from_date and obj.to_date:
            return str(obj.from_date.year) + '-' + str(obj.to_date.year)
        elif obj.from_date and obj.present:
            return str(obj.from_date.year) + '-' + 'Present'
class ResumeSerializer(serializers.ModelSerializer):
    """
    Serializer for uploading/reading a resume file (base64-encoded body
    plus its original file name).
    """
    userprofile_id = serializers.SerializerMethodField()
    resume = Base64FileField(
        required=False, allow_null=True
    )

    class Meta:
        model = Resume
        fields = ('id', 'resume', 'file_name', 'userprofile_id',)

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk

    def validate(self, attrs):
        """
        Reject unsupported file extensions.

        Fixes the original `split('.')[1]`, which raised IndexError on
        names without a dot and checked the wrong segment for names like
        "my.resume.pdf". The check is now case-insensitive, and a
        dot-less name is rejected with the validation error rather than
        a server error.
        """
        attrs = super().validate(attrs)
        file_name = attrs.get('file_name')
        if file_name:
            ext = file_name.rpartition('.')[2].lower()
            if ext not in ('doc', 'docx', 'pdf', 'rtf', 'png', 'jpg', 'jpeg', 'txt'):
                raise serializers.ValidationError('This type of file is not valid.')
        return attrs
class EmployeeAvailabilitySerializer(serializers.ModelSerializer):
    """Writable serializer for an employee's availability record."""
    userprofile_id = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeAvailability
        fields = ('id', 'days_per_year', 'hours_per_day', 'hourly_charges', 'userprofile_id', 'is_completed')

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk
class EmployeeProfessionalShortDataSerializer(serializers.ModelSerializer):
    """
    Serializer for Employee Short Professional details.

    Each related lookup (qualification / program / university) is
    rendered as a small {'id', 'title'} dict, or {} when unset.
    """
    userprofile_id = serializers.SerializerMethodField()
    highest_qualification = serializers.SerializerMethodField()
    programs = serializers.SerializerMethodField()
    university = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeProfessionalDetails
        fields = ('id', 'highest_qualification', 'programs', 'university', 'other_university', 'campus', 'other_campus', 'from_date', 'to_date', 'present', 'userprofile_id')

    @staticmethod
    def _as_id_title(related):
        """Render a related object as {'id', 'title'}, or {} if unset."""
        return {'id': related.id, 'title': related.title} if related else {}

    def get_userprofile_id(self, obj):
        return obj.userprofile.pk

    def get_highest_qualification(self, obj):
        return self._as_id_title(obj.highest_qualification)

    def get_programs(self, obj):
        return self._as_id_title(obj.programs)

    def get_university(self, obj):
        return self._as_id_title(obj.university)
class EmployeeWorkShortDataSerializer(serializers.ModelSerializer):
    """
    Serializer for Employee Short Work details.
    """
    userprofile_id = serializers.SerializerMethodField()
    role = serializers.SerializerMethodField()
    team_size = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeWorkDetails
        fields = ('id', 'client', 'project_title', 'from_date', 'to_date', 'project_location', 'role', 'employment_type', 'project_details', 'role_description', 'team_size', 'skill_used', 'userprofile_id')

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk

    def get_role(self, obj):
        """Serialize the related roles. (Dropped the original's unused `data = {}`.)"""
        return RoleSerializer(obj.role, many=True).data

    def get_team_size(self, obj):
        """Return {'id', 'title'} for the team-size choice, or {} if unset."""
        data = {}
        if obj.team_size:
            data = {'id': obj.team_size.id, 'title': obj.team_size.title}
        return data
class EmployeeEmploymentShortDataSerializer(serializers.ModelSerializer):
    """
    Serializer for an employment record; the many-to-many relations are
    flattened to lists of {'id', 'title'} dicts.
    """
    userprofile_id = serializers.SerializerMethodField()
    functional_areas = serializers.SerializerMethodField()
    role = serializers.SerializerMethodField()
    departments = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeEmploymentDetails
        fields = ('id', 'present', 'current_employer', 'date_start', 'date_end', 'current_designation', 'functional_areas', 'role', 'departments', 'job_role', 'userprofile_id', 'is_completed')

    def get_userprofile_id(self, obj):
        """Primary key of the owning UserProfile."""
        return obj.userprofile.pk

    def get_functional_areas(self, obj):
        # NOTE(review): a related manager is always truthy, so this guard is
        # effectively vacuous — an empty relation just yields []. Confirm
        # whether None-vs-[] matters to the clients before simplifying.
        if obj.functional_areas:
            functional_areas = [{'id': i.id, 'title': i.title} for i in obj.functional_areas.all()]
            return functional_areas

    def get_role(self, obj):
        if obj.role:
            role = [{'id': i.id, 'title': i.title} for i in obj.role.all()]
            return role

    def get_departments(self, obj):
        if obj.departments:
            departments = [{'id': i.id, 'title': i.title} for i in obj.departments.all()]
            return departments
class EmployeeAvailabilityShortDataSerializer(serializers.ModelSerializer):
    """
    Read serializer for an employee's availability; each lookup value
    (days/year, hours/day, hourly charges) is rendered as an
    {'id', 'title'} dict, or {} when the relation is unset.
    """
    userprofile_id = serializers.SerializerMethodField()
    days_per_year = serializers.SerializerMethodField()
    hours_per_day = serializers.SerializerMethodField()
    hourly_charges = serializers.SerializerMethodField()

    class Meta:
        model = EmployeeAvailability
        fields = ('id', 'days_per_year', 'hours_per_day', 'hourly_charges', 'userprofile_id')

    @staticmethod
    def _lookup(value):
        """{'id', 'title'} for a lookup row, or {} when unset."""
        return {'id': value.id, 'title': value.title} if value else {}

    def get_userprofile_id(self, obj):
        return obj.userprofile.pk

    def get_days_per_year(self, obj):
        return self._lookup(obj.days_per_year)

    def get_hours_per_day(self, obj):
        return self._lookup(obj.hours_per_day)

    def get_hourly_charges(self, obj):
        return self._lookup(obj.hourly_charges)
class EmployeeDetailSerializer(serializers.ModelSerializer):
    """
    Serializer for employee detail viewset.

    Aggregates every section of an employee's profile (basic,
    professional, employment, work, availability, resume) into one
    response, each section serialized with its own serializer.
    """
    basic_details = serializers.SerializerMethodField()
    professional_details = serializers.SerializerMethodField()
    employment_details = serializers.SerializerMethodField()
    work_details = serializers.SerializerMethodField()
    availability_details = serializers.SerializerMethodField()
    resume = serializers.SerializerMethodField()

    class Meta:
        model = UserProfile
        fields = ('id', 'basic_details', 'resume', 'professional_details', 'employment_details', 'work_details', 'availability_details')

    def get_basic_details(self, obj):
        """All EmployeeBasicDetails rows linked to this profile."""
        basic_details = EmployeeBasicDetails.objects.filter(userprofile=obj)
        return EmployeeBasicDetailSerializer(basic_details, many=True).data

    def get_professional_details(self, obj):
        """All education records linked to this profile."""
        professional_details = EmployeeProfessionalDetails.objects.filter(userprofile=obj)
        return EmployeeProfessionalShortDataSerializer(professional_details, many=True).data

    def get_employment_details(self, obj):
        """All employment records linked to this profile."""
        employment_details = EmployeeEmploymentDetails.objects.filter(userprofile=obj)
        return EmployeeEmploymentShortDataSerializer(employment_details, many=True).data

    def get_work_details(self, obj):
        """All project/work records linked to this profile."""
        work_details = EmployeeWorkDetails.objects.filter(userprofile=obj)
        return EmployeeWorkShortDataSerializer(work_details, many=True).data

    def get_availability_details(self, obj):
        """All availability records linked to this profile."""
        availability_details = EmployeeAvailability.objects.filter(userprofile=obj)
        return EmployeeAvailabilityShortDataSerializer(availability_details, many=True).data

    def get_resume(self, obj):
        # Only id and file_name are exposed; {} when no resume was uploaded.
        data = {}
        resume = Resume.objects.filter(userprofile=obj).values_list('id', 'file_name').first()
        if resume:
            data = {'id': resume[0], 'file_name': resume[1]}
        return data
class BankAccountsSerializer(serializers.ModelSerializer):
    """
    Serializer for Bank Accounts.

    On create, the account holder is first registered as a customer with
    the payment provider (create_customer); the bank-account row is only
    persisted when that succeeds, and the returned funding-source URL is
    stored on it.

    Fix over the original: `account_holder` was bound twice in create().
    """
    bank_name = serializers.SerializerMethodField()

    class Meta:
        model = BankAccounts
        fields = ('id', 'user', 'bank', 'bank_name', 'account_type', 'iban', 'account_holder', 'branch_identifier', 'branch_address', 'currency', 'bank_code', 'routing_number', 'customer_url', 'is_default', 'bank_account_no')

    def validate(self, attrs):
        """Require every field the payment-provider onboarding needs."""
        attrs = super().validate(attrs)
        if not attrs.get('bank'):
            raise serializers.ValidationError('Please select Bank')
        if not attrs.get('account_type'):
            raise serializers.ValidationError('Please set Account Type')
        if not attrs.get('iban'):
            raise serializers.ValidationError('Please set IBAN')
        if not attrs.get('account_holder'):
            raise serializers.ValidationError('Please set Account Holder Name')
        if not attrs.get('branch_identifier'):
            raise serializers.ValidationError('Please set Branch Identifier')
        if not attrs.get('branch_address'):
            raise serializers.ValidationError('Please set Branch Address')
        if not attrs.get('currency'):
            raise serializers.ValidationError('Please set Currency')
        if not attrs.get('routing_number'):
            raise serializers.ValidationError({"routing_number": "Please set routing number"})
        if not attrs.get('bank_account_no'):
            raise serializers.ValidationError({"bank_account_no": "Please set Bank Account Number"})
        return attrs

    def get_bank_name(self, obj):
        """Title of the related bank, or None when unset."""
        if obj.bank:
            return obj.bank.title

    def create(self, validated_data):
        """
        Register the customer with the payment provider, then persist.

        NOTE(review): as in the original, this implicitly returns None
        when bank_account_no is absent or the provider does not report
        status 200 — confirm that is intended before tightening.
        """
        bank = validated_data.get('bank', None)
        iban = validated_data.get('iban', None)
        account_holder = validated_data.get('account_holder', None)
        branch_identifier = validated_data.get('branch_identifier', None)
        branch_address = validated_data.get('branch_address', None)
        account_type = validated_data.get('account_type', None)
        bank_account_no = validated_data.get('bank_account_no', None)
        currency = validated_data.get('currency', None)
        routing_number = validated_data.get('routing_number', None)
        bank_name = bank.title  # validate() guarantees bank is set
        user = self.context['request'].user
        user_profile = UserProfile.objects.get(user=user)
        FirstName = user_profile.first_name
        LastName = user_profile.last_name
        email = user.email
        businessName = user_profile.title
        address1 = user_profile.address.name if user_profile.address else None
        if bank_account_no is not None:
            # Enforce account-number uniqueness before calling the provider.
            bank_obj = BankAccounts.objects.filter(bank_account_no=bank_account_no).values_list('bank_account_no', flat=True)
            if bank_obj.exists():
                raise serializers.ValidationError({"bank_account_no": "This account number is already existing."})
            else:
                data = {'FirstName': FirstName, 'LastName': LastName, 'businessName': businessName, 'address1': address1,
                        'email': email, 'bank_name': bank_name, 'iban': iban, 'account_holder': account_holder, 'branch_identifier': branch_identifier,
                        'branch_address': branch_address, 'account_type': account_type, 'currency': currency, 'routing_number': routing_number,
                        'bank_account_no': bank_account_no}
                created_customer = create_customer(data)
                if created_customer['status'] == 200:
                    instance = super().create(validated_data)
                    # Store the provider's funding-source URL on the new row.
                    bankaccount_obj = BankAccounts.objects.get(id=instance.id)
                    bankaccount_obj.funding_source_url = created_customer['funding_source_url']
                    bankaccount_obj.save()
                    return instance
class TestClassSerializer(serializers.ModelSerializer):
    """
    Serializer for TestClass rows; on output the photo field is replaced
    with a short-lived pre-signed S3 URL.

    Fixes over the original: the S3 bucket/key lookup is now skipped when
    no photo is set (the original hit S3 unconditionally, even with an
    empty key name — the sibling UserProfileShortDataSerializer already
    guarded this case), and the dead commented-out code was removed.
    """
    photo = serializers.ImageField(required=False, allow_null=True, use_url=True)

    class Meta:
        model = TestClass
        fields = ('id', 'photo', 'reference_no')

    def to_representation(self, instance):
        """Swap the stored photo path for a pre-signed (15-minute) S3 URL."""
        representation = super().to_representation(instance)
        if instance.photo.name:
            # Explicit host: the bucket lives on a non-default S3 endpoint.
            conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, host=AWS_HOST)
            bucket = conn.get_bucket(AWS_S3_BUCKET_NAME)
            key = bucket.get_key(instance.photo.name, validate=True)
            if key:
                url = key.generate_url(900)
                representation['photo'] = "{}".format(url)
        return representation

    def create(self, validated_data):
        # Thin pass-through kept for interface parity with the original.
        return super().create(validated_data)
|
11,525 | 7b687abbea16666eb55bad160c0c202b1d34d5ca | from django.db import models
from django.contrib.auth.models import AbstractUser
from mainapp.models import Template1
class User(AbstractUser):
    """Custom user with a role, a phone number and assigned templates."""
    # Role choices; labels are Russian: inspector / office / client.
    USER_ROLE = (
        ('inspector', 'Инспектор'),
        ('office', 'Офис'),
        ('client', 'Клиент'),
    )
    # Optional contact phone (free-form string).
    phone = models.CharField('Телефон', max_length=50, blank=True)
    # Which of the USER_ROLE choices this account holds; may be unset.
    role = models.CharField('Роль пользователя', null=True, blank=True, choices=USER_ROLE, max_length=50)
    # Templates assigned to this user; reverse accessor is Template1.client.
    client_templates1 = models.ManyToManyField(Template1, verbose_name='Шаблоны', related_name='client')
11,526 | 8422ecc52832ebaea63752e530110ad9b6f66d5a | # script1.py
from script2 import y, z
def x(i):
    """Even i: return z(i). Odd i: call z(i) for its effect, return y(i)."""
    if i % 2 == 0:
        return z(i)
    z(i)
    return y(i)
if __name__ == "__main__":
    # Drive both branches of x(); z(i) is then called once more per
    # iteration, matching the original script's call sequence.
    for i in range(3):
        x(i)
        z(i)
|
11,527 | 4432b06769b3715c8465bdf25e125285f3698b7f | import haystack
from haystack.signals import RealtimeSignalProcessor
from django.db.models import signals
from ..utils import check_solr
_old_sp = None
def setup():
    """Install a RealtimeSignalProcessor so saves update the search index."""
    check_solr()  # fail fast when no Solr backend is reachable
    global _old_sp
    _old_sp = haystack.signal_processor  # remembered for teardown()
    haystack.signal_processor = RealtimeSignalProcessor(haystack.connections, haystack.connection_router)
def teardown():
    """Restore the original signal processor and detach its receivers."""
    haystack.signal_processor = _old_sp
    # The realtime processor connected post_save/post_delete handlers;
    # clearing the receiver lists drops them without tracking each one.
    signals.post_save.receivers = []
    signals.post_delete.receivers = []
|
11,528 | 6902836d40ef174415fc595845bad9e0914b6354 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time : 2020/8/7 上午11:18
# @Author : 尹新
# @Site :
# @File : config
# @Software: PyCharm
from happy_python import HappyConfigBase
class Config(HappyConfigBase):
    """
    Configuration template: mail settings read from the [main] section.
    """
    def __init__(self):
        super().__init__()
        self.section = 'main'       # config-file section to read
        self.mail_host = ''         # SMTP server host
        self.mail_port1 = 465       # conventionally SMTP over SSL
        self.mail_port2 = 587       # conventionally SMTP with STARTTLS
        self.mail_user = ''         # login user
        self.mail_pass = ''         # login password / auth code
        self.sender = ''            # From address
        self.receivers = []         # list of To addresses
|
11,529 | f0913bc638f3088aad834b4902aaa1410d2a9e39 | from django.template.response import TemplateResponse
from .forms import AppForm
def apps_review(request):
    """Placeholder view for reviewing the list of apps (not implemented).

    The original ended with a redundant `pass` after the trace print.
    """
    print("apps_review")
def app_review(request):
    """Placeholder view for reviewing one app (not implemented).

    The original ended with a redundant `pass` after the trace print.
    """
    print("app_review")
def create_app(request):
    """Render the app-creation form; save it when a POST validates.

    GET renders an unbound form (`request.POST or None` leaves it
    unbound); a valid POST persists the app and re-renders the page.
    """
    bound_form = AppForm(request.POST or None)
    if bound_form.is_valid():
        bound_form.save()
    return TemplateResponse(request, 'app/create_app.html', {'app_form': bound_form})
def delete_app(request):
    """Placeholder view for deleting an app (not implemented).

    The original ended with a redundant `pass` after the trace print.
    """
    print("delete_app")
|
11,530 | 855e6477ff1a9c48f47ab3e345d06801b1c93395 | from typing import List
from fastapi import Depends, FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session
from . import crud, models, schemas
from .database import SessionLocal, engine
# Create the database tables once at import time. The original executed
# this statement twice (before and after the imports below); once suffices.
models.Base.metadata.create_all(bind=engine)

import datetime as dt
import random

app = FastAPI()

# Static assets and HTML templates for the page-rendering routes.
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")

# Wide-open CORS so the JS pages can call the *_api endpoints.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
    allow_credentials=True,
)
# Dependency
def get_db():
    """FastAPI dependency yielding a DB session that is always closed.

    The session is created *before* the try block: in the original, if
    SessionLocal() itself raised inside the try, the finally clause hit
    an unbound `db` (NameError masking the real error). This is also the
    pattern recommended by the FastAPI SQL-databases docs.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@app.get("/")
async def home(request: Request):
    """Landing page: renders index.html with links to the other routes."""
    return templates.TemplateResponse("index.html", {"request": request})
## start API and template routes — each data set gets a JSON endpoint
## (served from crud) and a companion HTML page rendered from templates/.

@app.get("/random_api")
def read_random(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_random (paginated via skip/limit)."""
    # This local deliberately shadows the module-level `random` import.
    random = crud.get_random(db, skip=skip, limit=limit)
    return random

@app.get("/random")
def show_random(request: Request):
    """Render random.html."""
    return templates.TemplateResponse("random.html", {"request": request})

@app.get("/zipcode_api")
def read_zip_stats(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zip_stats."""
    zip_stats = crud.get_zip_stats(db, skip=skip, limit=limit)
    return zip_stats

@app.get("/zipcode")
def show_zip_stats(request: Request):
    """Render zipcode.html."""
    return templates.TemplateResponse("zipcode.html", {"request": request})

@app.get("/distance_api")
def read_distance(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_distance."""
    distance = crud.get_distance(db, skip=skip, limit=limit)
    return distance

@app.get("/distance")
def show_distance(request: Request):
    """Render long.html."""
    return templates.TemplateResponse("long.html", {"request": request})

@app.get("/duration_api")
def read_duration(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_duration."""
    duration = crud.get_duration(db, skip=skip, limit=limit)
    return duration

@app.get("/duration")
def show_duration(request: Request):
    # NOTE(review): renders long.html, the same template as /distance —
    # confirm this is intentional and not a copy-paste slip.
    return templates.TemplateResponse("long.html", {"request": request})

@app.get("/redzone_api")
def read_red_zone(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_red_zone."""
    red_zone = crud.get_red_zone(db, skip=skip, limit=limit)
    return red_zone

@app.get("/redzone")
def show_red_zone(request: Request):
    """Render redzone.html."""
    return templates.TemplateResponse("redzone.html", {"request": request})

@app.get("/zero_distance_api")
def read_zero_distance(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zero_distance."""
    zero_distance = crud.get_zero_distance(db, skip=skip, limit=limit)
    return zero_distance

@app.get("/zero_distance")
def show_zero_distance(request: Request):
    """Render nowhere.html (zero-distance trips)."""
    return templates.TemplateResponse("nowhere.html", {"request": request})
## start API only routes — JSON endpoints with no companion HTML page.

@app.get("/complaints_api")
def read_complaints(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_complaints."""
    complaints = crud.get_complaints(db, skip=skip, limit=limit)
    return complaints

@app.get("/scooter_trips")
def read_trips(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_trips."""
    trips = crud.get_trips(db, skip=skip, limit=limit)
    return trips

@app.get("/zero_zip_code_api")
def read_zero_zip_code(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zero_zip_code."""
    zero_zip_code = crud.get_zero_zip_code(db, skip=skip, limit=limit)
    return zero_zip_code

@app.get("/zero_month_api")
def read_zero_month(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zero_month."""
    zero_month = crud.get_zero_month(db, skip=skip, limit=limit)
    return zero_month

@app.get("/zero_weekday_api")
def read_zero_weekday(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zero_weekday."""
    zero_weekday = crud.get_zero_weekday(db, skip=skip, limit=limit)
    return zero_weekday

@app.get("/zero_hour_api")
def read_zero_hour(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    """Return rows from crud.get_zero_hour."""
    zero_hour = crud.get_zero_hour(db, skip=skip, limit=limit)
    return zero_hour
# if __name__ == "__main__":
# app.run(debug=True)
|
11,531 | efa7ec6bc992aa3147da51ab3f63002a623a88a5 | #!/root/ysy/python/bin/python3
'''myerror
trigger error
'''
def age_fun1(name, age):
    """Print "<name> is <age> years old"; raise ValueError outside (1, 120)."""
    if 1 < age < 120:
        print("%s is %s years old"%(name,age))
    else:
        raise ValueError("age out of range")
def age_fun2(name, age):
    """Print "<name> is <age> years old"; raise AssertionError outside (1, 120).

    The original used a bare `assert` for input validation, which is
    stripped under `python -O` and silently disables the check. An
    explicit raise keeps the same AssertionError type but always runs.
    """
    if not 1 < age < 120:
        raise AssertionError("age out of range too")
    print("%s is %s years old" % (name, age))
if __name__ == '__main__':
    # Demo: run the same input through both validators; age_fun2 is only
    # reached when age_fun1 does not raise.
    name = input('name:')
    age = int(input('age:'))
    age_fun1(name,age)
    age_fun2(name,age)
11,532 | 65969a74b27cf13f02dec3b82bdedd96a02c5231 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: model/detection/protos/losses.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='model/detection/protos/losses.proto',
package='model.detection.protos',
serialized_pb=_b('\n#model/detection/protos/losses.proto\x12\x16model.detection.protos\"\x9c\x02\n\x04Loss\x12\x43\n\x11localization_loss\x18\x01 \x01(\x0b\x32(.model.detection.protos.LocalizationLoss\x12G\n\x13\x63lassification_loss\x18\x02 \x01(\x0b\x32*.model.detection.protos.ClassificationLoss\x12\x44\n\x12hard_example_miner\x18\x03 \x01(\x0b\x32(.model.detection.protos.HardExampleMiner\x12 \n\x15\x63lassification_weight\x18\x04 \x01(\x02:\x01\x31\x12\x1e\n\x13localization_weight\x18\x05 \x01(\x02:\x01\x31\"\x97\x02\n\x10LocalizationLoss\x12I\n\x0bweighted_l2\x18\x01 \x01(\x0b\x32\x32.model.detection.protos.WeightedL2LocalizationLossH\x00\x12V\n\x12weighted_smooth_l1\x18\x02 \x01(\x0b\x32\x38.model.detection.protos.WeightedSmoothL1LocalizationLossH\x00\x12K\n\x0cweighted_iou\x18\x03 \x01(\x0b\x32\x33.model.detection.protos.WeightedIOULocalizationLossH\x00\x42\x13\n\x11localization_loss\">\n\x1aWeightedL2LocalizationLoss\x12 \n\x11\x61nchorwise_output\x18\x01 \x01(\x08:\x05\x66\x61lse\"D\n WeightedSmoothL1LocalizationLoss\x12 \n\x11\x61nchorwise_output\x18\x01 \x01(\x08:\x05\x66\x61lse\"\x1d\n\x1bWeightedIOULocalizationLoss\"\xb8\x02\n\x12\x43lassificationLoss\x12U\n\x10weighted_sigmoid\x18\x01 \x01(\x0b\x32\x39.model.detection.protos.WeightedSigmoidClassificationLossH\x00\x12U\n\x10weighted_softmax\x18\x02 \x01(\x0b\x32\x39.model.detection.protos.WeightedSoftmaxClassificationLossH\x00\x12]\n\x14\x62ootstrapped_sigmoid\x18\x03 \x01(\x0b\x32=.model.detection.protos.BootstrappedSigmoidClassificationLossH\x00\x42\x15\n\x13\x63lassification_loss\"E\n!WeightedSigmoidClassificationLoss\x12 \n\x11\x61nchorwise_output\x18\x01 \x01(\x08:\x05\x66\x61lse\"E\n!WeightedSoftmaxClassificationLoss\x12 \n\x11\x61nchorwise_output\x18\x01 \x01(\x08:\x05\x66\x61lse\"w\n%BootstrappedSigmoidClassificationLoss\x12\r\n\x05\x61lpha\x18\x01 \x01(\x02\x12\x1d\n\x0ehard_bootstrap\x18\x02 \x01(\x08:\x05\x66\x61lse\x12 \n\x11\x61nchorwise_output\x18\x03 
\x01(\x08:\x05\x66\x61lse\"\xa0\x02\n\x10HardExampleMiner\x12\x1d\n\x11num_hard_examples\x18\x01 \x01(\x05:\x02\x36\x34\x12\x1a\n\riou_threshold\x18\x02 \x01(\x02:\x03\x30.7\x12J\n\tloss_type\x18\x03 \x01(\x0e\x32\x31.model.detection.protos.HardExampleMiner.LossType:\x04\x42OTH\x12%\n\x1amax_negatives_per_positive\x18\x04 \x01(\x05:\x01\x30\x12\"\n\x17min_negatives_per_image\x18\x05 \x01(\x05:\x01\x30\":\n\x08LossType\x12\x08\n\x04\x42OTH\x10\x00\x12\x12\n\x0e\x43LASSIFICATION\x10\x01\x12\x10\n\x0cLOCALIZATION\x10\x02')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_HARDEXAMPLEMINER_LOSSTYPE = _descriptor.EnumDescriptor(
name='LossType',
full_name='model.detection.protos.HardExampleMiner.LossType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='BOTH', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLASSIFICATION', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOCALIZATION', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1606,
serialized_end=1664,
)
_sym_db.RegisterEnumDescriptor(_HARDEXAMPLEMINER_LOSSTYPE)
_LOSS = _descriptor.Descriptor(
name='Loss',
full_name='model.detection.protos.Loss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='localization_loss', full_name='model.detection.protos.Loss.localization_loss', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='classification_loss', full_name='model.detection.protos.Loss.classification_loss', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hard_example_miner', full_name='model.detection.protos.Loss.hard_example_miner', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='classification_weight', full_name='model.detection.protos.Loss.classification_weight', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='localization_weight', full_name='model.detection.protos.Loss.localization_weight', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=64,
serialized_end=348,
)
_LOCALIZATIONLOSS = _descriptor.Descriptor(
name='LocalizationLoss',
full_name='model.detection.protos.LocalizationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='weighted_l2', full_name='model.detection.protos.LocalizationLoss.weighted_l2', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weighted_smooth_l1', full_name='model.detection.protos.LocalizationLoss.weighted_smooth_l1', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weighted_iou', full_name='model.detection.protos.LocalizationLoss.weighted_iou', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='localization_loss', full_name='model.detection.protos.LocalizationLoss.localization_loss',
index=0, containing_type=None, fields=[]),
],
serialized_start=351,
serialized_end=630,
)
_WEIGHTEDL2LOCALIZATIONLOSS = _descriptor.Descriptor(
name='WeightedL2LocalizationLoss',
full_name='model.detection.protos.WeightedL2LocalizationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='anchorwise_output', full_name='model.detection.protos.WeightedL2LocalizationLoss.anchorwise_output', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=632,
serialized_end=694,
)
_WEIGHTEDSMOOTHL1LOCALIZATIONLOSS = _descriptor.Descriptor(
name='WeightedSmoothL1LocalizationLoss',
full_name='model.detection.protos.WeightedSmoothL1LocalizationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='anchorwise_output', full_name='model.detection.protos.WeightedSmoothL1LocalizationLoss.anchorwise_output', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=696,
serialized_end=764,
)
_WEIGHTEDIOULOCALIZATIONLOSS = _descriptor.Descriptor(
name='WeightedIOULocalizationLoss',
full_name='model.detection.protos.WeightedIOULocalizationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=766,
serialized_end=795,
)
_CLASSIFICATIONLOSS = _descriptor.Descriptor(
name='ClassificationLoss',
full_name='model.detection.protos.ClassificationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='weighted_sigmoid', full_name='model.detection.protos.ClassificationLoss.weighted_sigmoid', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weighted_softmax', full_name='model.detection.protos.ClassificationLoss.weighted_softmax', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bootstrapped_sigmoid', full_name='model.detection.protos.ClassificationLoss.bootstrapped_sigmoid', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='classification_loss', full_name='model.detection.protos.ClassificationLoss.classification_loss',
index=0, containing_type=None, fields=[]),
],
serialized_start=798,
serialized_end=1110,
)
_WEIGHTEDSIGMOIDCLASSIFICATIONLOSS = _descriptor.Descriptor(
name='WeightedSigmoidClassificationLoss',
full_name='model.detection.protos.WeightedSigmoidClassificationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='anchorwise_output', full_name='model.detection.protos.WeightedSigmoidClassificationLoss.anchorwise_output', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1112,
serialized_end=1181,
)
_WEIGHTEDSOFTMAXCLASSIFICATIONLOSS = _descriptor.Descriptor(
name='WeightedSoftmaxClassificationLoss',
full_name='model.detection.protos.WeightedSoftmaxClassificationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='anchorwise_output', full_name='model.detection.protos.WeightedSoftmaxClassificationLoss.anchorwise_output', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1183,
serialized_end=1252,
)
_BOOTSTRAPPEDSIGMOIDCLASSIFICATIONLOSS = _descriptor.Descriptor(
name='BootstrappedSigmoidClassificationLoss',
full_name='model.detection.protos.BootstrappedSigmoidClassificationLoss',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='alpha', full_name='model.detection.protos.BootstrappedSigmoidClassificationLoss.alpha', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hard_bootstrap', full_name='model.detection.protos.BootstrappedSigmoidClassificationLoss.hard_bootstrap', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='anchorwise_output', full_name='model.detection.protos.BootstrappedSigmoidClassificationLoss.anchorwise_output', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1254,
serialized_end=1373,
)
_HARDEXAMPLEMINER = _descriptor.Descriptor(
name='HardExampleMiner',
full_name='model.detection.protos.HardExampleMiner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='num_hard_examples', full_name='model.detection.protos.HardExampleMiner.num_hard_examples', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=64,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='iou_threshold', full_name='model.detection.protos.HardExampleMiner.iou_threshold', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.7,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_type', full_name='model.detection.protos.HardExampleMiner.loss_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_negatives_per_positive', full_name='model.detection.protos.HardExampleMiner.max_negatives_per_positive', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_negatives_per_image', full_name='model.detection.protos.HardExampleMiner.min_negatives_per_image', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_HARDEXAMPLEMINER_LOSSTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1376,
serialized_end=1664,
)
_LOSS.fields_by_name['localization_loss'].message_type = _LOCALIZATIONLOSS
_LOSS.fields_by_name['classification_loss'].message_type = _CLASSIFICATIONLOSS
_LOSS.fields_by_name['hard_example_miner'].message_type = _HARDEXAMPLEMINER
_LOCALIZATIONLOSS.fields_by_name['weighted_l2'].message_type = _WEIGHTEDL2LOCALIZATIONLOSS
_LOCALIZATIONLOSS.fields_by_name['weighted_smooth_l1'].message_type = _WEIGHTEDSMOOTHL1LOCALIZATIONLOSS
_LOCALIZATIONLOSS.fields_by_name['weighted_iou'].message_type = _WEIGHTEDIOULOCALIZATIONLOSS
_LOCALIZATIONLOSS.oneofs_by_name['localization_loss'].fields.append(
_LOCALIZATIONLOSS.fields_by_name['weighted_l2'])
_LOCALIZATIONLOSS.fields_by_name['weighted_l2'].containing_oneof = _LOCALIZATIONLOSS.oneofs_by_name['localization_loss']
_LOCALIZATIONLOSS.oneofs_by_name['localization_loss'].fields.append(
_LOCALIZATIONLOSS.fields_by_name['weighted_smooth_l1'])
_LOCALIZATIONLOSS.fields_by_name['weighted_smooth_l1'].containing_oneof = _LOCALIZATIONLOSS.oneofs_by_name['localization_loss']
_LOCALIZATIONLOSS.oneofs_by_name['localization_loss'].fields.append(
_LOCALIZATIONLOSS.fields_by_name['weighted_iou'])
_LOCALIZATIONLOSS.fields_by_name['weighted_iou'].containing_oneof = _LOCALIZATIONLOSS.oneofs_by_name['localization_loss']
_CLASSIFICATIONLOSS.fields_by_name['weighted_sigmoid'].message_type = _WEIGHTEDSIGMOIDCLASSIFICATIONLOSS
_CLASSIFICATIONLOSS.fields_by_name['weighted_softmax'].message_type = _WEIGHTEDSOFTMAXCLASSIFICATIONLOSS
_CLASSIFICATIONLOSS.fields_by_name['bootstrapped_sigmoid'].message_type = _BOOTSTRAPPEDSIGMOIDCLASSIFICATIONLOSS
_CLASSIFICATIONLOSS.oneofs_by_name['classification_loss'].fields.append(
_CLASSIFICATIONLOSS.fields_by_name['weighted_sigmoid'])
_CLASSIFICATIONLOSS.fields_by_name['weighted_sigmoid'].containing_oneof = _CLASSIFICATIONLOSS.oneofs_by_name['classification_loss']
_CLASSIFICATIONLOSS.oneofs_by_name['classification_loss'].fields.append(
_CLASSIFICATIONLOSS.fields_by_name['weighted_softmax'])
_CLASSIFICATIONLOSS.fields_by_name['weighted_softmax'].containing_oneof = _CLASSIFICATIONLOSS.oneofs_by_name['classification_loss']
_CLASSIFICATIONLOSS.oneofs_by_name['classification_loss'].fields.append(
_CLASSIFICATIONLOSS.fields_by_name['bootstrapped_sigmoid'])
_CLASSIFICATIONLOSS.fields_by_name['bootstrapped_sigmoid'].containing_oneof = _CLASSIFICATIONLOSS.oneofs_by_name['classification_loss']
_HARDEXAMPLEMINER.fields_by_name['loss_type'].enum_type = _HARDEXAMPLEMINER_LOSSTYPE
_HARDEXAMPLEMINER_LOSSTYPE.containing_type = _HARDEXAMPLEMINER
DESCRIPTOR.message_types_by_name['Loss'] = _LOSS
DESCRIPTOR.message_types_by_name['LocalizationLoss'] = _LOCALIZATIONLOSS
DESCRIPTOR.message_types_by_name['WeightedL2LocalizationLoss'] = _WEIGHTEDL2LOCALIZATIONLOSS
DESCRIPTOR.message_types_by_name['WeightedSmoothL1LocalizationLoss'] = _WEIGHTEDSMOOTHL1LOCALIZATIONLOSS
DESCRIPTOR.message_types_by_name['WeightedIOULocalizationLoss'] = _WEIGHTEDIOULOCALIZATIONLOSS
DESCRIPTOR.message_types_by_name['ClassificationLoss'] = _CLASSIFICATIONLOSS
DESCRIPTOR.message_types_by_name['WeightedSigmoidClassificationLoss'] = _WEIGHTEDSIGMOIDCLASSIFICATIONLOSS
DESCRIPTOR.message_types_by_name['WeightedSoftmaxClassificationLoss'] = _WEIGHTEDSOFTMAXCLASSIFICATIONLOSS
DESCRIPTOR.message_types_by_name['BootstrappedSigmoidClassificationLoss'] = _BOOTSTRAPPEDSIGMOIDCLASSIFICATIONLOSS
DESCRIPTOR.message_types_by_name['HardExampleMiner'] = _HARDEXAMPLEMINER
Loss = _reflection.GeneratedProtocolMessageType('Loss', (_message.Message,), dict(
DESCRIPTOR = _LOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.Loss)
))
_sym_db.RegisterMessage(Loss)
LocalizationLoss = _reflection.GeneratedProtocolMessageType('LocalizationLoss', (_message.Message,), dict(
DESCRIPTOR = _LOCALIZATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.LocalizationLoss)
))
_sym_db.RegisterMessage(LocalizationLoss)
WeightedL2LocalizationLoss = _reflection.GeneratedProtocolMessageType('WeightedL2LocalizationLoss', (_message.Message,), dict(
DESCRIPTOR = _WEIGHTEDL2LOCALIZATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.WeightedL2LocalizationLoss)
))
_sym_db.RegisterMessage(WeightedL2LocalizationLoss)
WeightedSmoothL1LocalizationLoss = _reflection.GeneratedProtocolMessageType('WeightedSmoothL1LocalizationLoss', (_message.Message,), dict(
DESCRIPTOR = _WEIGHTEDSMOOTHL1LOCALIZATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.WeightedSmoothL1LocalizationLoss)
))
_sym_db.RegisterMessage(WeightedSmoothL1LocalizationLoss)
WeightedIOULocalizationLoss = _reflection.GeneratedProtocolMessageType('WeightedIOULocalizationLoss', (_message.Message,), dict(
DESCRIPTOR = _WEIGHTEDIOULOCALIZATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.WeightedIOULocalizationLoss)
))
_sym_db.RegisterMessage(WeightedIOULocalizationLoss)
ClassificationLoss = _reflection.GeneratedProtocolMessageType('ClassificationLoss', (_message.Message,), dict(
DESCRIPTOR = _CLASSIFICATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.ClassificationLoss)
))
_sym_db.RegisterMessage(ClassificationLoss)
WeightedSigmoidClassificationLoss = _reflection.GeneratedProtocolMessageType('WeightedSigmoidClassificationLoss', (_message.Message,), dict(
DESCRIPTOR = _WEIGHTEDSIGMOIDCLASSIFICATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.WeightedSigmoidClassificationLoss)
))
_sym_db.RegisterMessage(WeightedSigmoidClassificationLoss)
WeightedSoftmaxClassificationLoss = _reflection.GeneratedProtocolMessageType('WeightedSoftmaxClassificationLoss', (_message.Message,), dict(
DESCRIPTOR = _WEIGHTEDSOFTMAXCLASSIFICATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.WeightedSoftmaxClassificationLoss)
))
_sym_db.RegisterMessage(WeightedSoftmaxClassificationLoss)
BootstrappedSigmoidClassificationLoss = _reflection.GeneratedProtocolMessageType('BootstrappedSigmoidClassificationLoss', (_message.Message,), dict(
DESCRIPTOR = _BOOTSTRAPPEDSIGMOIDCLASSIFICATIONLOSS,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.BootstrappedSigmoidClassificationLoss)
))
_sym_db.RegisterMessage(BootstrappedSigmoidClassificationLoss)
HardExampleMiner = _reflection.GeneratedProtocolMessageType('HardExampleMiner', (_message.Message,), dict(
DESCRIPTOR = _HARDEXAMPLEMINER,
__module__ = 'model.detection.protos.losses_pb2'
# @@protoc_insertion_point(class_scope:model.detection.protos.HardExampleMiner)
))
_sym_db.RegisterMessage(HardExampleMiner)
# @@protoc_insertion_point(module_scope)
|
11,533 | dd7e6073edcac533193552e9520349bc59146e98 | from Organisms.Animals.Animal import Animal
import random
class Fox(Animal):
    """Predator with strength 3 and initiative 7, rendered in orange."""

    def __init__(self, tile, world):
        super().__init__(tile, world)
        self.strength = 3
        self.initiative = 7

    def draw(self):
        # Hex colour the renderer uses for foxes.
        return "#ff5000"

    def action(self):
        # Eligible destinations: empty neighbouring tiles, or tiles whose
        # occupant the fox can overpower (strength no greater than its own).
        candidates = [
            t for t in self.world.get_neighbours(self.tile)
            if t.organism is None or t.organism.strength <= self.strength
        ]
        if candidates:
            self.move(random.choice(candidates))
|
11,534 | 7b3a6c55aad3be9f559f323c254cf51f21dd0b81 | import os
import csv
import matplotlib.pyplot as plt
import pandas as pd
# Partition the result directory into per-instance result files (names
# containing "instance") and everything else (scheduler logs).
path = "./result"
file_list = os.listdir(path)
instance_list = []
scheduler_list = []
for i in file_list:
    if i.find("instance") != -1:
        instance_list.append(i)
    else:
        scheduler_list.append(i)
# Parse each instance result file and collect one summary row per file.
results = []
for name in instance_list:
    filename = "./result/" + name
    # The first 12 rows are a preamble; the metrics of interest end up as
    # column headers of the resulting frame.
    df = pd.read_csv(filename, skiprows=12)
    data = list(df.columns)
    # Positions just past each of the first seven underscores in the name.
    bar1 = name.find('_') + 1
    bar2 = name.find('_', bar1) + 1
    bar3 = name.find('_', bar2) + 1
    bar4 = name.find('_', bar3) + 1
    bar5 = name.find('_', bar4) + 1
    bar6 = name.find('_', bar5) + 1
    bar7 = name.find('_', bar6) + 1
    # Extract fields from the underscore-delimited filename.
    # NOTE(review): the segments bar3..bar4 and bar5..bar6 are skipped —
    # presumably label words in the filename format; confirm against the
    # actual result-file naming scheme.
    algo = name[bar1:bar2-1]
    mecha = name[bar2:bar3-1]
    batch = int(name[bar4:bar5-1])
    num = int(name[bar6:bar7-1])
    result = []
    result.extend((algo, mecha, batch, num))
    # Columns 1, 3, 5 hold the metric values — assumes ANTT/STP/Fairness
    # order; TODO confirm against the result-file layout.
    result.extend((data[1], data[3], data[5]))
    results.append(result)
# Write all summary rows to summary.csv with a fixed header.
with open("summary.csv", 'w', newline='') as file:
    writer = csv.writer(file)
    writer.writerow(["Algorithm", "Mechanism", "Batch Size", "Instance Number", "ANTT", "STP", "Fairness"])
    for i in results:
        writer.writerow(i)
# Re-read the summary and average each metric per (algorithm, mechanism)
# bucket. Buckets 0-3 are the dynamic runs (HPF/SJF/TOKEN/PREMA); buckets
# 4-7 are the same algorithms under the STATIC mechanism.
df = pd.read_csv("summary.csv")
row = df.shape[0]
antt = [0] * 8
stp = [0] * 8
fairness = [0] * 8
num = [0] * 8
for i in range(row):
    ser = df.iloc[i]
    algo_num = 0
    if ser['Algorithm'] == 'HPF':
        algo_num = 0
    elif ser['Algorithm'] == 'SJF':
        algo_num = 1
    elif ser['Algorithm'] == 'TOKEN':
        algo_num = 2
    elif ser['Algorithm'] == 'PREMA':
        algo_num = 3
    else:
        # Unknown algorithm: leave it out of every bucket.
        continue
    mecha_num = 0
    if ser['Mechanism'] == 'STATIC':
        mecha_num = 1
    # Flatten (algorithm, mechanism) into a single bucket index 0..7.
    index = algo_num + mecha_num * 4
    antt[index] += ser['ANTT']
    stp[index] += ser['STP']
    fairness[index] += ser['Fairness']
    num[index] += 1
# Convert the per-bucket sums into means, skipping empty buckets.
for i in range(8):
    if num[i] == 0:
        continue
    else:
        antt[i] /= num[i]
        stp[i] /= num[i]
        fairness[i] /= num[i]
# Bar-chart the averaged metrics: top row = dynamic runs (buckets 0-3),
# bottom row = STATIC runs (buckets 4-7).
index = ['HPF', 'SJF', 'TOKEN', 'PREMA', 'HPF', 'SJF', 'TOKEN', 'PREMA']
plt.subplot(2, 3, 1)
plt.bar(index[0:4], antt[0:4])
plt.title('ANTT')
plt.subplot(2, 3, 2)
plt.bar(index[0:4], fairness[0:4])
plt.title('Fairness')
plt.subplot(2, 3, 3)
plt.bar(index[0:4], stp[0:4])
plt.title('STP')
plt.subplot(2, 3, 4)
plt.bar(index[4:], antt[4:])
plt.title('ANTT(Static)')
plt.subplot(2, 3, 5)
plt.bar(index[4:], fairness[4:])
plt.title('Fairness(Static)')
plt.subplot(2, 3, 6)
plt.bar(index[4:], stp[4:])
plt.title('STP(Static)')
# NOTE(review): rc settings set after the artists are created do not
# restyle existing text — move before the subplot calls if the larger
# font is intended to apply to these plots.
plt.rc('font', size=20)
plt.show()
11,535 | e51e0757c3190eb0d89ed2b0ba68710ae7730b9b | from heapq import heappush, heappop
# n: number of vertices (1-indexed), e: number of edges.
n, e = map(int, input().split())
# Sentinel distance meaning "unreachable".
INF = int(1e9)
# Adjacency list; each entry is (edge weight, neighbour vertex).
data = [[] for _ in range(n + 1)]
for _ in range(e):
    a, b, c = map(int, input().split())
    # Undirected edge: store it in both directions.
    data[a].append((c, b))
    data[b].append((c, a))
def dijkstra(start, end):
    """Return the shortest-path distance from *start* to *end*.

    Runs Dijkstra's algorithm over the module-level adjacency list
    ``data`` on vertices 1..n; unreachable targets yield ``INF``.
    """
    dist = [INF for _ in range(n + 1)]
    dist[start] = 0
    heap = [(0, start)]
    while heap:
        cur_dist, cur = heappop(heap)
        # Stale heap entry — a shorter route was already settled.
        if cur_dist > dist[cur]:
            continue
        for weight, nxt in data[cur]:
            candidate = cur_dist + weight
            if candidate < dist[nxt]:
                dist[nxt] = candidate
                heappush(heap, (candidate, nxt))
    return dist[end]
def solved():
    # The path must visit both waypoints v1 and v2 (in either order).
    v1, v2 = map(int, input().split())
    # Candidate routes: 1 -> v1 -> v2 -> n and 1 -> v2 -> v1 -> n.
    result = dijkstra(1, v1)+dijkstra(v1, v2)+dijkstra(v2, n)
    result2 = dijkstra(1, v2)+dijkstra(v2, v1)+dijkstra(v1, n)
    # The graph is undirected, so if any leg of one route is unreachable
    # the other route is unreachable too — checking result alone suffices.
    if result >= INF:
        print(-1)
    else:
        print(min(result, result2))
solved()
11,536 | 5f6f8ff9aad6a51cd05ed5d9b0e9043ab9663ebe | # Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
##############################################################################
"""Perform inference on a single image or all images with a certain extension
(e.g., .jpg) in a folder.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import defaultdict
import argparse
import cv2 # NOQA (Must import before importing caffe2 due to bug in cv2)
import glob
import logging
import os
import sys
import time
import pdb
import subprocess
from caffe2.python import workspace
from detectron.core.config import assert_and_infer_cfg
from detectron.core.config import cfg
from detectron.core.config import merge_cfg_from_file
from detectron.utils.io import cache_url
from detectron.utils.logging import setup_logging
from detectron.utils.timer import Timer
import detectron.core.test_engine as infer_engine
import detectron.datasets.dummy_datasets as dummy_datasets
import detectron.utils.c2 as c2_utils
import detectron.utils.vis as vis_utils
c2_utils.import_detectron_ops()
# OpenCL may be enabled by default in OpenCV3; disable it because it's not
# thread safe and causes unwanted GPU memory allocations.
cv2.ocl.setUseOpenCL(False)
def parse_args():
    """Build and parse the command-line arguments for video inference.

    Prints the help text and exits with status 1 when the program is
    invoked with no arguments at all.
    """
    arg_parser = argparse.ArgumentParser(description='End-to-end inference')
    # (flag, dest attribute, help text, default value) — all options take str.
    option_specs = [
        ('--cfg', 'cfg',
         'cfg model file (/path/to/model_config.yaml)', None),
        ('--wts', 'weights',
         'weights model file (/path/to/model_weights.pkl)', None),
        ('--output-dir', 'output_dir',
         'Directory to keep the output files', '/tmp/infer_vid'),
        ('--input-file', 'input',
         'Input video file', None),
    ]
    for flag, dest, help_text, default in option_specs:
        arg_parser.add_argument(
            flag, dest=dest, help=help_text, default=default, type=str
        )
    # Bare invocation: show usage instead of silently using all defaults.
    if len(sys.argv) == 1:
        arg_parser.print_help()
        sys.exit(1)
    return arg_parser.parse_args()
def main(args):
    """Run Densepose-style inference on every other frame of a video.

    Loads the model described by ``args.cfg``/``args.weights``, reads
    ``args.input`` frame by frame, runs detection on each even frame,
    writes a visualisation PNG per processed frame to ``args.output_dir``,
    and finally stitches the PNGs into an mp4 with ffmpeg.
    """
    merge_cfg_from_file(args.cfg)
    cfg.NUM_GPUS = 1
    args.weights = cache_url(args.weights, cfg.DOWNLOAD_CACHE)
    assert_and_infer_cfg(cache_urls=False)
    model = infer_engine.initialize_model_from_cfg(args.weights)
    dummy_coco_dataset = dummy_datasets.get_coco_dataset()

    frame_no = 0
    cap = cv2.VideoCapture(args.input)
    total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    if not cap.isOpened():
        # Bug fix: the original evaluated the bare name ``exit`` (a no-op
        # expression) and fell through into the processing loop; abort
        # explicitly instead.
        print("Error opening video stream or file")
        sys.exit(1)

    grab = 1
    while cap.isOpened() and grab <= total_frames:
        print("|Processing Frame {0}/{1} ".format(grab, total_frames))
        grab += 1
        captime = time.time()
        ret_val, im = cap.read()
        if not ret_val:
            # Bug fix: the original used ``im`` without checking the read
            # succeeded; stop cleanly at end-of-stream instead of crashing.
            break
        print('\t-Frame read in{: .3f}s'.format(time.time() - captime))
        # Process only every other frame to halve the inference workload.
        if grab % 2 != 0:
            continue
        # Normalise the frame size before inference.
        im = cv2.resize(im, (int(1280/1), int(720/1)))
        timers = defaultdict(Timer)
        t = time.time()
        with c2_utils.NamedCudaScope(0):
            cls_boxes, cls_segms, cls_keyps, cls_bodys = infer_engine.im_detect_all(
                model, im, None, timers=timers
            )
        print('\t | Inference time: {:.3f}s'.format(time.time() - t))
        for k, v in timers.items():
            print('\t | {}: {:.3f}s'.format(k, v.average_time))
        output_name = 'out.mp4'
        ret = vis_utils.vis_one_image(
            im[:, :, ::-1],  # BGR -> RGB for visualization
            output_name,
            args.output_dir,
            cls_boxes,
            cls_segms,
            cls_keyps,
            cls_bodys,
            dataset=dummy_coco_dataset,
            box_alpha=0.3,
            show_class=False,
            thresh=0.7,
            kp_thresh=2
        )
        if ret == True:
            frame_no = frame_no + 1
    cap.release()
    cv2.destroyAllWindows()
    # NOTE(review): the PNGs are read from <output_dir>/vid but the encoded
    # video is written to the relative path 'vid/out.mp4' — confirm that
    # asymmetry is intended.
    subprocess.call('ffmpeg -framerate 20 -i {}/file%02d.png -c:v libx264 -r 30 -pix_fmt yuv420p vid/out.mp4'
                    .format(os.path.join(args.output_dir, 'vid')),
                    shell=True)
if __name__ == '__main__':
    # Quiet caffe2's native logging, configure Python logging, then run.
    workspace.GlobalInit(['caffe2', '--caffe2_log_level=0'])
    setup_logging(__name__)
    args = parse_args()
    main(args)
11,537 | 9168a7a6b95e50d0b2da5ef4d9307c337f1f8f0b | from ea_adult_selection import adult_select_pareto_crowding_distance
from ea_genetic_operators import displacement_mutation, ordered_crossover, hill_climbing
from ea_genome import TSPGenome
from ea_parent_selection import parent_select_crowding_tournament
class InvalidDatasetError(Exception):
    """Raised when the distance and cost data sets have mismatched lengths."""
class TSPProblem:
    """Configuration bundle for a multi-objective TSP evolutionary run.

    Holds the problem data (distance and cost matrices) together with the
    EA settings: population sizing, genome type, selection strategies and
    variation operators.
    """

    def __init__(self, distances, costs, run):
        """Store the data sets and set up the EA parameters.

        Raises:
            InvalidDatasetError: if *distances* and *costs* differ in length.
        """
        if len(distances) != len(costs):
            raise InvalidDatasetError('Length of data sets not equal')

        # Problem data.
        self.costs = costs
        self.distances = distances
        self.run = run
        self.n_cities = len(costs)

        # Run sizing.
        self.population_size = 500
        self.generation_limit = 100  # number of generations (iterations)

        # Genome representation.
        self.genome = TSPGenome
        self.genome_params = {}

        # Selection strategies.
        self.adult_select_method = adult_select_pareto_crowding_distance
        self.parent_select_method = parent_select_crowding_tournament
        self.parent_select_params = {'tournament_size': 2}

        # Variation operators and rates.
        self.mutation_method = displacement_mutation
        self.crossover_method = ordered_crossover
        self.hill_climbing_method = hill_climbing
        self.mutation_rate = 0.2
        self.crossover_rate = 0.6
        # Tuning notes from earlier experiments:
        #   mut tried: 0.001, 0.005, 0.01, 0.05, 0.1; cross tried: 0.5-0.9
        #   best combo: mut 0.01, cross 0.8
        #   pop/gen tried: 400/500, 200/1000, 100/2000, 50/4000 (best 100/2000)
11,538 | baca6e9ba078d33d59bba3f726e9dd99727c9374 | import sys
sys.path.append('../../../')
import keras
from keras.layers import Input, Dense, Activation
from keras.layers.merge import Maximum, Concatenate
from keras.models import Model
from keras.optimizers import Adam
from keras.utils import plot_model
# import required to load the attacked model
from autoencoder_BATADAL import load_AEED
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import accuracy_score, f1_score, roc_curve, auc, precision_score
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pickle
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
def sort_temp_and_drop(row_index, temp):
    """
    Sort the per-sensor reconstruction errors descending and drop those below theta.

    Parameters
    ----------
    row_index : int
        Index of the row whose errors are being ranked.
    temp : pandas.DataFrame
        One-row frame of per-sensor reconstruction errors (autoencoder output).

    Returns
    -------
    pandas.DataFrame
        Copy of *temp* with columns sorted by decreasing error, keeping only
        the columns whose error is >= the module-level threshold ``theta``.
    """
    temp = temp.sort_values(by=row_index, axis=1, ascending=False)
    keep = 0
    for col in temp.columns:
        if temp.loc[row_index, col] < theta:
            break
        keep += 1
    # Drop everything from the first below-threshold column onward.
    # (The original hard-coded the BATADAL column count, 43, as the slice end;
    # using the actual column count generalizes this to any dataset width.)
    temp = temp.drop(columns=temp.columns[keep:])
    return temp.copy()
def scale_input_and_detect_single(index, X):
    """
    Scale a single row of sensor readings and run it through the detector.

    Uses the module-level ``scaler``, ``autoencoder``, ``xset`` and ``theta``.

    Parameters
    ----------
    index : int
        Row index (becomes the index of the one-row scaled frame).
    X : pandas.DataFrame
        DataFrame containing one row of sensor readings.

    Returns
    -------
    Yhat
        Detection outcome (per-row boolean from the autoencoder).
    error
        Average reconstruction error, indexable by ``index``.
    temp : pandas.DataFrame
        Per-sensor reconstruction error vector for the row.
    """
    X_transformed = pd.DataFrame(
        index=[index], columns=xset, data=scaler.transform(X))
    Yhat, error, temp, _ = autoencoder.detect(
        X_transformed, theta=theta, window=1, average=True)
    return Yhat, error, temp
def scale_input_and_detect(index, X):
    """
    Scale a batch of candidate rows, detect them, and return the best candidate.

    Every row of *X* is scaled and scored by the autoencoder; the row with the
    lowest reconstruction error is selected.

    Parameters
    ----------
    index : int
        Row index (kept for signature symmetry with the single-row variant;
        not used directly here).
    X : pandas.DataFrame
        DataFrame of candidate sensor readings, one candidate per row.

    Returns
    -------
    pandas.DataFrame
        One-row frame holding the candidate with the minimum error.
    float
        That candidate's reconstruction error.
    """
    X_transformed = pd.DataFrame(
        columns=xset, data=scaler.transform(X), index=X.index)
    _, error, _, _ = autoencoder.detect(
        X_transformed, theta=theta, window=1, average=True)
    error_df = pd.DataFrame({'error': error})
    X = pd.concat([X, error_df], axis=1)
    # Keep only the candidate with the smallest reconstruction error.
    X = X.iloc[X['error'].idxmin()]
    # print(X)
    error = X['error']
    X = X.drop('error')
    X = pd.DataFrame([X])
    # print(X)
    return X, error
def compute_mutation_factor(att_data, newBest):
    """
    Count how many column values changed between original and concealed row.

    The count is stored in the module-level ``changed_columns`` dict under the
    module-level ``row_index`` of the row currently being processed.
    NOTE(review): ``row_index`` is read from the enclosing loop's global scope,
    not passed as a parameter — verify this stays in sync with the caller.

    Parameters
    ----------
    att_data : pandas.DataFrame
        Original sensor readings.
    newBest : pandas.DataFrame
        Concealed sensor readings.
    """
    X2 = pd.DataFrame(index=att_data.index,
                      columns=xset, data=att_data[xset])
    frames = [X2, newBest]
    merge = pd.concat(frames)
    # A nonzero difference in a column means that column was mutated.
    merge.loc['Diff'] = merge.iloc[0] - merge.iloc[1]
    changed_columns[row_index] = merge.loc['Diff'].astype(bool).sum()
    print('changed tuples: ' + str(len(changed_columns)))
def choose_column(row_index, prev_col_name, changed_variables, max_concealable_variables):
    """
    Select the next sensor column to manipulate, cycling through the allowed set.

    Parameters
    ----------
    row_index : int
        Row being concealed; key into *changed_variables*.
    prev_col_name : str or None
        Column manipulated in the previous iteration (None on the first call).
    changed_variables : dict
        Maps row index -> ordered list of columns that may be manipulated.
    max_concealable_variables : int
        Number of columns in the cycle (modulus for wrap-around).

    Returns
    -------
    str
        The column to manipulate in the current iteration: the first allowed
        column on the first call, otherwise the next one after *prev_col_name*
        (wrapping around).
    """
    if prev_col_name is None:
        return changed_variables[row_index][0]
    next_pos = (changed_variables[row_index].index(prev_col_name) + 1) % max_concealable_variables
    return changed_variables[row_index][next_pos]
# Main concealment search: iteratively perturbs one sensor column at a time,
# re-ranking columns by reconstruction error after each unimproved change.
def change_vector_label(row_index, att_data, solutions_found, changed_variables, variables):
    """
    Transform the input row trying to change its predicted label.

    Greedy search under the module-level ``budget`` (max changes) and
    ``patience`` (max changes without improvement): each iteration sweeps the
    normal-operation value range of one column, keeps the candidate with the
    lowest reconstruction error, and stops once the error drops below ``theta``.

    Parameters
    ----------
    row_index : int
    att_data : pandas.DataFrame
        Original one-row data to be concealed.
    solutions_found : int
        Running counter of rows successfully concealed.
    changed_variables : dict
        Per-row list of columns that may be / have been manipulated (mutated).
    variables : dict
        Maps max_concealable_variables -> ordered list of allowed columns.

    Returns
    -------
    pandas.DataFrame
        Best (lowest-error) concealed row found.
    int
        Updated solutions counter.
    """
    original_vector = att_data.copy()
    changes = 0
    found_solution = 0
    _, error, temp = scale_input_and_detect_single(row_index, att_data)
    previous_best_error = error[row_index]
    temp = sort_temp_and_drop(row_index, temp)
    prev_col_name = None
    num_changes_without_optimizations = 0
    last_optimization = 0
    newBest = att_data.copy()
    optimized = False
    changed_variables[row_index] = variables[max_concealable_variables]
    while changes < budget and (changes - last_optimization) < patience and not found_solution:
        # BUG FIX: the original call passed six arguments (including ``temp``
        # and ``num_changes_without_optimizations``) to the four-parameter
        # choose_column(), which raises TypeError at runtime.
        col_name = choose_column(row_index, prev_col_name,
                                 changed_variables, max_concealable_variables)
        prev_col_name = col_name
        if debug:
            print('______________________________')
            print(col_name)
            print('______________________________')
        # Sweep the column over its normal-operation range.
        values = np.arange(
            normal_op_ranges[col_name]['min'], normal_op_ranges[col_name]['max']+0.1, normal_op_ranges[col_name]['step'])
        # print(values)
        att_data = att_data.append(
            [att_data] * (len(values)), ignore_index=True)
        att_data = att_data[:-1]  # delete eccessive lenght
        # substitute column values usign normal operations
        att_data[col_name] = values
        att_data, error = scale_input_and_detect(row_index, att_data)
        if error < previous_best_error:
            if debug:
                print(error, previous_best_error)
            previous_best_error = error
            newBest = att_data.copy()
            last_optimization = changes
            num_changes_without_optimizations = 0
            optimized = True
            try:
                # Record the column as mutated (equivalent to the original
                # ``if not(col_name) in ...`` precedence-dependent form).
                if col_name not in changed_variables[row_index]:
                    changed_variables[row_index].append(col_name)
            except KeyError:
                changed_variables[row_index] = [col_name]
        else:
            optimized = False
        if error < theta:
            # Below the detection threshold: the row is concealed.
            solutions_found = solutions_found + 1
            found_solution = 1
            print('Found solution number: ' + str(solutions_found))
        if optimized == False:
            # No improvement: restart from the best row found so far and
            # refresh the column ranking.
            num_changes_without_optimizations = num_changes_without_optimizations + 1
            att_data = newBest.copy()
            _, error, temp = scale_input_and_detect_single(
                row_index, att_data)
            temp = sort_temp_and_drop(row_index, temp)
        changes = changes + 1
        if debug:
            print(temp)
            print('--__--__--')
            print(changes)
            print('--__--__--')
    compute_mutation_factor(original_vector, att_data.copy())
    return newBest.copy(), solutions_found
"""
Select wich dataset are you considering
(we are not allowed to publish WADI data, please request them itrust Singapore website)
"""
dataset = 'BATADAL' #'WADI'
data_folder = '../../Data/'+dataset
if dataset == 'BATADAL':
attack_ids = range(1,15)
att_data = pd.read_csv(data_folder+'/attack_1_from_test_dataset.csv')
xset = [col for col in att_data.columns if col not in [
'Unnamed: 0', 'DATETIME', 'ATT_FLAG']]
budget = 200
patience = 15
if dataset == 'WADI':
attack_ids = [1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
att_data = pd.read_csv(data_folder+'/attack_1_from_test_dataset.csv')
xset = [col for col in att_data.columns if col not in [
'Row', 'DATETIME','ATT_FLAG', '2_MV_001_STATUS', '2_LT_001_PV', '2_MV_002_STATUS']]
budget = 300
patience = 40
yset = ['ATT_FLAG']
autoencoder = load_AEED("../../Attacked_Model/"+dataset+"/autoencoder.json", "../../Attacked_Model/"+dataset+"/autoencoder.h5")
scaler = pickle.load(open("../../Attacked_Model/"+dataset+"/scaler.p", "rb"))
with open("../../Attacked_Model/"+dataset+"/theta") as f:
theta = float(f.read())
normal_op_ranges = pickle.load(open('dict_'+dataset+'.p', 'rb'))
for att_number in attack_ids:
variables = {}
f = open("./constraints/"+dataset+"/constraint_variables_attack_"+str(att_number)+".txt", 'r').read()
variables = eval(f)
for max_concealable_variables in [2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20]:
debug = False
changed_columns = {}
changed_variables = {}
print('ATT NUMBER: '+str(att_number))
att_data = pd.read_csv(
data_folder+'/attack_'+str(att_number)+'_from_test_dataset.csv')
y_att = att_data[yset]
X = pd.DataFrame(index=att_data.index,
columns=xset, data=att_data[xset])
new_tuples = pd.DataFrame(columns=xset)
# main loop that iterates over every row in the dataset
changed_rows = 0
solutions_found = 0
max_spent_time = 0
sum_spent = 0
times = []
import time
for row_index, row in X.iterrows():
prov = pd.DataFrame(index=[row_index],
columns=xset, data=att_data[xset])
Yhat, original_error, temp = scale_input_and_detect_single(
row_index, prov)
if Yhat[row_index]:
start_time = time.time()
modified_row, solutions_found = change_vector_label(
row_index, prov, solutions_found, changed_variables, variables)
spent_time = time.time() - start_time
print("--- %s seconds ---" % spent_time)
sum_spent = sum_spent + spent_time
if max_spent_time < spent_time:
max_spent_time = spent_time
new_tuples = new_tuples.append(modified_row, ignore_index=True)
changed_rows = changed_rows + 1
times.append(spent_time)
else:
new_tuples = new_tuples.append(prov)
new_tuples['DATETIME'] = att_data['DATETIME']
new_tuples['ATT_FLAG'] = att_data['ATT_FLAG']
new_tuples.to_csv('./results/'+dataset+'/max_constraints_fixed/whitebox_attack_' +
str(att_number)+'_from_test_dataset_max_'+str(max_concealable_variables)+'.csv')
print('mean spent time: ' + str(sum_spent/changed_rows))
with open('./results/'+dataset+'/max_constraints_fixed/time_spent_new_sequential_v2.0_fixed_budget.txt', 'a') as f:
f.write('______attack: '+str(att_number)+'max: '+str(max_concealable_variables)+'______\n')
f.write('Mean: ' + str(np.mean(times)))
f.write('\n')
f.write('STD: ' + str(np.std(times)))
f.write('\n') |
11,539 | 486943180fbbf03e94e4458c00e083297db6dc47 | import unittest
import os
from itertools import combinations_with_replacement
from query import search, levenshtein
from index import Trie, DEFAULT_FILENAME
WORDS_FILENAME = '/tmp/words'
WORDS_CONTENT = "aa ab bb bc c".replace(' ', "\n")
WORDS = WORDS_CONTENT.split()
class TestIndex(unittest.TestCase):
    """Tests for the Trie index: building, persistence, adjacency queries."""

    def test_build(self):
        """Building the trie from the word file yields a truthy index."""
        index = Trie.build(WORDS_FILENAME)
        self.assertTrue(index)

    def test_persistence(self):
        """A dumped index loads back equal to the original."""
        index = Trie.build(WORDS_FILENAME)
        self.assertTrue(index)
        index.dump(DEFAULT_FILENAME)
        self.assertTrue(os.path.isfile(DEFAULT_FILENAME))
        index2 = Trie.load(DEFAULT_FILENAME)
        self.assertEqual(index, index2)

    def test_adjacency_manual(self):
        """Hand-picked expected adjacency sets over the test vocabulary."""
        tests = {
            'a': {'ab', 'aa', 'c'},
            'aa': {'ab'},
            'bb': {'bc', 'ab'},
            'cc': {'c', 'bc'},
            'c': {'bc'},
        }
        self._test_adjacency(tests)

    def test_adjacency_auto(self):
        """Cross-check index.adjacents() against the reference adjacency()."""
        tests = dict()
        combinations = []
        for n in range(4):
            combinations.extend(combinations_with_replacement("".join(WORDS), 3))
        for combination in combinations:
            cword = "".join(combination)
            adjacents = set()
            for word in WORDS:
                if word != cword and adjacency(word, cword):
                    adjacents.add(word)
            tests[cword] = adjacents
        self._test_adjacency(tests)

    def _test_adjacency(self, tests):
        # Shared helper: the index must report exactly the expected set per word.
        index = Trie.build(WORDS_FILENAME)
        for word, test_adjacents in tests.items():
            adjacents = index.adjacents(word)
            msg = "adj('%s') = %s, it should be %s" % (word, adjacents, test_adjacents)
            self.assertEqual(adjacents, test_adjacents, msg)
class TestQuery(unittest.TestCase):
    """Tests for the query module: edit distance and path search."""

    def test_levenshtein(self):
        """Levenshtein distance from 'a' to a few hand-computed cases."""
        a = 'a'
        tests = [('a', 0), ('b', 1), ('ab', 1), ('ba', 1), ('aaa', 2), ('bbb', 3)]
        for b, test in tests:
            result = levenshtein(a, b)
            msg = "lev('%s', '%s') = %i, it should be %i" % (a, b, result, test)
            self.assertEqual(result, test, msg)

    def test_search(self):
        """search() yields all paths from 'a' to 'c', and honors the limit arg."""
        index = Trie.build(WORDS_FILENAME)
        tests = [
            ['a', 'c'],
            ['a', 'ab', 'bb', 'bc', 'c'],
            ['a', 'aa', 'ab', 'bb', 'bc', 'c'],
        ]
        paths = list(search(index, 'a', 'c'))
        self.assertEqual(paths, tests)
        #limit
        paths2 = list(search(index, 'a', 'c', 2))
        self.assertEqual(paths2, tests[:2])
def adjacency(a, b):
    """Return True if *a* and *b* differ by at most one edit.

    Two words are adjacent when they are equal length and differ in at most
    one position (substitution), or their lengths differ by one and the
    shorter is the longer with a single character removed (insert/delete).
    Identical words compare adjacent, matching the original behavior.

    Note: uses ``==`` for the length comparisons — the original used ``is``,
    which only works by accident of CPython's small-int caching.
    """
    lendiff = len(a) - len(b)
    if lendiff == 0:
        mismatches = sum(x != y for x, y in zip(a, b))
        return mismatches <= 1
    if abs(lendiff) == 1:
        if lendiff > 0:
            a, b = b, a  # make a the shorter word
        for i in range(len(a)):
            if a[i] != b[i]:
                # After the first mismatch, the remainder of a must equal
                # b shifted one position (the single inserted character).
                return a[i:] == b[i + 1:]
        return True  # a is a prefix of b
    return False
if __name__ == '__main__':
    # Create the word fixture, run the tests, and always clean the fixture up.
    # write() is the correct call for a single string (the original used
    # writelines(), which iterates the string character by character).
    with open(WORDS_FILENAME, 'wt') as file:
        file.write(WORDS_CONTENT)
    try:
        unittest.main()
    finally:
        # unittest.main() raises SystemExit, so cleanup placed after it (as in
        # the original) never ran; the finally block guarantees removal.
        os.remove(WORDS_FILENAME)
|
11,540 | 7e5496a802278837a12a6a9e812287135f464468 | from contextlib import contextmanager
from uuid import uuid4
import pandas as pd
from google.cloud import bigquery
from .basic import Profiler
class SqlProfiler(Profiler):
    """Profiler that materializes a SQL query into a temporary BigQuery table
    and collects per-column statistics from it."""

    def __init__(self, project, sql, *args, **kwargs):
        # NOTE(review): *args/**kwargs are accepted but silently dropped —
        # confirm the base Profiler does not expect them.
        super().__init__(project, sql)
        self.client = bigquery.Client(project=project)

    def get_stats(self):
        """Run the query into a temp table and store the stats DataFrame on
        self.profile (self.sql / self.project presumably set by Profiler)."""
        with tmp_table(self.client, self.sql) as table_ref:
            self.profile = get_stats(self.client, self.project, table_ref)

    def to_file(self, filename):
        """Write the collected profile to a CSV file (no index column)."""
        self.profile.to_csv(filename, index=False)

    def to_bq(self, table, disposition):
        """Upload the collected profile to a BigQuery table.

        *disposition* is passed as pandas' ``if_exists`` ('fail'/'replace'/'append').
        """
        self.profile.to_gbq(table, project_id=self.project, if_exists=disposition)
@contextmanager
def tmp_table(client, sql):
    """Run *sql* into a throwaway BigQuery dataset/table and yield its reference.

    The dataset and table get unique names, so concurrent runs cannot collide.
    Both are deleted on exit, even when the query or the caller's body raises.

    Parameters
    ----------
    client : google.cloud.bigquery.Client
    sql : str
        Query whose result set should be materialized.

    Yields
    ------
    google.cloud.bigquery.TableReference
        Reference to the populated temporary table.
    """
    dataset_id = f"tmp_dataset_{uuid4().hex}"
    table_id = f"tmp_{uuid4().hex}"
    job_config = bigquery.QueryJobConfig()
    dataset_ref = client.dataset(dataset_id)
    client.create_dataset(bigquery.Dataset(dataset_ref))
    job_config.destination = dataset_ref.table(table_id)
    table_ref = None
    try:
        query_job = client.query(sql, location="US", job_config=job_config)
        query_job.result()  # block until the query finishes
        table_ref = dataset_ref.table(table_id)
        yield table_ref
    finally:
        try:
            client.delete_table(table_ref)
        except Exception:
            # Best-effort cleanup: the table may not exist if the query failed
            # (table_ref is still None then). Narrowed from a bare except so
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            pass
        client.delete_dataset(client.dataset(dataset_id))
def aggregate(f, table_ref, i, empty_string):
    """Build the per-column statistics SELECT for one schema field.

    Emits a single-row SELECT over the temp table computing count, average,
    std, min/max, mode, missing-value stats, distinct counts, quantiles and
    sign/zero/empty-string counters — each guarded by the field's type so the
    query stays valid for non-numeric / non-date columns.

    Parameters
    ----------
    f : bigquery.SchemaField
        Field being profiled (uses f.name and f.field_type).
    table_ref : bigquery.TableReference
        Temp table holding the materialized query result.
    i : int
        Ordinal used to keep the UNION ALL output in schema order.
    empty_string : str
        SQL literal compared against STRING columns to count empties.
    """
    return (
        f"SELECT"
        f" '{f.name}' name,"
        f" '{f.field_type}' type,"
        f" COUNT({f.name}) count,"
        f" {f'AVG({f.name})' if f.field_type in ('INTEGER', 'FLOAT', 'NUMERIC') else 'null'} average,"
        f" {f'STDDEV_SAMP({f.name})' if f.field_type in ('INTEGER', 'FLOAT', 'NUMERIC') else 'null'} std,"
        f" CAST(MAX({f.name}) AS STRING) max,"
        f" CAST(MIN({f.name}) AS STRING) min,"
        f" CAST(APPROX_TOP_COUNT({f.name}, 1)[ORDINAL(1)].value AS STRING) mode,"
        f" COUNT(*) - COUNT({f.name}) miss_count,"
        f" SAFE_DIVIDE(COUNT(*) - COUNT({f.name}),"
        f" COUNT(*)) miss_rate,"
        f" {f'DATE_DIFF(MAX({f.name}), MIN({f.name}), DAY) + 1 - COUNT(DISTINCT {f.name})' if f.field_type == 'DATE' else '0'} miss_days,"
        f" COUNT(DISTINCT {f.name}) unique_count,"
        f" SAFE_DIVIDE(COUNT(DISTINCT {f.name}), COUNT({f.name})) unique_rate,"
        f" {f'CAST(APPROX_QUANTILES({f.name}, 4)[ORDINAL(2)] AS STRING)' if f.field_type not in ('STRUCT', 'ARRAY') else 'null'} quantile4_1,"
        f" {f'CAST(APPROX_QUANTILES({f.name}, 4)[ORDINAL(3)] AS STRING)' if f.field_type not in ('STRUCT', 'ARRAY') else 'null'} median,"
        f" {f'CAST(APPROX_QUANTILES({f.name}, 4)[ORDINAL(4)] AS STRING)' if f.field_type not in ('STRUCT', 'ARRAY') else 'null'} quantile4_3,"
        f" {f'CAST(APPROX_QUANTILES({f.name}, 100)[ORDINAL(2)] AS STRING)' if f.field_type not in ('STRUCT', 'ARRAY') else 'null'} quantile100_1,"
        f" {f'CAST(APPROX_QUANTILES({f.name}, 100)[ORDINAL(100)] AS STRING)' if f.field_type not in ('STRUCT', 'ARRAY') else 'null'} quantile100_99,"
        f" {f'COUNTIF({f.name} >= 0)' if f.field_type in ('INTEGER', 'FLOAT', 'NUMERIC') else '0'} not_negatives,"
        f" {f'COUNTIF({f.name} = 0)' if f.field_type in ('INTEGER', 'FLOAT', 'NUMERIC') else '0'} zeros,"
        f" {f'COUNTIF({f.name} = {empty_string})' if f.field_type == 'STRING' else '0'} empty_strings,"
        f" {i} ord FROM {table_ref.dataset_id}.{table_ref.table_id}"
    )
def get_stats(client, project, table_ref, empty_string='""', max_size=50):
    """Profile every column of *table_ref* and return one stats row per column.

    Columns are profiled in batches of *max_size* (one UNION ALL query each)
    to keep individual queries within BigQuery limits; the partial frames are
    concatenated in schema order (the ``ord`` column drives the ORDER BY).

    Parameters
    ----------
    client : bigquery.Client
    project : str
        Billing project for pandas' read_gbq.
    table_ref : bigquery.TableReference
    empty_string : str
        SQL literal used to count empty STRING values.
    max_size : int
        Maximum number of columns per generated query.

    Returns
    -------
    pandas.DataFrame
        Concatenated per-column statistics.
    """
    schema = client.get_table(f"{table_ref.dataset_id}.{table_ref.table_id}").schema
    num_columns = len(schema)
    # Ceiling division: number of batched queries needed.
    num_repeats = -(-num_columns // max_size)
    sqls = (
        " UNION ALL ".join(
            aggregate(f, table_ref, j * max_size + i, empty_string)
            for i, f in enumerate(schema[j * max_size : min(num_columns, (j + 1) * max_size)])
        )
        + " ORDER BY ord;"
        for j in range(num_repeats)
    )
    dfs = (pd.read_gbq(sql, project_id=project, dialect="standard") for sql in sqls)
    return pd.concat(dfs)
|
11,541 | f9971ef3370082d5dca2f679c5146184ab74b815 | import pandas as pd
a = int(-9999)
df=pd.read_csv("/home/marcela/Downloads/RECOMMENDATIONFIX/recommendations_cleanup/test_9999.csv", quotechar='"');
print(df['multi_graph_id'])
df['multi_graph_id']=df['multi_graph_id'].fillna("-9999")
df['recommended_width']=df['recommended_width'].fillna("-9999")
print(df['multi_graph_id'])
print(df['recommended_width'])
df['output_size_id'] = df['output_size_id'].astype(str)
print(df['output_size_id'])
df.to_csv("/home/marcela/Downloads/RECOMMENDATIONFIX/recommendations_cleanup/test_9999_converted.csv", sep=',', encoding='utf-8',index=False, quotechar='"')
# data = [['Austria', 'Germany', 'hob', 'Australia'],
# ['Spain', 'France', 'Italy', 'Mexico']]
#
# df = pd.DataFrame(data, columns = ['id','shop_id','method','ref_size_id','target_size_id','target_shop_shoe_id','external_id','info','is_recommendation_session','ip','created_at','updated_at','session_id','ref_shoe_id','output_size_numeric','output_size_id','output_scale','available','rounded','ref_size_numeric','recommended_size_numeric','precision','multi_graph_id','ref_shoe_scale','recommended_size_scale','recommended_width','request_id'])
#
# # Values to find and their replacements
# findL = ['NULL']
# replaceL = [-9999]
# #
# # # Select column (can be A,B,C,D)
# col = 'multi_graph_id';
#
# # Find and replace values in the selected column
# df[col] = df[col].replace(findL, replaceL) |
11,542 | a2aa040caf30dc642da3c241e4478470dfa4e756 | def alphabet_position(letter):
alphabet = "abcdefghijklmnopqrstuvwxyz"
return alphabet.index(letter.lower())
def rotate_character(char, rot):
    """Caesar-rotate a single character by *rot* positions.

    Letters keep their case; any non-letter is returned unchanged.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    if not char.isalpha():
        return char
    rotated = alphabet[(alphabet.index(char.lower()) + rot) % 26]
    return rotated if char.islower() else rotated.upper()
def rotate_string(text, rot):
    """Return *text* with every character rotated by *rot* via rotate_character."""
    return "".join(rotate_character(ch, rot) for ch in text)
11,543 | 71758a8fb7f01126817975a201df7138fcfab5ba | def index(src, search):
try:
if (src.index(search) >= 0 ):
return src.index(search);
except ValueError as ve:
return -1; |
11,544 | 74be04192aa747f2fcd81b73f8d74903fcf86d57 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
from django.views.generic.base import TemplateView
from homepage.views import HomePageView
from profiles.views import ProfileRedirectView
admin.autodiscover()
# Custom error views used by Django when DEBUG is off.
handler404 = 'homepage.views.error_404_view'
handler500 = 'homepage.views.error_500_view'
urlpatterns = patterns('',
    # Examples:
    # url(r'^myapp/', include('myapp.urls')),
    url(r'^$', HomePageView.as_view(), name='home'),
    url(r'^social/', include('social.apps.django_app.urls', namespace='social')),
    url(r'^datasets/', include('package.urls')),
    url(r'^grids/', include('grid.urls')),
    url(r'^profiles/', include('profiles.urls')),
    url(r'^publishers/', include('publisher.urls')),
    url(r'^datamap/', include('datamap.urls')),
    url(r'^download/', include('downloads.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^api/', include('api.urls')),
    url(r'^accounts/', include('django.contrib.auth.urls')),
    url(r'^accounts/profile/$', ProfileRedirectView.as_view()),
    # static pages
    url(r'^about/$', TemplateView.as_view(template_name='pages/faq.html'), name='about'),
    url(r'^terms/$', TemplateView.as_view(template_name='pages/terms.html'), name='terms'),
    url(r'^faq/$', TemplateView.as_view(template_name='pages/faq.html'), name='faq'),
    url(r'^syndication/$', TemplateView.as_view(template_name='pages/syndication.html'), name='syndication'),
    url(r'^help/$', TemplateView.as_view(template_name='pages/help.html'), name='help'),
    # This requires that static files are served from the 'static' folder.
    # The apache conf is set up to do this for you, but you will need to do it
    # on dev
    (r'/favicon.ico', 'django.views.generic.base.RedirectView',
     {'url': '{0}images/favicon.ico'.format(settings.STATIC_URL)}),
)
# Django Debug Toolbar routes, mounted only in development.
if settings.DEBUG:
    import debug_toolbar
    urlpatterns += patterns('',
        url(r'^__debug__/', include(debug_toolbar.urls)),
    )
11,545 | e15dc6d20c691a643fb184992e45bb6d3f55aac1 | class car:
def __init__(self, speed, color, name, is_police):
self.name = name
self.speed = speed
self.color = color
self.is_police = is_police
print(f"New {self.color} {self.name} is born!")
def go(self):
print(f"{self.color} car {self.name} is going")
def show_speed(self):
print(f"{self.color} car {self.name} speed is {self.speed}")
def stop(self):
self.speed = 0
print(f"{self.name} is stopped")
def turn(self,direction):
self.direction = direction
print(f"{self.color} car {self.name} turn {self.direction}")
class TownCar(car):
    """Car variant that only reports its speed when over the 60 limit."""

    def show_speed(self):
        if self.speed <= 60:
            return
        print(f"{self.name} -Over speed!")
class WorkCar(car):
    """Car variant that only reports its speed when over the 40 limit."""

    def show_speed(self):
        if self.speed <= 40:
            return
        print(f"{self.name} - Over speed!")
class SportCar(car):
    # No overrides: inherits all behavior (including show_speed) from car.
    pass
class PoliceCar(car):
    # No overrides: inherits all behavior from car; the is_police flag is
    # set by the caller at construction time.
    pass
# Demo: exercise each subclass; TCar and WCar are over their limits, so
# their show_speed overrides print warnings; PCar uses the base behavior.
TCar = TownCar(70, 'red', 'ford focus', False)
WCar = WorkCar(50, 'blue', 'nissan', False)
PCar = PoliceCar(150,'black','Ford Mustang',True)
TCar.go()
TCar.turn("left")
TCar.show_speed()
TCar.stop()
WCar.show_speed()
PCar.show_speed()
11,546 | b2ec682fba5d5eec999d160d44c829f93d8caa36 | # -*- coding: utf-8 -*-
# @Time : 2018/4/9 14:53:17
# @Author : SilverMaple
# @Site : https://github.com/SilverMaple
# @File : routes.py
import hashlib
import os
import logging
import sys
import shutil
import json
import subprocess
import time
from datetime import datetime
from app.decorators import async
from flask import render_template, flash, redirect, url_for, request, g, \
jsonify, current_app, session, make_response
from flask_login import current_user, login_required
from flask_babel import _, get_locale
from flask_uploads import UploadSet
from guess_language import guess_language
from app import db
from app.main.forms import EditProfileForm, PostForm, SearchForm, AddAppForm, AddAppExtensionForm, EditAppExtensionForm, \
AddAppAdminForm, AddTenantForm, AddTenantDatabaseForm, EditTenantDatabaseForm, AddAppCodeForm, AddRoleForm, AddUserForm
from app.models import User, Post, App, AppAdmin, AppExpand, AdminToApp, Tenant, TenantDb, AppCode, SaasRole, SaasUser
from app.translate import translate
from app.main import bp
from app.email import follower_notification
from app.auth import LoginType, current_login_type
from app import auth
from pip._internal import commands
from requests import Response
from werkzeug.datastructures import FileStorage
from werkzeug.test import EnvironBuilder
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash
logger = logging.getLogger("MirrorConstruct")
# logger = logging.getLogger("MirrorConstruct")
formatter = logging.Formatter('[%(asctime)s] %(message)s')
blank_formatter = logging.Formatter('')
# formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ## %(message)s')
file_handler = logging.FileHandler("logs/mirror_construct.log")
file_handler.setFormatter(formatter) # 可以通过setFormatter指定输出格式
# 为logger添加的日志处理器
logger.addHandler(file_handler)
logger.setLevel(logging.DEBUG)
@bp.before_app_request
def before_request():
    """Refresh the user's last-seen timestamp and stash per-request globals
    (search form, locale) before every request."""
    if current_user.is_authenticated:
        current_user.last_seen = datetime.utcnow()
        db.session.commit()
    g.search_form = SearchForm()
    g.locale = str(get_locale())
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
@login_required
def index():
    """Home page: handle new post submission and show the followed-posts feed."""
    form = PostForm()
    if form.validate_on_submit():
        # Detect the post language for later on-demand translation.
        language = guess_language(form.post.data)
        if language == 'UNKNOWN' or len(language) > 5:
            language = ''
        post = Post(body=form.post.data, author=current_user,
                    language=language)
        db.session.add(post)
        db.session.commit()
        flash(_('Your post is now live!'))
        return redirect(url_for('main.index'))
    page = request.args.get('page', 1, type=int)
    posts = current_user.followed_posts().paginate(
        page, current_app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('main.explore', page=posts.next_num) \
        if posts.has_next else None
    prev_url = url_for('main.explore', page=posts.prev_num) \
        if posts.has_prev else None
    return render_template('index.html', title=_('Home'), form=form,
                           posts=posts.items, next_url=next_url,
                           prev_url=prev_url)
@bp.route('/index_registe')
def index_registe():
    """Registration-manage dashboard; redirects to login when the session is
    not authenticated under the REGISTE_MANAGE login type."""
    if current_user.is_authenticated and auth.current_login_type == LoginType.REGISTE_MANAGE:
        return render_template('index_registe_manage.html', title=_('Registe Manage'))
    else:
        auth.current_login_type = LoginType.REGISTE_MANAGE
        return redirect(url_for('auth.login'))
@bp.route('/index_app')
def index_app():
    """Web-app-manage dashboard: list apps administered by the current app
    manager (sorted by name); redirect to login for other login types."""
    if current_user.is_authenticated and auth.current_login_type == LoginType.WEB_APP_MANAGE:
        app_list = [a.app_id for a in AdminToApp.query.filter(
            AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
        data = [App.query.filter(App.id == a).order_by(db.asc(App.name)).first() for a in app_list]
        data.sort(key=lambda a: a.name)
        app_name_list = [a.name for a in data]
        current_selected_app_name = None
        if session.get('current_selected_app_name'):
            current_selected_app_name = session['current_selected_app_name']
        return render_template('index_app_manage.html', title=_('Web App Manage'), app_name_list=app_name_list,
                               current_selected_app_name=current_selected_app_name)
    else:
        auth.current_login_type = LoginType.WEB_APP_MANAGE
        return redirect(url_for('auth.login'))
@bp.route('/index_tenant')
def index_tenant():
    """Tenant-service dashboard; redirects to login when not authenticated
    under the TENANT_SERVICE login type."""
    if current_user.is_authenticated and auth.current_login_type == LoginType.TENANT_SERVICE:
        return render_template('index_tenant_service.html', title=_('Tenant Service'))
    else:
        auth.current_login_type = LoginType.TENANT_SERVICE
        return redirect(url_for('auth.login'))
@bp.route('/explore')
@login_required
def explore():
    """Global feed: newest posts from all users, paginated."""
    page = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(Post.timestamp.desc()).paginate(
        page, current_app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('main.explore', page=posts.next_num) \
        if posts.has_next else None
    prev_url = url_for('main.explore', page=posts.prev_num) \
        if posts.has_prev else None
    return render_template('index.html', title=_('Explore'),
                           posts=posts.items, next_url=next_url,
                           prev_url=prev_url)
@bp.route('/user/<username>')
@login_required
def user(username):
    """User profile page: the user's own posts, newest first, paginated.
    404s when no such username exists."""
    user = User.query.filter_by(username=username).first_or_404()
    page = request.args.get('page', 1, type=int)
    posts = user.posts.order_by(Post.timestamp.desc()).paginate(
        page, current_app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('main.user', username=user.username,
                       page=posts.next_num) if posts.has_next else None
    prev_url = url_for('main.user', username=user.username,
                       page=posts.prev_num) if posts.has_prev else None
    return render_template('user.html', user=user, posts=posts.items,
                           next_url=next_url, prev_url=prev_url)
@bp.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
    """Edit the current user's username and about-me text; pre-fill the form
    with the current values on GET."""
    form = EditProfileForm(current_user.username)
    if form.validate_on_submit():
        current_user.username = form.username.data
        current_user.about_me = form.about_me.data
        db.session.commit()
        flash(_('Your changes have been saved.'))
        return redirect(url_for('main.edit_profile'))
    elif request.method == 'GET':
        form.username.data = current_user.username
        form.about_me.data = current_user.about_me
    return render_template('edit_profile.html', title=_('Edit Profile'),
                           form=form)
@bp.route('/follow/<username>')
@login_required
def follow(username):
    """Follow *username* on behalf of the current user and notify the followee.
    Guards against unknown users and self-follows."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash(_('User %(username)s not found.', username=username))
        return redirect(url_for('main.index'))
    if user == current_user:
        flash(_('You cannot follow yourself!'))
        return redirect(url_for('main.user', username=username))
    current_user.follow(user)
    db.session.commit()
    flash(_('You are following %(username)s!', username=username))
    follower_notification(user, current_user)
    return redirect(url_for('main.user', username=username))
@bp.route('/unfollow/<username>')
@login_required
def unfollow(username):
    """Stop following *username*. Guards against unknown users and
    self-unfollows, mirroring follow()."""
    user = User.query.filter_by(username=username).first()
    if user is None:
        flash(_('User %(username)s not found.', username=username))
        return redirect(url_for('main.index'))
    if user == current_user:
        flash(_('You cannot unfollow yourself!'))
        return redirect(url_for('main.user', username=username))
    current_user.unfollow(user)
    db.session.commit()
    flash(_('You are not following %(username)s.', username=username))
    return redirect(url_for('main.user', username=username))
@bp.route('/translate', methods=['POST'])
@login_required
def translate_text():
    """AJAX endpoint: translate posted text between the given languages and
    return the result as JSON ({'text': ...})."""
    return jsonify({'text': translate(request.form['text'],
                                      request.form['source_language'],
                                      request.form['dest_language'])})
@bp.route('/search')
@login_required
def search():
    """Full-text post search using the query stored on g.search_form,
    paginated; falls back to the explore feed on an invalid query."""
    if not g.search_form.validate():
        return redirect(url_for('main.explore'))
    page = request.args.get('page', 1, type=int)
    posts, total = Post.search(g.search_form.q.data, page,
                               current_app.config['POSTS_PER_PAGE'])
    next_url = url_for('main.search', q=g.search_form.q.data, page=page + 1) \
        if total > page * current_app.config['POSTS_PER_PAGE'] else None
    prev_url = url_for('main.search', q=g.search_form.q.data, page=page - 1) \
        if page > 1 else None
    return render_template('search.html', title=_('Search'), posts=posts,
                           next_url=next_url, prev_url=prev_url)
# ---------------------------------------------------------------------------------------
# registe manage app setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_setting')
@login_required
def registe_manage_app_setting():
    """List all registered apps (name, id, creator) in the settings table."""
    # Flags toggling the per-row action buttons in the shared table template.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('App Name'), _('App ID'), _('Creator')]
    data = App.query.order_by(db.asc(App.name)).all()
    return render_template('registe_manage_app_setting.html', title=_('App Setting'),
                           tableName=_('App List'), AppAdmin=AppAdmin,
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_add():
    """Create a new app; its app id is the MD5 hash of the app name."""
    form = AddAppForm(None)
    if form.validate_on_submit():
        app_id = hashlib.md5(form.app_name.data.encode(encoding='UTF-8')).hexdigest()
        db.session.add(App(id=None, name=form.app_name.data, appid=app_id))
        db.session.commit()
        flash(_('New app have been added.'))
        return redirect(url_for('main.registe_manage_app_setting'))
    elif request.method == 'GET':
        pass
    return render_template('registe_manage_app_setting.html', title=_('App Setting'),
                           tableName=_('Add New App'), AppAdmin=AppAdmin, form=form,
                           addTitle=('Add New App'))
@bp.route('/registe_manage_app_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete(id):
    """Two-step app deletion: GET stores the candidate id in the session and
    renders a confirmation dialog; POST with {'name': 'execute'} deletes it."""
    if request.method == 'GET':
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = App.query.filter(App.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
            return jsonify({'result': 'success'})
    isCheck = True
    isEdit = True
    isDelete = True
    tHead = [_('App Name'), _('App ID'), _('Creator')]
    data = App.query.all()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('registe_manage_app_setting.html', title=_('App Setting'),
                           tableName=_('App List'), AppAdmin=AppAdmin,
                           isCheck=isCheck, isEdit=isEdit, session=session,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete_select():
    """Batch delete is intentionally disabled: inform the user and return."""
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.registe_manage_app_setting'))
@bp.route('/registe_manage_app_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_edit(id):
    """Edit an app's name. GET pre-fills the form (and remembers the original
    name in the session so the uniqueness validator can skip it); POST saves."""
    if session.get('validate_app_name'):
        form = AddAppForm(session['validate_app_name'])
    else:
        form = AddAppForm(None)
    if form.validate_on_submit():
        current_data = App.query.filter(App.id == id).first()
        current_data.name = form.app_name.data
        db.session.commit()
        flash(_('App have been edited.'))
        return redirect(url_for('main.registe_manage_app_setting'))
    elif request.method == 'GET':
        current_data = App.query.filter(App.id == id).first()
        form.app_name.data = current_data.name
        form.app_ID.data = current_data.appid
        if AppAdmin.query.filter(AppAdmin.id == current_data.creater_id).first():
            form.creator_name.data = AppAdmin.query.filter(AppAdmin.id == current_data.creater_id).first().name
        session['validate_app_name'] = form.app_name.data
    return render_template('registe_manage_app_setting.html', title=_('App Setting'),
                           tableName=_('Edit App'), AppAdmin=AppAdmin, form=form,
                           editTitle=('Edit App'))
# ---------------------------------------------------------------------------------------
# registe manage app extension
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_extension')
@login_required
def registe_manage_app_extension():
    """List every AppExpand record, ordered by app type."""
    headers = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'),
               _('Library File'), _('DB Initial Path')]
    rows = AppExpand.query.order_by(db.asc(AppExpand.type)).all()
    return render_template('registe_manage_app_extension.html', title=_('App Extension'),
                           tableName=_('App Extension List'),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/registe_manage_app_extension_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_add():
    """Create a new AppExpand record with its two optional library uploads.

    Fix: removed the unused local ``upload = UploadSet()`` that was instantiated
    on every successful submit and never referenced.
    """
    form = AddAppExtensionForm(None)
    if form.validate_on_submit():
        # Each file field is optional: a real upload carries a ``filename``
        # attribute, an empty field does not — store '' for missing files.
        if hasattr(form.library_file.data, 'filename'):
            filename1 = secure_filename(form.library_file.data.filename)
            filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
                                                  'library'), filename1).replace('\\', '/')
            form.library_file.data.save(filePath1)
        else:
            filePath1 = ''
        if hasattr(form.library_file_depend.data, 'filename'):
            filename2 = secure_filename(form.library_file_depend.data.filename)
            filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
                                                  'library_depend'), filename2).replace('\\', '/')
            form.library_file_depend.data.save(filePath2)
        else:
            filePath2 = ''
        db.session.add(AppExpand(id=None, type=form.app_type.data, pattern_begin=form.tag_begin.data,
                                 pattern_end=form.tag_end.data, library_path=filePath1,
                                 library_depend_path=filePath2,
                                 library_desc=form.library_file_description.data,
                                 db_init_path=form.db_info_file_path.data))
        db.session.commit()
        flash(_('New app extension have been added.'))
        return redirect(url_for('main.registe_manage_app_extension'))
    return render_template('registe_manage_app_extension.html', title=_('App Extension'),
                           tableName=_('Add New App Extension'), AppAdmin=AppAdmin, form=form,
                           addTitle=('Add New App Extension'))
@bp.route('/registe_manage_app_extension_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete(id):
    """Two-step AppExpand deletion (GET arms the confirmation, POST executes)."""
    if request.method == 'GET':
        # Remember which row the confirmation dialog refers to.
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = AppExpand.query.filter(AppExpand.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        # Acknowledge the AJAX confirmation either way.
        return jsonify({'result': 'success'})
    # GET path: re-render the extension list with the confirmation texts.
    isCheck = True
    isEdit = True
    isDelete = True
    tHead = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'), _('Library File'), _('DB Initial Path')]
    data = AppExpand.query.all()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('registe_manage_app_extension.html', title=_('App Extension'),
                           tableName=_('App Extension List'),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_extension_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete_select():
    """Batch deletion of extensions is not supported; notify and return to the list."""
    flash(_('Batch delete operation are not allowed now.'))
    target = url_for('main.registe_manage_app_extension')
    return redirect(target)
@bp.route('/registe_manage_app_extension_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_edit(id):
    """Edit an AppExpand record; library files are only replaced when re-uploaded.

    ``session['validate_app_type']`` carries the previously shown type into the
    form constructor — presumably so validation accepts the unchanged value;
    TODO confirm against ``EditAppExtensionForm``.
    """
    if session.get('validate_app_type'):
        form = EditAppExtensionForm(session['validate_app_type'])
    else:
        form = EditAppExtensionForm(None)
    if form.validate_on_submit():
        current_data = AppExpand.query.filter(AppExpand.id == id).first()
        current_data.type = form.app_type.data
        current_data.pattern_begin = form.tag_begin.data
        current_data.pattern_end = form.tag_end.data
        current_data.library_desc = form.library_file_description.data
        current_data.db_init_path = form.db_info_file_path.data
        # print(form.library_file.data == '')
        # print(form.library_file.data)
        # Show the currently stored file names under the upload widgets.
        form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
        form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
        # A fresh upload (has ``filename``) replaces the stored file and path;
        # otherwise the existing library file is kept untouched.
        if hasattr(form.library_file.data, 'filename'):
            filename1 = secure_filename(form.library_file.data.filename)
            filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
                                                  'library'), filename1).replace('\\', '/')
            form.library_file.data.save(filePath1)
            current_data.library_path = filePath1
        if hasattr(form.library_file_depend.data, 'filename'):
            filename2 = secure_filename(form.library_file_depend.data.filename)
            filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
                                                  'library_depend'), filename2).replace('\\', '/')
            form.library_file_depend.data.save(filePath2)
            current_data.library_depend_path = filePath2
        db.session.commit()
        flash(_('App have been edited.'))
        return redirect(url_for('main.registe_manage_app_extension'))
    elif request.method == 'GET':
        # Pre-populate the form from the stored record.
        current_data = AppExpand.query.filter(AppExpand.id == id).first()
        form.app_type.data =current_data.type
        form.tag_begin.data = current_data.pattern_begin
        form.tag_end.data = current_data.pattern_end
        form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
        form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
        form.library_file_description.data = current_data.library_desc
        form.db_info_file_path.data = current_data.db_init_path
        session['validate_app_type'] = form.app_type.data
    return render_template('registe_manage_app_extension.html', title=_('App Extension'),
                           tableName=_('Edit App Extension'), AppAdmin=AppAdmin, form=form,
                           editTitle=('Edit App Extension'))
# ---------------------------------------------------------------------------------------
# registe manage app manager setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_manager_setting')
@login_required
def registe_manage_app_manager_setting():
    """List every app manager with a '; '-joined, sorted list of their app names.

    ``data`` maps manager name -> {'id': ..., 'name': <joined app names or ''>};
    a sentinel key ``data['sort']`` holds the sorted manager names the template
    iterates over.
    """
    isCheck = True
    isEdit = True
    isDelete = True
    tHead = [_('App Manager Name'), _('App Name')]
    data = {}
    preData = AppAdmin.query.all()
    for p in preData:
        managerName = p.name
        # Collect the names of all apps linked to this manager.
        for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
            appName = App.query.filter(App.id == temp.app_id).first().name
            if data.get(managerName):
                data[managerName]['name'].append(appName)
            else:
                data[managerName] = {}
                data[managerName]['id'] = p.id
                data[managerName]['name'] = []
                data[managerName]['name'].append(appName)
        if not data.get(managerName):
            # Manager with no linked apps: show an empty cell.
            data[managerName] = {}
            data[managerName]['id'] = p.id
            data[managerName]['name'] = ''
        else:
            # Flatten the collected names into one sorted display string.
            data[managerName]['name'].sort()
            data[managerName]['name'] = '; '.join(data[managerName]['name'])
    data['sort'] = list(data.keys())
    data['sort'].sort()
    return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
                           tableName=_('App Manager List'),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_manager_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_add():
    """Create a new app manager and link it to every app selected in the form."""
    form = AddAppAdminForm(None)
    if form.validate_on_submit():
        # Persist the admin first so its generated id can be looked up for links.
        db.session.add(AppAdmin(id=None, name=form.app_admin_name.data,
                                password=generate_password_hash(form.app_admin_password.data)))
        db.session.commit()
        new_admin_id = AppAdmin.query.filter(AppAdmin.name == form.app_admin_name.data).first().id
        for chosen_name in form.app_list.data:
            linked_app_id = App.query.filter(App.name == chosen_name).first().id
            db.session.add(AdminToApp(id=None, app_admin_id=new_admin_id, app_id=linked_app_id))
        db.session.commit()
        flash(_('New app manager have been added.'))
        return redirect(url_for('main.registe_manage_app_manager_setting'))
    return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
                           tableName=_('Add New App Manager'), AppAdmin=AppAdmin, form=form,
                           addTitle=('Add New App Manager'))
@bp.route('/registe_manage_app_manager_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete(id):
    """Two-step app-manager deletion; cascades the AdminToApp link rows.

    NOTE(review): the list-building below duplicates
    ``registe_manage_app_manager_setting`` — keep the two in sync.
    """
    if request.method == 'GET':
        # Remember which row the confirmation dialog refers to.
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = AppAdmin.query.filter(AppAdmin.id == session['current_delete_id']).first()
            # Remove the manager's app links before removing the manager itself.
            for removeAdminToApp in AdminToApp.query.filter(AdminToApp.app_admin_id==current_data.id).all():
                db.session.delete(removeAdminToApp)
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        return jsonify({'result': 'success'})
    isCheck = True
    isEdit = True
    isDelete = True
    tHead = [_('App Manager Name'), _('App Name')]
    data = {}
    preData = AppAdmin.query.all()
    for p in preData:
        managerName = p.name
        for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
            appName = App.query.filter(App.id == temp.app_id).first().name
            if data.get(managerName):
                data[managerName]['name'].append(appName)
            else:
                data[managerName] = {}
                data[managerName]['id'] = p.id
                data[managerName]['name'] = []
                data[managerName]['name'].append(appName)
        if not data.get(managerName):
            # Manager with no linked apps: show an empty cell.
            data[managerName] = {}
            data[managerName]['id'] = p.id
            data[managerName]['name'] = ''
        else:
            data[managerName]['name'].sort()
            data[managerName]['name'] = '; '.join(data[managerName]['name'])
    # Sentinel key listing the sorted manager names for template iteration.
    data['sort'] = list(data.keys())
    data['sort'].sort()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
                           tableName=_('App Manager List'),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_manager_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete_select():
    """Batch deletion of managers is not supported; notify and return to the list."""
    flash(_('Batch delete operation are not allowed now.'))
    target = url_for('main.registe_manage_app_manager_setting')
    return redirect(target)
@bp.route('/registe_manage_app_manager_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_edit(id):
    """Edit an app manager: rename, optionally re-set the password, and diff the
    app links against the selection remembered in ``session['old_app_list']``.
    """
    if session.get('validate_app_admin_name'):
        form = AddAppAdminForm(session['validate_app_admin_name'])
    else:
        form = AddAppAdminForm(None)
    if form.validate_on_submit():
        # Diff old vs new selections to add/remove only the changed links.
        old_app_list = session['old_app_list'] if session.get('old_app_list') else []
        new_app_list = form.app_list.data
        add_app_list = [a for a in new_app_list if a not in old_app_list]
        remove_app_list = [a for a in old_app_list if a not in new_app_list]
        current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
        current_data.name = form.app_admin_name.data
        # An empty password field means "keep the current password".
        if not form.app_admin_password.data.strip() == '':
            current_data.password = generate_password_hash(form.app_admin_password.data)
        for a in add_app_list:
            add_app_id = App.query.filter(App.name == a).first().id
            db.session.add(AdminToApp(id=None, app_admin_id=id, app_id=add_app_id))
        for a in remove_app_list:
            remove_app_id = App.query.filter(App.name == a).first().id
            removeAdminToApp = AdminToApp.query.filter(AdminToApp.app_admin_id==id, AdminToApp.app_id==remove_app_id).first()
            db.session.delete(removeAdminToApp)
        db.session.commit()
        flash(_('App Admin have been edited.'))
        return redirect(url_for('main.registe_manage_app_manager_setting'))
    elif request.method == 'GET':
        # Pre-populate the form and remember the current selection for diffing.
        current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
        app_list = [a.app_id for a in AdminToApp.query.filter(AdminToApp.app_admin_id == id)]
        app_name_list = [App.query.filter(App.id == a).first().name for a in app_list]
        form.app_admin_name.data = current_data.name
        form.app_list.data = app_name_list
        session['validate_app_admin_name'] = form.app_admin_name.data
        session['old_app_list'] = app_name_list
    return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
                           tableName=_('Edit App Manager'), AppAdmin=AppAdmin, form=form,
                           editTitle=('Edit App Manager'))
# ---------------------------------------------------------------------------------------
# registe manage app tenant setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_tenant_setting')
@login_required
def registe_manage_app_tenant_setting():
    """List all tenants, ordered by tenant name."""
    headers = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
    rows = Tenant.query.order_by(db.asc(Tenant.name)).all()
    return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
                           tableName=_('App Tenant List'), App=App,
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/registe_manage_app_tenant_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_add():
    """Create a new Tenant bound to one app; tenantid is the MD5 of the name."""
    form = AddTenantForm(None)
    if form.validate_on_submit():
        owner_app_id = App.query.filter(App.name == form.app_list.data).first().id
        tenant_digest = hashlib.md5(form.tenant_name.data.encode(encoding='UTF-8')).hexdigest()
        db.session.add(Tenant(id=None, name=form.tenant_name.data,
                              password=generate_password_hash(form.tenant_password.data),
                              tenantid=tenant_digest,
                              app_id=owner_app_id))
        db.session.commit()
        flash(_('New Tenant have been added.'))
        return redirect(url_for('main.registe_manage_app_tenant_setting'))
    return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
                           tableName=_('Add New App Tenant'), form=form,
                           addTitle=('Add New App Tenant'))
@bp.route('/registe_manage_app_tenant_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete(id):
    """Two-step Tenant deletion (GET arms the confirmation, POST executes)."""
    if request.method == 'GET':
        # Remember which row the confirmation dialog refers to.
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = Tenant.query.filter(Tenant.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        return jsonify({'result': 'success'})
    # GET path: re-render the tenant list with the confirmation texts.
    isCheck = True
    isEdit = True
    isDelete = True
    tHead = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
    data = Tenant.query.order_by(db.asc(Tenant.name)).all()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
                           tableName=_('App Tenant List'), App=App,
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_tenant_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete_select():
    """Batch tenant deletion is not supported; notify and show the tenant list.

    Fix: redirect back to the *tenant* list (``registe_manage_app_tenant_setting``)
    instead of the app-manager list — every sibling ``_delete_select`` view
    returns to its own listing page, and this one sent the user to the wrong
    screen (likely a copy-paste slip).
    """
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.registe_manage_app_tenant_setting'))
@bp.route('/registe_manage_app_tenant_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_edit(id):
    """Edit a Tenant: rename, optionally re-set the password, re-bind the app.

    Note: ``tenantid`` (MD5 of the original name) is displayed but never
    regenerated on rename.
    """
    if session.get('validate_app_tenant_name'):
        form = AddTenantForm(session['validate_app_tenant_name'])
    else:
        form = AddTenantForm(None)
    if form.validate_on_submit():
        current_data = Tenant.query.filter(Tenant.id == id).first()
        current_data.name = form.tenant_name.data
        # An empty password field means "keep the current password".
        if not form.tenant_password.data.strip() == '':
            current_data.password = generate_password_hash(form.tenant_password.data)
        app_id = App.query.filter(App.name == form.app_list.data).first().id
        current_data.app_id = app_id
        db.session.commit()
        flash(_('App Tenant have been edited.'))
        return redirect(url_for('main.registe_manage_app_tenant_setting'))
    elif request.method == 'GET':
        # Pre-populate the form from the stored record.
        current_data = Tenant.query.filter(Tenant.id == id).first()
        app_name = App.query.filter(App.id == current_data.app_id).first().name
        form.tenant_name.data = current_data.name
        form.app_list.data = app_name
        form.tenant_id.data = current_data.tenantid
        session['validate_app_tenant_name'] = form.tenant_name.data
    return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
                           tableName=_('Edit App Tenant'), form=form,
                           editTitle=('Edit App Tenant'))
# ---------------------------------------------------------------------------------------
# app manage change current app
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_set_current_app', methods=['GET', 'POST'])
@login_required
def app_manage_set_current_app():
    """Switch the session's "current app" to the app named in the JSON payload."""
    if request.method == 'POST':
        payload = request.get_json()
        chosen = App.query.filter(App.name == payload.get('name')).first()
        if chosen:
            session['current_selected_app_id'] = chosen.id
            session['current_selected_app_name'] = chosen.name
            flash(_('Switch current app success!'))
    # Always acknowledge, even for an unknown app name or a GET.
    return jsonify({'result': 'success'})
def get_app_name_list():
    """Return the sorted names of all apps administered by the current manager.

    Fix: removed a pointless ``order_by`` on a query already filtered down to a
    single primary key, and the now-redundant intermediate sort of App objects —
    sorting the names directly yields the same result.
    """
    admin_id = session['current_app_manager_id']
    app_ids = [link.app_id for link in AdminToApp.query.filter(
        AdminToApp.app_admin_id == admin_id).all()]
    apps = [App.query.filter(App.id == a).first() for a in app_ids]
    return sorted(app.name for app in apps)
def get_current_selected_app_name():
    """Return the app name stored in the session, or ``None`` when unset/empty."""
    if session.get('current_selected_app_name'):
        return session['current_selected_app_name']
    return None
# ---------------------------------------------------------------------------------------
# app manage app list
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_app_list')
@login_required
def app_manage_app_list():
    """List the apps administered by the currently logged-in app manager."""
    links = AdminToApp.query.filter(
        AdminToApp.app_admin_id == session['current_app_manager_id']).all()
    rows = [App.query.filter(App.id == link.app_id).order_by(db.asc(App.name)).first()
            for link in links]
    rows.sort(key=lambda app: app.name)
    headers = [_('App Name'), _('App ID'), _('Creator')]
    return render_template('app_manage_app_list.html', title=_('App List'),
                           tableName=_('App List'), AppAdmin=AppAdmin, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, tHead=headers, data=rows)
# ---------------------------------------------------------------------------------------
# app manage code configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_function_configure')
@login_required
def app_manage_function_configure():
    """Render the online function-configure page with the full app list."""
    session['is_delete'] = 'false'
    headers = [_('App Name'), _('App ID'), _('Creator')]
    rows = App.query.order_by(db.asc(App.name)).all()
    return render_template('app_manage_function_configure.html', title=_('Online Function'),
                           tableName=_('Function Configure'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/app_manage_function_configure_test')
@login_required
def app_manage_function_configure_test():
    """Debug twin of ``app_manage_function_configure`` that first runs
    ``testFunc()`` (an ad-hoc shell/sed experiment) before rendering the page.
    """
    testFunc()
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('App Name'), _('App ID'), _('Creator')]
    data = App.query.order_by(db.asc(App.name)).all()
    return render_template('app_manage_function_configure.html', title=_('Online Function'),
                           tableName=_('Function Configure'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data)
def testFunc():
    """Ad-hoc debug experiment: build a shell pipeline that uses sed/awk to wrap
    template tags around a matched ``<dd>`` block of a local HTML file, write it
    to a temp script, and run it through git-bash.

    Fixes: the script file is now closed via ``with`` even if the write raises;
    the regex and Windows-path literals are raw strings (the originals relied on
    invalid escape sequences like ``\\/`` and ``\\P`` being kept literally, which
    is deprecated); and ``subprocess.getstatusoutput`` — which expects a command
    *string* — was replaced by ``subprocess.run`` on an argument list.
    """
    filePath = 'F:/test/main.html'
    pattern = r'x;/<dd>.*API监控.*<\/dd>/{p;q};/<dd>.*<\/dd>/{x;h;d;ta};/<dd>.*/{x;H;ta};{x;h;d};:a'
    tag_begin = '{if .role_APIguanli}'
    tag_end = '{end}'
    args = 'cat -n %s | sed -n "%s" | { eval $(awk \'NR==1{print "a="$1} END {print "b="$1}\'); ' \
           'sed -e "$a i %s" -e "$b a %s" %s;} > F:/test/test.txt' % (filePath, pattern, tag_begin, tag_end, filePath)
    with open('F:/test/temp.sh', 'w', encoding='utf-8') as shell_file:
        shell_file.write(args)
    exec_path = r"D:\Program Files\Git\git-bash.exe"
    print(args)
    result = subprocess.run([exec_path, 'F:/test/temp.sh'],
                            capture_output=True, text=True)
    print(result.returncode, result.stdout + result.stderr)
@bp.route('/get_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_file_path(tag):
    """Resolve the client-facing path of a configure JSON file for the current app.

    Returns ``{'result': 'success', 'filePath': <path>}`` when the file exists,
    otherwise ``{'result': 'fail', 'filePath': False}``.
    """
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    if tag == 'version2package.json' or tag == 'package2function.json':
        # Existence is checked against the upload folder ...
        filePath = os.path.join(os.path.join(
            current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id), tag)
        if os.path.isfile(filePath):
            # ... but the returned path points into the *_html mirror folder —
            # presumably the browser-reachable location; TODO confirm config.
            filePath = os.path.join(os.path.join(
                current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure_html'], app_id), tag)
            return jsonify({'result': 'success', 'filePath': filePath})
    return jsonify({'result': 'fail', 'filePath': False})
@bp.route('/app_manage_init_file/<tag>', methods=['GET', 'POST'])
@login_required
def app_manage_init_file(tag):
    """(Re)create one of the two configure JSON files with a single Root node.

    Fix: the file is now written inside a ``with`` block so the handle is closed
    even when the write raises (the original leaked it on failure).
    """
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    filePath = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
    if not os.path.isdir(filePath):
        os.makedirs(filePath)
    # Minimal tree document: one Root node with empty payload fields.
    initJson = [
        {
            "data": {
                "file_path": "",
                "item_pattern": ""
            },
            "id": "Root",
            "parent": "#",
            "text": "Root"
        }
    ]
    if tag in ['version2package.json', 'package2function.json']:
        try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                new_file.write(json.dumps(initJson))
            flash(_('File initial for %(tag)s success.', tag=tag))
        except Exception as e:
            # Best-effort: report failure to the user instead of crashing the view.
            print(e)
            flash(_('File initial for %(tag)s failed.', tag=tag))
    return jsonify({'result': 'success'})
@bp.route('/app_manage_save_file', methods=['GET', 'POST'])
@login_required
def app_manage_save_file():
    """Persist a configure JSON document posted by the client.

    Expects a JSON body ``{'tag': <filename>, 'json': <stringified document>}``.
    Fixes: the target file is written via ``with`` so it is closed even if the
    dump raises, and the leftover commented-out write variants were removed.
    """
    data = request.get_json()
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    filePath = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
    if not os.path.isdir(filePath):
        os.makedirs(filePath)
    tag = data['tag']
    new_json = json.loads(data['json'])
    print(new_json)
    if tag in ['version2package.json', 'package2function.json']:
        try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s success.', tag=tag))
        except Exception as e:
            # Best-effort: report failure to the user instead of crashing the view.
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
    return jsonify({'result': 'success'})
@bp.route('/app_manage_upload_file', methods=['GET', 'POST'])
@login_required
def app_manage_upload_file():
    """Import both configure JSON files uploaded through the form."""
    uploaded_v2p = request.files['version_to_package_file']
    uploaded_p2f = request.files['package_to_function_file']
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    target_dir = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
    if not os.path.isdir(target_dir):
        os.makedirs(target_dir)
    # Uploaded files always land under their canonical names.
    uploaded_v2p.save(os.path.join(target_dir, 'version2package.json'))
    uploaded_p2f.save(os.path.join(target_dir, 'package2function.json'))
    flash(_('Import success!'))
    return jsonify({'result': 'success'})
# ---------------------------------------------------------------------------------------
# app manage database configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_database_configure')
@login_required
def app_manage_database_configure():
    """List every tenant database of the currently selected app, by database name."""
    session['is_delete'] = 'false'
    headers = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
    rows = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(db.asc(TenantDb.database)).all()
    return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
                           tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/app_manage_database_configure_add', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_add():
    """Create a TenantDb record for the currently selected app.

    ``aliasname`` is derived as ``<driver>_<database>``; ``type`` is 'system'
    when the form's extension choice is 'System Extension', otherwise 'origin'.
    """
    form = AddTenantDatabaseForm(None)
    if form.validate_on_submit():
        current_tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
        current_type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
        db.session.add(TenantDb(id=None, hostname=form.host_name.data, driver=form.database_driver.data,
                                username=form.user_name.data,
                                password=generate_password_hash(form.user_password.data),
                                database=form.database_name.data, port=form.database_port.data,
                                aliasname='_'.join([form.database_driver.data, form.database_name.data]),
                                type=current_type, tenant_id=current_tenant_id, app_id=session['current_selected_app_id']))
        db.session.commit()
        flash(_('New tenant database have been added.'))
        return redirect(url_for('main.app_manage_database_configure'))
    elif request.method == 'GET':
        # Sensible defaults for a local MySQL setup.
        form.app_name.data = session['current_selected_app_name']
        form.host_name.data = 'localhost'
        form.database_port.data = '3306'
        form.database_driver.data = 'mysql'
        form.user_name.data = 'root'
        pass
    return render_template('app_manage_database_configure.html', title=_('Tenant Database Configure'),
                           tableName=_('Add New Tenant Database'), form=form, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           addTitle=('Add New Tenant Database'))
@bp.route('/app_manage_database_configure_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete(id):
    """Two-step TenantDb deletion (GET arms the confirmation, POST executes).

    Fix: the re-rendered list is now ordered by ``TenantDb.database`` to match
    ``app_manage_database_configure`` — the original ordered by ``username``
    here, so rows jumped around between the list view and the delete view.
    """
    if request.method == 'GET':
        # Stash the id so the follow-up confirmation POST knows what to remove.
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        return jsonify({'result': 'success'})
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
    data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
        db.asc(TenantDb.database)).all()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
                           tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_database_configure_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete_select():
    """Batch deletion of tenant databases is not supported; return to the list."""
    flash(_('Batch delete operation are not allowed now.'))
    target = url_for('main.app_manage_database_configure')
    return redirect(target)
@bp.route('/app_manage_database_configure_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_edit(id):
    """Edit a TenantDb record.

    ``session['validate_alias_name']`` carries the current ``<driver>_<database>``
    alias into the form constructor — presumably so validation accepts the
    unchanged alias; TODO confirm against ``EditTenantDatabaseForm``.
    """
    if session.get('validate_alias_name'):
        form = EditTenantDatabaseForm(session['validate_alias_name'])
    else:
        form = EditTenantDatabaseForm(None)
    if form.validate_on_submit():
        current_data = TenantDb.query.filter(TenantDb.id == id).first()
        current_data.hostname = form.host_name.data
        current_data.driver = form.database_driver.data
        current_data.username = form.user_name.data
        current_data.database = form.database_name.data
        current_data.port = form.database_port.data
        current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
        current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
        current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
        current_data.app_id = session['current_selected_app_id']
        # An empty password field means "keep the current password".
        if not form.user_password.data.strip() == '':
            current_data.password = generate_password_hash(form.user_password.data)
        db.session.commit()
        flash(_('Tenant Database have been edited.'))
        return redirect(url_for('main.app_manage_database_configure'))
    elif request.method == 'GET':
        # Pre-populate the form from the stored record.
        current_data = TenantDb.query.filter(TenantDb.id == id).first()
        form.app_name.data = session['current_selected_app_name']
        form.host_name.data = current_data.hostname
        form.database_port.data = current_data.port
        form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
        form.database_driver.data = current_data.driver
        form.database_name.data = current_data.database
        form.user_name.data = current_data.username
        form.user_password.description = 'In edit mode, set null in this field means no modification for current password.'
        session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
    return render_template('app_manage_database_configure.html', title=_('Tenant Database Configure'),
                           tableName=_('Edit Tenant Database'), form=form,app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           editTitle=('Edit Tenant Database'))
# ---------------------------------------------------------------------------------------
# app manage code configure
# ---------------------------------------------------------------------------------------
def _save_uploaded_file(field, app_id):
    """Persist an uploaded file carried by a WTForms file *field*, if any.

    Returns the saved path (forward slashes) when the field held an upload,
    or None when no file was submitted (e.g. the field was left unchanged
    while editing), so callers can keep the previously stored path.
    """
    if not hasattr(field.data, 'filename'):
        return None
    filename = secure_filename(field.data.filename)
    file_path = os.path.join(os.path.join(
        current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
        app_id), filename).replace('\\', '/')
    field.data.save(file_path)
    return file_path


@bp.route('/app_manage_code_configure', methods=['GET', 'POST'])
@login_required
def app_manage_code_configure():
    """View and edit the code configuration of the currently selected app.

    GET pre-fills the form from the stored AppCode row; a valid POST writes
    the form values (and any newly uploaded content files) back to the row.
    """
    # Seed the form with the last validated repo so the uniqueness validator
    # accepts an unchanged repo URL in edit mode.
    if session.get('validate_repo'):
        form = AddAppCodeForm(session['validate_repo'])
    else:
        form = AddAppCodeForm(None)
    if form.validate_on_submit():
        current_data = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
        current_data.repo = form.code_repo.data
        current_data.app_expand_id = AppExpand.query.filter(AppExpand.type == form.app_type.data).first().id
        current_data.db_config_path = form.db_config_path.data
        # Uploaded files are stored under UPLOAD_FOLDERS/<appid>/.
        app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
        current_data.remote_login_configure_path = form.remote_login_config_path.data
        current_data.remote_login_using_flag = form.remote_login_using_flag.data
        saved = _save_uploaded_file(form.remote_login_using_content, app_id)
        if saved is not None:
            current_data.remote_login_using_content = saved
        # NOTE(review): the assignments below copy DB values back INTO the
        # form during a POST (mirroring the GET branch), which means the
        # user-submitted library_path / filter_package_path /
        # filter_config_path values are never persisted. Behavior preserved
        # as-is -- confirm the intent before changing.
        form.library_path.data = current_data.library_path
        form.filter_package_path.data = current_data.filter_package_path
        form.filter_config_path.data = current_data.filter_configure_path
        saved = _save_uploaded_file(form.filter_content, app_id)
        if saved is not None:
            current_data.filter_content = saved
        # NOTE(review): same reversed pattern for the two flag fields.
        form.filter_import_flag.data = current_data.filter_import_flag
        form.filter_using_flag.data = current_data.filter_using_flag
        saved = _save_uploaded_file(form.filter_import_content, app_id)
        if saved is not None:
            current_data.filter_import_content = saved
        saved = _save_uploaded_file(form.filter_using_content, app_id)
        if saved is not None:
            current_data.filter_using_content = saved
        saved = _save_uploaded_file(form.call_starting_point, app_id)
        if saved is not None:
            current_data.call_starting_point = saved
        saved = _save_uploaded_file(form.third_party_packages, app_id)
        if saved is not None:
            current_data.third_party_packages = saved
        db.session.commit()
        flash(_('Code configuration have been edited.'))
        return redirect(url_for('main.app_manage_code_configure'))
    elif request.method == 'GET':
        # Pre-fill the form from the stored configuration; file fields only
        # get a description showing the currently selected file.
        current_data = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
        current_extension_data = AppExpand.query.filter(AppExpand.id == current_data.app_expand_id).first()
        form.app_type.data = current_extension_data.type
        form.code_repo.data = current_data.repo
        form.tag_begin.data = current_extension_data.pattern_begin
        form.tag_end.data = current_extension_data.pattern_end
        form.db_config_path.data = current_data.db_config_path
        form.remote_login_config_path.data = current_data.remote_login_configure_path
        form.remote_login_using_flag.data = current_data.remote_login_using_flag
        form.remote_login_using_content.description = _('Selected File: ') + current_data.remote_login_using_content
        form.library_path.data = current_data.library_path
        form.filter_package_path.data = current_data.filter_package_path
        form.filter_content.description = _('Selected File: ') + current_data.filter_content
        form.filter_config_path.data = current_data.filter_configure_path
        form.filter_import_flag.data = current_data.filter_import_flag
        form.filter_import_content.description = _('Selected File: ') + current_data.filter_import_content
        form.filter_using_flag.data = current_data.filter_using_flag
        form.filter_using_content.description = _('Selected File: ') + current_data.filter_using_content
        form.call_starting_point.description = _('Selected File: ') + current_data.call_starting_point
        form.third_party_packages.description = _('Selected File: ') + current_data.third_party_packages
        session['validate_repo'] = form.code_repo.data
    return render_template('app_manage_code_configure.html', title=_('Edit Code Information'),
                           tableName=_('Edit Code Information'), form=form, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name())
# ---------------------------------------------------------------------------------------
# app manage mirror list
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_mirror_list')
@login_required
def app_manage_mirror_list():
    """Render the mirror (docker image) list page for the selected app."""
    session['is_delete'] = 'false'
    headers = [_('Mirror Name'), _('Creator'), _('Created Time')]
    # Only a single placeholder row is shown for now.
    rows = [App.query.order_by(db.asc(App.name)).first()]
    return render_template('app_manage_mirror_list.html', title=_('Mirror Manage'),
                           tableName=_('Mirror List'), AppAdmin=AppAdmin, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/app_manage_mirror_list_add', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_add():
    """Kick off an asynchronous mirror (docker image) build for the selected
    app and return the log path the client should poll via /get_log."""
    logPath = 'logs/mirror_construct.log'
    # Truncate the progress log so the client starts polling a clean file;
    # opening with 'w' also creates it when it does not exist yet.
    with open(logPath, 'w', encoding='utf-8') as new_log_file:
        new_log_file.write('')
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    current_code = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
    # mirror_construction is decorated with @async and runs in the background.
    mirror_construction(current_app._get_current_object(), app_id, current_code)
    return jsonify({'code': '0', 'logPath': logPath, 'message': 'Operation done.'})
@bp.route('/app_manage_mirror_list_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_delete(id):
    """Two-step delete under the mirror-list route.

    GET stashes the record id in the session; the follow-up POST carrying
    {'name': 'execute'} performs the actual delete and returns JSON.

    NOTE(review): despite the route name this deletes a TenantDb row and
    renders tenant-database columns -- it looks copy/pasted from the tenant
    database views; confirm it should not target mirror records instead.
    """
    if request.method == 'GET':
        # First step of the confirm dialog: remember which row to delete.
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
            return jsonify({'result': 'success'})
    # Fall-through (GET, or POST without 'execute'): re-render the list.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
    data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
        db.asc(TenantDb.username)).all()
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('app_manage_mirror_list.html', title=_('Tenant Database List'),
                           tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_mirror_list_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_delete_select():
    """Batch deletion is unsupported for mirrors; notify and return to the list."""
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.app_manage_mirror_list'))
@bp.route('/app_manage_mirror_list_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_edit(id):
    """Edit one tenant-database record from within the mirror-list page.

    GET pre-fills the form from the TenantDb row; a valid POST writes the
    form values back and redirects to the mirror list.

    NOTE(review): despite the route name this edits a TenantDb row with the
    tenant-database form -- it looks copy/pasted from the tenant database
    views; confirm whether it should target mirror records instead.
    """
    # Seed the form with the last validated alias so the uniqueness check
    # tolerates an unchanged alias while editing.
    if session.get('validate_alias_name'):
        form = EditTenantDatabaseForm(session['validate_alias_name'])
    else:
        form = EditTenantDatabaseForm(None)
    if form.validate_on_submit():
        current_data = TenantDb.query.filter(TenantDb.id == id).first()
        current_data.hostname = form.host_name.data
        current_data.driver = form.database_driver.data
        current_data.username = form.user_name.data
        current_data.database = form.database_name.data
        current_data.port = form.database_port.data
        # Alias is derived, not user supplied: "<driver>_<database>".
        current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
        current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
        current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
        current_data.app_id = session['current_selected_app_id']
        # An empty password field means "keep the current password".
        if not form.user_password.data.strip() == '':
            current_data.password = generate_password_hash(form.user_password.data)
        db.session.commit()
        flash(_('Tenant Database have been edited.'))
        return redirect(url_for('main.app_manage_mirror_list'))
    elif request.method == 'GET':
        current_data = TenantDb.query.filter(TenantDb.id == id).first()
        form.app_name.data = session['current_selected_app_name']
        form.host_name.data = current_data.hostname
        form.database_port.data = current_data.port
        form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
        form.database_driver.data = current_data.driver
        form.database_name.data = current_data.database
        form.user_name.data = current_data.username
        form.user_password.description = 'In edit mode, set null in this field means no modification for current password.'
        session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
    return render_template('app_manage_mirror_list.html', title=_('Tenant Database Configure'),
                           tableName=_('Edit Tenant Database'), form=form,app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           editTitle=('Edit Tenant Database'))
@bp.route('/get_log', methods=['GET', 'POST'])
@login_required
def get_log():
    """Return the tail of a log file starting at a client-supplied offset.

    Expects JSON {'file': path, 'start': offset}. Responds with the new text
    (newlines converted to <br/>), the next offset to poll from, and whether
    more output is expected ('Operation done.' marks the end of a build).
    """
    data = request.get_json()
    if os.path.isfile(data['file']):
        # Read with the same encoding the log is written with (utf-8); the
        # original left the handle unclosed and used the platform default.
        with open(data['file'], encoding='utf-8') as log_file:
            logStr = log_file.read()[data['start']:]
        pos = data['start'] + len(logStr)
        hasMore = 'Operation done.' not in logStr
        return jsonify({'code': '0', 'log': logStr.replace('\n', '<br/>'), 'pos': pos, 'hasMore': hasMore})
    return jsonify({'code': '-1', 'message': 'Log file not exist.%s' % (data['file'])})
@bp.route('/remove_log', methods=['GET', 'POST'])
@login_required
def remove_log():
    """Truncate (not delete) the log file named in the request JSON.

    Truncation keeps the file present so pollers never see a missing path.
    NOTE(review): mirror_construction() calls this directly inside a
    synthetic request context that carries no JSON body -- confirm
    request.get_json() behaves as expected there.
    """
    data = request.get_json()
    if os.path.isfile(data['file']):
        # 'with' closes the handle even if the write fails.
        with open(data['file'], 'w') as clear_file:
            clear_file.write('')
    return jsonify({'code': '0', 'message': 'remove log at %s' % (datetime.utcnow())})
# Mirror construction: builds a tenant-customized docker image in a background
# thread (via the @async decorator) and streams progress into the build log.
# NOTE(review): `async` became a reserved keyword in Python 3.7, so this
# decorator name only parses on older interpreters -- confirm target version.
@async
def mirror_construction(app, app_id, current_code):
    """Asynchronously build and push a customized mirror for *app_id*.

    Steps (each logged so the UI can stream progress via /get_log): read the
    app's package2function metadata, copy the source tree, inject role-tag
    templates via sed, initialise tenant DB config (stub), extend the filter
    and remote-login code with the uploaded snippets, copy support
    libraries, then `docker build` and `docker push`.

    NOTE(review): source dirs, temp files, the git-bash path and the image
    name are hard-coded for one Windows development machine.
    """
    # NOTE(review): `A and B` evaluates to B, so this `with` enters only the
    # request context, never the app-context object on the left; Flask pushes
    # an app context together with a request context, which is presumably why
    # this works -- confirm before relying on it.
    with app.app_context() and app.request_context(EnvironBuilder('/','http://localhost/').get_environ()):
        # with app.app_context():
        # Truncate the shared log. NOTE(review): remove_log() reads request
        # JSON, but this synthetic request carries none -- verify.
        remove_log()
        logger.info('Operation begin:\n')
        logger.info('1.------Reading function package, atomic function data of app------')
        # read app function json
        tag = 'package2function.json'
        filePath = os.path.join(os.path.join(
            current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id), tag)
        if os.path.isfile(filePath):
            json_dict = json.load(open(filePath, encoding='utf-8'))
            for a in json_dict:
                id = a['id']
                if 'file_path' in a['data'] and 'item_pattern' in a['data']:
                    file_path = a['data']['file_path']
                    item_pattern = a['data']['item_pattern']
                    # logger.info('id: %s\nfile_path: %s\nitem_pattern: %s', id, file_path, item_pattern)
            logger.info('2.------Pulling code from registry------')
            sourceSrcDir = 'F:/code/PPGo_ApiAdmin'
            dstSrcDir = 'F:/code/Tenant_PPGo_ApiAdmin'
            # Start each build from a clean copy of the pristine source tree.
            if os.path.exists(dstSrcDir):
                # print('rmtree')
                shutil.rmtree(dstSrcDir)
            # print('copytree')
            shutil.copytree(sourceSrcDir, dstSrcDir)
            logger.info('3.------insert tag template in code------')
            # Switching the working directory could cause thread-safety
            # problems, so absolute paths are used instead of os.chdir().
            # old_cwd = os.getcwd()
            # os.chdir(dstSrcDir)
            # Build one shell script that wraps every matched menu item in a
            # Go-template role guard ({{if .role_<id>}} ... {{end}}).
            args = ''
            for a in json_dict:
                # NOTE(review): `is not ''` is an identity (not equality)
                # comparison; it usually works for interned literals but
                # should be `!= ''`.
                if 'file_path' in a['data'] and 'item_pattern' in a['data'] and\
                        a['data']['file_path'] is not '' and a['data']['item_pattern'] is not '':
                    # filePath = 'F:/test/main.html'
                    filePath = os.path.join(dstSrcDir, a['data']['file_path']).replace('\\', '/')
                    # pattern = 'x;/<dd>.*API监控.*<\/dd>/{p;q};/<dd>.*<\/dd>/{x;h;d;ta};/<dd>.*/{x;H;ta};{x;h;d};:a'
                    pattern = a['data']['item_pattern']
                    # tag_begin = '{if .role_APIguanli}'
                    tag_begin = '{{if .role_%s}}' % (a['id'])
                    tag_end = '{{end}}'
                    # sed/awk locate the item's line range, then insert the
                    # guard before it and the end tag after it.
                    args += 'cat -n %s | sed -n "%s" | { eval $(awk \'NR==1{print "a="$1} END {print "b="$1}\'); ' \
                            'sed -e "$a i %s" -e "$b a %s" %s;} > F:/temp.txt\n cp F:/temp.txt %s\n' % \
                            (filePath, pattern, tag_begin, tag_end, filePath, filePath)
            shell_file = open('F:/test/temp.sh', 'w', encoding='utf-8')
            shell_file.write(args)
            shell_file.flush()
            shell_file.close()
            exec_path = "D:\Program Files\Git\git-bash.exe"
            # (status, output) = subprocess.getstatusoutput([exec_path, 'F:/test/temp.sh'])
            # Run git-bash without popping up a console window.
            CREATE_NO_WINDOW = 0x08000000
            subprocess.call([exec_path, 'F:/test/temp.sh'], creationflags=CREATE_NO_WINDOW)
            # os.chdir(old_cwd)
            logger.info('4.------initialing tenant database connection------')
            pass
            logger.info('5.------extending filter code------')
            # Copy the uploaded filter source into the target package dir.
            filter_package_path = os.path.join(dstSrcDir, current_code.filter_package_path).replace('\\', '/')
            filter_content = current_code.filter_content
            if not os.path.isdir(filter_package_path):
                os.makedirs(filter_package_path)
            old_filter_file = os.path.join(filter_package_path, os.path.basename(filter_content)).replace('\\', '/')
            if os.path.isfile(old_filter_file):
                os.remove(old_filter_file)
            shutil.copyfile(filter_content, os.path.join(filter_package_path, os.path.basename(filter_content).replace('\\', '/')))
            filter_config_path = os.path.join(dstSrcDir, current_code.filter_configure_path).replace('\\', '/')
            filter_import_flag = current_code.filter_import_flag
            filter_import_content = current_code.filter_import_content
            filter_using_flag = current_code.filter_using_flag
            filter_using_content = current_code.filter_using_content
            with open(filter_config_path, "r", encoding="utf-8") as f:
                lines = f.readlines()
            # Rewrite the config: after each flag line, splice in the uploaded
            # snippet re-indented to match the flag line's leading whitespace.
            with open(filter_config_path, "w", encoding="utf-8") as f_w:
                for line in lines:
                    if filter_import_flag in line:
                        f_w.write(line)
                        pre = line[:line.index(filter_import_flag)]
                        wlines = open(filter_import_content, encoding="utf-8").readlines()
                        for l in wlines:
                            f_w.write(pre + l)
                        # f_w.write(open(filter_import_content, encoding="utf-8").read())
                    elif filter_using_flag in line:
                        f_w.write(line)
                        pre = line[:line.index(filter_using_flag)]
                        wlines = open(filter_using_content, encoding="utf-8").readlines()
                        for l in wlines:
                            f_w.write(pre + l)
                        # f_w.write(open(filter_using_content, encoding="utf-8").read())
                    else:
                        f_w.write(line)
            logger.info('6.------extending remote login code------')
            remote_login_config_path = os.path.join(dstSrcDir, current_code.remote_login_configure_path)
            remote_login_using_flag = current_code.remote_login_using_flag
            remote_login_using_content = current_code.remote_login_using_content
            with open(remote_login_config_path, "r", encoding="utf-8") as f:
                lines = f.readlines()
            # Re-open the file for writing
            with open(remote_login_config_path, "w", encoding="utf-8") as f_w:
                for line in lines:
                    if remote_login_using_flag in line:
                        pre = line[:line.index(remote_login_using_flag)]
                        f_w.write(line)
                        wlines = open(remote_login_using_content, encoding="utf-8").readlines()
                        for l in wlines:
                            f_w.write(pre + l)
                    else:
                        f_w.write(line)
            # Copy in the supporting library files shipped with this tool.
            library_src_path = os.path.join(current_app.config['UPLOAD_FOLDERS']['library_path'],
                                            'go beego\\saas_support')
            library_dst_path = os.path.join(os.path.join(dstSrcDir, current_code.library_path), 'saas_support')
            # if os.path.exists(library_path):
            #     # print('rmtree')
            #     shutil.rmtree(library_path)
            # print('copytree')
            shutil.copytree(library_src_path, library_dst_path)
            logger.info('7.------packing mirror------')
            file_handler.setFormatter(blank_formatter)  # raw format: pass docker output straight through
            # subprocess.call([exec_path, 'docker build -t testdocker:v1 %s'%(dstSrcDir)], creationflags=CREATE_NO_WINDOW)
            # state, output = subprocess.getstatusoutput('docker build -t testdocker:v1 %s'%(dstSrcDir))
            cmd = 'docker build -t reg.silvermaple.com/demo/demo:1.0.0 %s'%(dstSrcDir)
            p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            # Stream the build output into the log as it is produced.
            while p.poll() is None:
                line = p.stdout.readline()
                line = line.strip()
                if line:
                    logger.info(str(line, encoding = "utf-8"))
            if p.returncode == 0:
                logger.info('Mirror packed success.')
            else:
                logger.info('Mirror packed failed.')
            file_handler.setFormatter(formatter)  # restore the normal log format
            logger.info('8.------uploading mirror------')
            file_handler.setFormatter(blank_formatter)
            cmd = 'docker push reg.silvermaple.com/demo/demo:1.0.0'
            # state, output = subprocess.getstatusoutput(cmd)
            # logger.info(output)
            p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            while p.poll() is None:
                line = p.stdout.readline()
                line = line.strip()
                if line:
                    logger.info(str(line, encoding = "utf-8"))
            if p.returncode == 0:
                logger.info('Mirror uploaded success.')
            else:
                logger.info('Mirror uploaded failed.')
            file_handler.setFormatter(formatter)  # restore the normal log format
            # 'Operation done.' is the sentinel get_log() uses to stop polling.
            logger.info('Operation done.')
        else:
            logger.info('File package2function.json not exist.\nOperation done.')
            return jsonify({'code': '-1', 'message': 'File package2function.json not exist.'})
        return jsonify({'code': '0', 'message': 'Success'})
# ---------------------------------------------------------------------------------------
# app manage service deploy
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_service_deploy')
@login_required
def app_manage_service_deploy():
    """Render the service container list for the selected app."""
    session['is_delete'] = 'false'
    headers = [_('ID'), _('Mirror'), _('Instance Number'), _('State'), _('Action')]
    actions = [_('Publish'), _('Adjust'), _('Restart'), _('Stop'), _('Destroy')]
    # Only a single placeholder row is shown for now.
    rows = [App.query.order_by(db.asc(App.name)).first()]
    return render_template('app_manage_service_deploy.html', title=_('Service Deploy'),
                           tableName=_('Service Container List'), action_list=actions, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=False, tHead=headers, data=rows)
@bp.route('/app_manage_service_deploy_add', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_add():
    """Placeholder 'add container' page; persistence is not implemented yet."""
    form = True  # the template only checks truthiness to show the add panel
    if request.method == 'POST':
        # Nothing is stored yet; just report success and return to the list.
        flash(_('New tenant database have been added.'))
        return redirect(url_for('main.app_manage_service_deploy'))
    return render_template('app_manage_service_deploy.html', title=_('Service Deploy'),
                           tableName=_('Add New Container'), form=form, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           addTitle=('Add New Container'))
@bp.route('/app_manage_service_deploy_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_delete(id):
    """Two-step delete: GET remembers the target id, a POST carrying
    {'name': 'execute'} performs the delete and returns JSON."""
    if request.method == 'GET':
        session['current_delete_id'] = id
    else:
        payload = request.get_json()
        if payload.get('name') == 'execute':
            doomed = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
            db.session.delete(doomed)
            db.session.commit()
            flash(_('Record have been deleted.'))
            return jsonify({'result': 'success'})
    # Fall-through (GET, or POST without 'execute'): re-render the list.
    session['is_delete'] = 'false'
    headers = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
    rows = (TenantDb.query
            .filter(TenantDb.app_id == session['current_selected_app_id'])
            .order_by(db.asc(TenantDb.username))
            .all())
    return render_template('app_manage_service_deploy.html', title=_('Tenant Database List'),
                           tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows,
                           confirmTitle='Confirm your choice:',
                           confirmMessage='Do you want to delete this record?')
@bp.route('/app_manage_service_deploy_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_delete_select():
    """Batch deletion is unsupported here; notify and return to the list."""
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.app_manage_service_deploy'))
@bp.route('/app_manage_service_deploy_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_edit(id):
    """Edit one tenant-database record, rendered on the service-deploy page.

    GET pre-fills the form from the TenantDb row; a valid POST writes the
    form values back and redirects to the service-deploy list.
    """
    # Seed the form with the last validated alias so the uniqueness check
    # tolerates an unchanged alias while editing.
    form = EditTenantDatabaseForm(session.get('validate_alias_name') or None)
    if form.validate_on_submit():
        record = TenantDb.query.filter(TenantDb.id == id).first()
        record.hostname = form.host_name.data
        record.driver = form.database_driver.data
        record.username = form.user_name.data
        record.database = form.database_name.data
        record.port = form.database_port.data
        # Alias is derived, not user supplied: "<driver>_<database>".
        record.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
        if form.system_extension.data == 'System Extension':
            record.type = 'system'
        else:
            record.type = 'origin'
        record.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
        record.app_id = session['current_selected_app_id']
        # An empty password field means "keep the current password".
        if form.user_password.data.strip() != '':
            record.password = generate_password_hash(form.user_password.data)
        db.session.commit()
        flash(_('Tenant Database have been edited.'))
        return redirect(url_for('main.app_manage_service_deploy'))
    elif request.method == 'GET':
        record = TenantDb.query.filter(TenantDb.id == id).first()
        form.app_name.data = session['current_selected_app_name']
        form.host_name.data = record.hostname
        form.database_port.data = record.port
        if record.type == 'system':
            form.system_extension.data = 'System Extension'
        else:
            form.system_extension.data = 'Not System Extension'
        form.database_driver.data = record.driver
        form.database_name.data = record.database
        form.user_name.data = record.username
        form.user_password.description = 'In edit mode, set null in this field means no modification for current password.'
        session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
    return render_template('app_manage_service_deploy.html', title=_('Tenant Database Configure'),
                           tableName=_('Edit Tenant Database'), form=form, app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           editTitle=('Edit Tenant Database'))
# ---------------------------------------------------------------------------------------
# tenant service customize function
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_customize_function')
@login_required
def tenant_service_customize_function():
    """List apps as the roots for tenant-level function customization."""
    session['is_delete'] = 'false'
    headers = [_('App Name'), _('App ID'), _('Creator')]
    rows = App.query.order_by(db.asc(App.name)).all()
    return render_template('tenant_service_customize_function.html', title=_('Customized Function'),
                           tableName=_('Function Root'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/tenant_service_customize_function_edit')
@login_required
def tenant_service_customize_function_edit():
    """Open the customization editor for the tenant's function tree."""
    form = True  # the template only checks truthiness to show the editor
    session['is_delete'] = 'false'
    headers = [_('App Name'), _('App ID'), _('Creator')]
    rows = App.query.order_by(db.asc(App.name)).all()
    return render_template('tenant_service_customize_function.html', title=_('Customized Function'),
                           editTitle=_('Customize'),
                           tableName=_('Function Root'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True, form=form,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/tenant_service_customize_function_save', methods=['GET', 'POST'])
@login_required
def tenant_service_customize_function_save():
    """Persist a tenant's customized function tree as a JSON file.

    Expects JSON {'tag': filename, 'json': serialized tree}. Files live
    under UPLOAD_FOLDERS/<tenantid>/ and only the whitelisted tag
    'version2function.json' is ever written.
    """
    data = request.get_json()
    tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
    filePath = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'], tenant_id)
    if not os.path.isdir(filePath):
        os.makedirs(filePath)
    tag = data['tag']
    new_json = json.loads(data['json'])
    if tag in ['version2function.json']:
        try:
            # 'with' guarantees the handle is closed even if json.dump raises
            # (the original leaked the handle on that path).
            with open(os.path.join(filePath, tag), 'w') as new_file:
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s success.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
    return jsonify({'result': 'success'})
@bp.route('/get_tenant_customize_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_tenant_customize_file_path(tag):
    """Map a customization tag to its HTML-served path, if the stored file exists."""
    tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
    if tag == 'version2function.json':
        stored = os.path.join(os.path.join(
            current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'], tenant_id), tag)
        if os.path.isfile(stored):
            # The client fetches through the HTML-serving folder, not the
            # storage folder, so translate the root before answering.
            served = os.path.join(os.path.join(
                current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function_html'], tenant_id), tag)
            return jsonify({'result': 'success', 'filePath': served})
    return jsonify({'result': 'fail', 'filePath': False})
# ---------------------------------------------------------------------------------------
# tenant service customize function
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_role_setting')
@login_required
def tenant_service_role_setting():
    """List all SaaS roles for the current tenant."""
    session['is_delete'] = 'false'
    headers = [_('Role Name'), _('Creator'), _('App Name')]
    rows = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
    tenant = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first()
    return render_template('tenant_service_role_setting.html', title=_('Role List'),
                           tableName=_('Role List'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=True, isEdit=True, current_tenant_name=tenant.name,
                           isDelete=True, tHead=headers, data=rows)
@bp.route('/tenant_service_role_setting_allocate/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_allocate(id):
    """Open the function-allocation editor for one role."""
    session['is_delete'] = 'false'
    # Remembered for the follow-up save/load calls of the editor.
    session['current_role_id'] = id
    headers = [_('Role Name'), _('Creator'), _('App Name')]
    rows = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
    role = SaasRole.query.filter(SaasRole.id == id).first()
    tenant = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first()
    return render_template('tenant_service_role_setting.html', title=_('Role List'),
                           tableName=_('Allocate Function'), app_name_list=get_app_name_list(), form=True, role_id=id,
                           current_selected_app_name=get_current_selected_app_name(), role_name=role.name,
                           isCheck=True, isEdit=True, current_tenant_name=tenant.name,
                           isDelete=True, tHead=headers, data=rows, role_tag_prefix='role_')
@bp.route('/tenant_service_role_setting_save', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_save():
    """Persist a role's allocated-function tree as a JSON file.

    Expects JSON {'tag': filename, 'json': serialized tree}. Files live
    under UPLOAD_FOLDERS/<tenantid>/<role_id>/ and only the whitelisted tag
    'version2function.json' is ever written.
    """
    data = request.get_json()
    role_id = session['current_role_id']
    tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
    filePath = os.path.join(os.path.join(
        current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting'], tenant_id), role_id)
    if not os.path.isdir(filePath):
        os.makedirs(filePath)
    tag = data['tag']
    new_json = json.loads(data['json'])
    if tag in ['version2function.json']:
        try:
            # 'with' guarantees the handle is closed even if json.dump raises
            # (the original leaked the handle on that path).
            with open(os.path.join(filePath, tag), 'w') as new_file:
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s success.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
    return jsonify({'result': 'success'})
@bp.route('/get_role_customize_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_role_customize_file_path(tag):
    """Map a role-customization tag to its HTML-served path, if the stored file exists."""
    tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
    role_id = session['current_role_id']
    if tag == 'version2function.json':
        stored = os.path.join(os.path.join(os.path.join(
            current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting'], tenant_id), role_id), tag)
        if os.path.isfile(stored):
            # Translate the storage root into the HTML-serving root.
            served = os.path.join(os.path.join(os.path.join(
                current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting_html'], tenant_id), role_id), tag)
            return jsonify({'result': 'success', 'filePath': served})
    return jsonify({'result': 'fail', 'filePath': False})
@bp.route('/tenant_service_role_setting_add', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_add():
    """Render the add-role form (GET) or create a new SaasRole (valid POST)."""
    form = AddRoleForm(None)
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    if form.validate_on_submit():
        # funcdata_mod_time is stored as the stringified creation timestamp.
        db.session.add(SaasRole(id=None, name=form.role_name.data, funcdata_mod_time=datetime.now().__str__()))
        db.session.commit()
        flash(_('New role have been added.'))
        return redirect(url_for('main.tenant_service_role_setting'))
    elif request.method == 'GET':
        # Pre-fill the read-only creator/app fields shown on the form.
        form.creator.data = current_tenant_name
        form.app_name.data = get_current_selected_app_name()
        pass
    # Table display flags consumed by the shared template.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    # NOTE(review): other role views use ('Role Name', 'Creator', 'App Name')
    # as tHead; this ordering looks inconsistent -- confirm against template.
    tHead = [_('App Name'), _('App ID'), _('Creator')]
    # flash(_('Batch delete operation are not allowed now.'))
    return render_template('tenant_service_role_setting.html', title=_('Role List'), form=form,
                           tableName=_('Add Role'), app_name_list=get_app_name_list(), addTitle=_('Add Role'),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead)
@bp.route('/tenant_service_role_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_delete(id):
    """Two-step role deletion.

    GET remembers the candidate id in the session and re-renders the list
    with a confirm dialog; the subsequent POST with name == 'execute'
    performs the actual delete and returns a JSON status.
    """
    if request.method == 'GET':
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            # Deletes the id captured on the preceding GET, not the URL id.
            current_data = SaasRole.query.filter(SaasRole.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        return jsonify({'result': 'success'})
    # GET path: rebuild the role list page with the confirm dialog enabled.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('Role Name'), _('Creator'), _('App Name')]
    data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('tenant_service_role_setting.html', title=_('Role List'),
                           tableName=_('Role List'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
                           isDelete=isDelete, tHead=tHead, data=data,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/tenant_service_role_setting_delete_select', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_delete_select():
    """Batch deletion of roles is unsupported; flash a notice and go back."""
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.tenant_service_role_setting'))
@bp.route('/tenant_service_role_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_edit(id):
    """Edit an existing SaasRole's name via the shared role-list template."""
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    # Seed the form with the name captured on the previous GET so validation
    # can distinguish "unchanged name" from "clash with another role".
    if session.get('validate_role_name'):
        form = AddRoleForm(session['validate_role_name'])
    else:
        form = AddRoleForm(None)
    if form.validate_on_submit():
        current_data = SaasRole.query.filter(SaasRole.id == id).first()
        current_data.name = form.role_name.data
        db.session.commit()
        flash(_('Role have been edited.'))
        return redirect(url_for('main.tenant_service_role_setting'))
    elif request.method == 'GET':
        # Pre-fill the form from the record being edited.
        current_data = SaasRole.query.filter(SaasRole.id == id).first()
        form.role_name.data = current_data.name
        form.creator.data = current_tenant_name
        form.app_name.data = get_current_selected_app_name()
        session['validate_role_name'] = form.role_name.data
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('Role Name'), _('Creator'), _('App Name')]
    data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    return render_template('tenant_service_role_setting.html', title=_('Role List'), form=form,
                           tableName=_('Edit Role'), app_name_list=get_app_name_list(), editTitle=_('Edit Role'),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
                           isDelete=isDelete, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# tenant service customize function
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_user_setting')
@login_required
def tenant_service_user_setting():
    """List all SaasUsers for the current tenant, sorted by name."""
    # Table display flags consumed by the shared template.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
    data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    # SaasRole is passed so the template can resolve role names from role_id.
    return render_template('tenant_service_user_setting.html', title=_('User List'),
                           tableName=_('User List'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           current_tenant_name=current_tenant_name,
                           isCheck=isCheck, isEdit=isEdit, SaasRole=SaasRole,
                           isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/tenant_service_user_setting_add', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_add():
    """Render the add-user form (GET) or create a new SaasUser (valid POST)."""
    form = AddUserForm(None)
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    if form.validate_on_submit():
        # The form carries the role by display name; map it back to its id.
        role_id = SaasRole.query.filter(SaasRole.name == form.role_list.data).first().id
        # Passwords are stored hashed, never in the clear.
        db.session.add(SaasUser(id=None, name=form.user_name.data,
                                password=generate_password_hash(form.user_password.data),
                                role_id=role_id))
        db.session.commit()
        flash(_('New user have been added.'))
        return redirect(url_for('main.tenant_service_user_setting'))
    elif request.method == 'GET':
        # Pre-fill the read-only creator/app fields shown on the form.
        form.creator.data = current_tenant_name
        form.app_name.data = get_current_selected_app_name()
        pass
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
    # flash(_('Batch delete operation are not allowed now.'))
    return render_template('tenant_service_user_setting.html', title=_('User List'), form=form,
                           tableName=_('Add User'), app_name_list=get_app_name_list(), addTitle=_('Add User'),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit,
                           isDelete=isDelete, tHead=tHead)
@bp.route('/tenant_service_user_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_delete(id):
    """Two-step user deletion.

    GET remembers the candidate id in the session and re-renders the list
    with a confirm dialog; the subsequent POST with name == 'execute'
    performs the actual delete and returns a JSON status.
    """
    if request.method == 'GET':
        session['current_delete_id'] = id
    else:
        data = request.get_json()
        name = data.get('name')
        if name == 'execute':
            # Deletes the id captured on the preceding GET, not the URL id.
            current_data = SaasUser.query.filter(SaasUser.id == session['current_delete_id']).first()
            db.session.delete(current_data)
            db.session.commit()
            flash(_('Record have been deleted.'))
        return jsonify({'result': 'success'})
    # GET path: rebuild the user list page with the confirm dialog enabled.
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
    data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    confirmTitle = 'Confirm your choice:'
    confirmMessage = 'Do you want to delete this record?'
    return render_template('tenant_service_user_setting.html', title=_('User List'),
                           tableName=_('User List'), app_name_list=get_app_name_list(),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
                           isDelete=isDelete, tHead=tHead, data=data, SaasRole=SaasRole,
                           confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/tenant_service_user_setting_delete_select', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_delete_select():
    """Batch deletion of users is unsupported; flash a notice and go back."""
    flash(_('Batch delete operation are not allowed now.'))
    return redirect(url_for('main.tenant_service_user_setting'))
@bp.route('/tenant_service_user_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_edit(id):
    """Edit an existing SaasUser (name, optional new password, role)."""
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    # Seed the form with the name captured on the previous GET so validation
    # can distinguish "unchanged name" from "clash with another user".
    if session.get('validate_user_name'):
        form = AddUserForm(session['validate_user_name'])
    else:
        form = AddUserForm(None)
    if form.validate_on_submit():
        current_data = SaasUser.query.filter(SaasUser.id == id).first()
        current_data.name = form.user_name.data
        # A blank password field means "keep the existing password".
        if not form.user_password.data.strip() == '':
            current_data.password = generate_password_hash(form.user_password.data)
        current_data.role_id = SaasRole.query.filter(SaasRole.name == form.role_list.data).first().id
        db.session.commit()
        # BUG FIX: the message said 'Role have been edited.' in the user view.
        flash(_('User have been edited.'))
        return redirect(url_for('main.tenant_service_user_setting'))
    elif request.method == 'GET':
        # Pre-fill the form from the record being edited.
        current_data = SaasUser.query.filter(SaasUser.id == id).first()
        form.user_name.data = current_data.name
        form.role_list.data = SaasRole.query.filter(SaasRole.id == current_data.role_id).first().name
        form.creator.data = current_tenant_name
        form.app_name.data = get_current_selected_app_name()
        session['validate_user_name'] = form.user_name.data
    isCheck = True
    isEdit = True
    isDelete = True
    session['is_delete'] = 'false'
    tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
    data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
    current_tenant_id = session['current_tenant_id']
    current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    # BUG FIX: this view rendered 'tenant_service_role_setting.html' (the role
    # list); every other tenant_service_user_setting_* view uses the user
    # template, which also expects the User tHead defined above.
    return render_template('tenant_service_user_setting.html', title=_('User List'), form=form,
                           tableName=_('User List'), app_name_list=get_app_name_list(), editTitle=_('Edit User'),
                           current_selected_app_name=get_current_selected_app_name(),
                           isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
                           isDelete=isDelete, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# remote api service
# ---------------------------------------------------------------------------------------
# Canonical HTTP method names, keyed by themselves for lookup/validation.
HTTPMETHOD = {
    'GET': "GET",
    'POST': "POST",
    'PUT': "PUT",
    'DELETE': "DELETE",
    'PATCH': "PATCH",
    'OPTIONS': "OPTIONS",
    'HEAD': "HEAD",
    'TRACE': "TRACE",
    'CONNECT': "CONNECT",
}
# Standard human-readable status messages used in remote-API responses.
ErrMsgs = {
    'FAILED': "Failed;",
    'NOTFOUND': "Not found;",
    'SUCCESS': "Success;",
    'UNEXPECTED': "Something unexpected happened;",
    'UNAUTHORIZED': "You are not authorized to do that;",
}
class ResponseBaseStruct():
    """Envelope shared by all remote-API responses: status flag + message."""
    # Class-level defaults; both values are immutable, so sharing is safe.
    Success = True
    Errmsg = ErrMsgs['SUCCESS']
class ResponseStruct(ResponseBaseStruct):
    """Full remote-API response: a payload dict plus pagination fields."""
    # Immutable defaults may stay on the class.
    HasMore = False
    Next = ''

    def __init__(self):
        # BUG FIX: ``Data = {}`` used to be a class attribute, so any caller
        # that mutated it in place (rather than rebinding rs.Data) would
        # share one dict across every ResponseStruct instance. Each instance
        # now owns its payload dict.
        self.Data = {}
def obj2json(obj):
    """Flatten a ResponseStruct-like object into a JSON-serializable dict.

    Intended for use as ``json.dumps(rs, default=obj2json)``.
    """
    fields = ("Success", "Errmsg", "Data", "HasMore", "Next")
    return {name: getattr(obj, name) for name in fields}
@bp.route('/funcData', methods=['GET', 'POST'])
def getFuncData():
    """Return the role/function mapping for an app+tenant, API-enveloped.

    Reads <upload_root>/<appID>/<tenantID>/version2function.json and wraps
    one ``{'data': {}, 'id': 'role_<id>'}`` entry per record into a
    ResponseStruct serialized through obj2json.
    """
    form = request.form
    appID = form['appID']
    tenantID = form['tenantID']
    dataFile = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'],
        appID, tenantID, 'version2function.json')
    # BUG FIX: data_json started as None, so a missing file crashed the
    # ``for v in data_json`` loop with TypeError; default to an empty list.
    data_json = []
    if os.path.isfile(dataFile):
        # BUG FIX: the file handle was opened inline and never closed.
        with open(dataFile, 'r', encoding='utf-8') as fh:
            data_json = json.load(fh)
    rs = ResponseStruct()
    rs.Success = True
    rs.Errmsg = ErrMsgs['SUCCESS']
    rs.Data = {}
    rs.Data['ModTime'] = str(time.time())
    rs.Data['Info'] = []
    for v in data_json:
        rs.Data['Info'].append({'data': {}, 'id': 'role_' + v['id']})
    response = current_app.make_response((json.dumps(rs, default=obj2json), 200))
    return response
@bp.route('/funcDataCheck', methods=['GET', 'POST'])
def funcDataCheck():
    """Diagnostic endpoint: parse the app/tenant function file if present.

    The file contents are only printed for inspection; the response is
    always the plain string "success".
    """
    form = request.form
    appID = form['appID']
    tenantID = form['tenantID']
    dataFile = os.path.join(
        current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'],
        appID, tenantID, 'version2function.json')
    if os.path.isfile(dataFile):
        # BUG FIX: the file handle was opened inline and never closed.
        with open(dataFile, 'r', encoding='utf-8') as fh:
            data_json = json.load(fh)
        print(data_json)
    return current_app.make_response("success", 200)
# -*- coding:utf-8 -*-
#!flask/bin/python
"""Compile all gettext translation catalogs for the app with pybabel."""
import sys
import os

# Pick the pybabel executable inside the project's virtualenv.
# BUG FIX: the platform check compared against the misspelled value 'wn32',
# so the Windows branch could never be taken; sys.platform is 'win32'.
if sys.platform == 'win32':
    pybabel = 'flask\\Scripts\\pybabel'
else:
    pybabel = 'flask/bin/pybabel'

# Compile every .po catalog under app/translations into a binary .mo file.
os.system(pybabel + ' compile -d app/translations')
11,548 | 6d2f1f18ba935ef53365076252e9bcb63ebf0b6c | import math
from thread import Thread
from drivers import MPU6050, HTS221
from machine import Pin, I2C
from connection import MessagingService, Wifi, MqttConnection, BudgetManager, config
import ubinascii
import machine
import utime
import ujson
import sys
# MQTT topic templates; '{}' is filled with a device id or package id.
# Device -> package binding (subscribed per device).
TOPIC_DEVICE_PACKAGE = 'hcklI67o/device/{}/package'
# Inbound setpoint topics (broker -> device).
TOPIC_TEMPERATURE_SETPOINT = 'hcklI67o/package/{}/setpoint/temperature'
TOPIC_HUMIDITY_SETPOINT = 'hcklI67o/package/{}/setpoint/humidity'
TOPIC_MOTION_SETPOINT = 'hcklI67o/package/{}/setpoint/motion'
# Outbound measurement topics (device -> broker).
TOPIC_TEMPERATURE_PUBLISH = 'hcklI67o/package/{}/temperature'
TOPIC_HUMIDITY_PUBLISH = 'hcklI67o/package/{}/humidity'
TOPIC_MOTION_PUBLISH = 'hcklI67o/package/{}/motion'
class PackageMonitor:
    """Watches a shipped package's environment and motion sensors and queues
    MQTT alerts whenever a reading exceeds its configured setpoint.

    Runs two background threads: a slow environment loop (temperature +
    humidity every 10 s) and a fast motion loop (every 100 ms).
    """
    def __init__(self, mqtt: MqttConnection, messaging: MessagingService, budget_manager: BudgetManager):
        self.mqtt = mqtt
        self.messaging = messaging
        # Alerts are not published directly; they go through the budget
        # manager, which decides what actually gets transmitted.
        self.budget_manager = budget_manager
        self.environment_thread = Thread(self.__run_environment, "EnvironmentThread")
        self.motion_thread = Thread(self.__run_motion, "MotionThread")
        # Initial setpoints come from config; MQTT messages can override them.
        self.temperature_setpoint = config.get_float("temperature_setpoint")
        self.humidity_setpoint = config.get_float("humidity_setpoint")
        self.motion_setpoint = config.get_float("motion_setpoint")
        # Previous accelerometer sample, used to compute a motion delta.
        self.last_z = 0
        self.last_y = 0
        self.last_x = 0
        # Both sensors sit on the same I2C bus (SDA pin 26, SCL pin 25).
        self.environment_sensor = HTS221(I2C(-1, Pin(26, Pin.IN), Pin(25, Pin.OUT)))
        self.motion_sensor = MPU6050(I2C(-1, Pin(26, Pin.IN), Pin(25, Pin.OUT)))
    def __subscribe_package_id(self):
        # Learn which package this device is bound to.
        self.mqtt.subscribe(TOPIC_DEVICE_PACKAGE.format(self.mqtt.device_id), self._on_package_id, 1)
        self.messaging.notify()
    def _on_package_id(self, topic, package_id):
        # We should unsubscribe from the old id, but umqttsimple does not support this!
        self.mqtt.subscribe(TOPIC_TEMPERATURE_SETPOINT.format(package_id), self._on_temperature_setpoint, 1)
        self.mqtt.subscribe(TOPIC_HUMIDITY_SETPOINT.format(package_id), self._on_humidity_setpoint, 1)
        self.mqtt.subscribe(TOPIC_MOTION_SETPOINT.format(package_id), self._on_motion_setpoint, 1)
        self.messaging.notify()
    def start(self):
        """Subscribe for the package binding and launch both monitor threads."""
        self.__subscribe_package_id()
        self.environment_thread.start()
        self.motion_thread.start()
    def __run_environment(self, thread: Thread):
        # Slow loop: temperature/humidity checks every 10 seconds.
        while thread.active:
            did_transmit = False
            # A falsy (unset) setpoint disables the corresponding check.
            if self.temperature_setpoint:
                if self.__check_temperature():
                    did_transmit = True
            if self.humidity_setpoint:
                if self.__check_humidity():
                    did_transmit = True
            if did_transmit:
                print("Notified messaging service of pending data")
                self.messaging.notify()
            utime.sleep(10)
    def __run_motion(self, thread: Thread):
        # Fast loop: motion sampled every 100 ms.
        while thread.active:
            did_transmit = False
            if self.motion_setpoint:
                if self.__check_motion():
                    did_transmit = True
            if did_transmit:
                print("Notified messaging service of pending data")
                self.messaging.notify()
            utime.sleep_ms(100)
    def __check_temperature(self) -> bool:
        """Enqueue an alert if temperature exceeds its setpoint; return True if so."""
        temperature = self.environment_sensor.read_temp()
        print("Read temperature: {}".format(temperature))
        if temperature > self.temperature_setpoint:
            time = utime.localtime()
            self.budget_manager.enqueue(TOPIC_TEMPERATURE_PUBLISH.format(self.messaging.package_id), time, temperature, self.temperature_setpoint)
            return True
        return False
    def __check_humidity(self) -> bool:
        """Enqueue an alert if humidity exceeds its setpoint; return True if so."""
        humidity = self.environment_sensor.read_humi()
        print("Read humidity: {}".format(humidity))
        if humidity > self.humidity_setpoint:
            time = utime.localtime()
            self.budget_manager.enqueue(TOPIC_HUMIDITY_PUBLISH.format(self.messaging.package_id), time, humidity, self.humidity_setpoint)
            return True
        return False
    def __check_motion(self) -> bool:
        """Enqueue an alert when the acceleration delta exceeds the setpoint."""
        values = self.motion_sensor.get_values()
        z = values['AcZ']
        y = values['AcY']
        x = values['AcX']
        # Crude motion metric: absolute change of the summed axes since the
        # previous sample.
        motion = math.fabs(z + y + x - self.last_z - self.last_y - self.last_x)
        self.last_z = z
        self.last_y = y
        self.last_x = x
        if motion > self.motion_setpoint: #20000
            # NOTE(review): this uses utime.time() while the temperature and
            # humidity checks use utime.localtime() -- confirm which format
            # the budget manager expects.
            time = utime.time()
            self.budget_manager.enqueue(TOPIC_MOTION_PUBLISH.format(self.messaging.package_id), time, motion, self.motion_setpoint)
            return True
        return False
    def _on_temperature_setpoint(self, topic, msg):
        """MQTT callback: update the temperature threshold."""
        self.temperature_setpoint = float(msg)
        # We can't do this, because of the stupid recursion depth
        # config.set_value("temperature_setpoint", self.temperature_setpoint)
        print('Received temperature setpoint {}'.format(msg))
    def _on_humidity_setpoint(self, topic, msg):
        """MQTT callback: update the humidity threshold."""
        self.humidity_setpoint = float(msg)
        # We can't do this, because of the stupid recursion depth
        # config.set_value("humidity_setpoint", self.humidity_setpoint)
        print('Received humidity setpoint {}'.format(msg))
    def _on_motion_setpoint(self, topic, msg):
        """MQTT callback: update the motion threshold."""
        self.motion_setpoint = float(msg)
        # We can't do this, because of the stupid recursion depth
        # config.set_value("motion_setpoint", self.motion_setpoint)
        print('Received motion setpoint {}'.format(msg))
|
11,549 | 53c13809873ddd7b81636dbcaef3fac390c6f1c9 | #!/usr/bin/env python
"""
point_density.py
Calculate density of point data as a raster surface.
Each raster cell contains a value indicating
the number of points contained in the cell.
To get decent performance on large vector datasets, the input vector dataset must
have a gdal-recognized spatial index (ie a .qix file for shapefiles as created by shtree)
Author: Matthew T. Perry
License: You are free to use or modify this code for any purpose.
This license grants no warranty of any kind, express or implied.
"""
import ogr
import sys
import Numeric
import gdal
def getOpts():
    """Return the hard-coded run options as a 6-tuple:
    (vector dataset path, layer index, extent [minx, miny, maxx, maxy],
    cell size, output raster path, GDAL driver name).
    """
    vector_path = "/home/perry/data/world_cities/cities.shp"
    layer_index = 0
    bbox = [-180., -90., 180., 90.]
    cell_size = 1
    output_path = "/home/perry/Desktop/test2.tif"
    driver_name = "GTiff"
    return (vector_path, layer_index, bbox, cell_size, output_path, driver_name)
if __name__ == "__main__":
    # Get the inputs
    (poly_ds,poly_lyr,extent,cellsize,outfile,format) = getOpts()
    # Get the input layer
    ds = ogr.Open(poly_ds)
    lyr = ds.GetLayer(poly_lyr)
    # TODO: Confirm dataset is point and extents overlap
    # Raster grid dimensions derived from the extent and cell size.
    ydist = extent[3] - extent[1]
    xdist = extent[2] - extent[0]
    xcount = int(xdist/cellsize)
    ycount = int(ydist/cellsize)
    # Create output raster
    driver = gdal.GetDriverByName( format )
    dst_ds = driver.Create( outfile, xcount, ycount, 1, gdal.GDT_Float32 )
    # the GT(2) and GT(4) coefficients are zero,
    # and the GT(1) is pixel width, and GT(5) is pixel height.
    # The (GT(0),GT(3)) position is the top left corner of the top left pixel
    gt = (extent[0],cellsize,0,extent[3],0,(cellsize*-1.))
    dst_ds.SetGeoTransform(gt)
    dst_band = dst_ds.GetRasterBand(1)
    dst_band.SetNoDataValue( -9999 )
    pixelnum = 0
    # Walk the grid row by row, top to bottom; each row is written as one
    # scanline so memory use stays constant.
    for ypos in range(ycount):
        # Create output line array
        outArray = Numeric.zeros( (1, xcount) )
        for xpos in range(xcount):
            # create a 4-item list of extents
            minx = xpos * cellsize + extent[0]
            maxy = extent[3] - ypos * cellsize
            miny = maxy - cellsize
            maxx = minx + cellsize
            # Create Polygon geometry from BBOX
            wkt = 'POLYGON ((%f %f, %f %f, %f %f, %f %f, %f %f))' \
               % (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)
            g = ogr.CreateGeometryFromWkt(wkt)
            # Set spatial filter
            lyr.SetSpatialFilter(g)
            numfeatures = lyr.GetFeatureCount()
            #print wkt, numfeatures
            #Assign the number of features in the bbox
            # as value in line array
            #
            # NOTE: this is where one should test for an
            # actual intersection with the geos function
            # (ie loop through features
            # and ensure that the select features
            # actually intersect the cell geometry g)
            Numeric.put( outArray, xpos, numfeatures )
            lyr.ResetReading()
            pixelnum += 1
            print '%.2f pct complete' % (float(pixelnum)/(xcount*ycount) * 100.)
        dst_band.WriteArray(outArray,0,ypos)
|
11,550 | 34c4433ad4b7bff9670e11ee1c312c3184082960 | import random
import turtle
class Maze(object):
    """A size x size grid of cells linked by walled edges.

    ``populate`` links each cell to its upper and left neighbors, so a
    cell's edges list ends with [.., up-link, left-link]; ``displayMaze``
    relies on this exact ordering (edges[-1]/edges[-2]) when drawing walls.
    """
    class Cell(object):
        """One grid cell; tracks its incident edges and a visited flag."""
        def __init__(self):
            self.edges = []
            self.visited = False
        def link(self, neighbor):
            # Create a shared edge registered on both endpoint cells.
            newedge = Maze.Edge(self, neighbor)
            self.edges.append(newedge)
            neighbor.edges.append(newedge)
        def unvisitedEdges(self):
            # Edges whose far endpoint has not been visited yet.
            new_edges = []
            for edge in self.edges:
                if not (edge.follow(self).visited):
                    new_edges.append(edge)
            return new_edges
    class Edge(object):
        """An undirected connection between two cells; walled until carved."""
        def __init__(self, vert1, vert2):
            self.walled = True
            self.v1 = vert1
            self.v2 = vert2
        def follow(self, vert):
            # Given one endpoint, return the other; None if vert is neither.
            if vert == self.v1:
                return self.v2
            if vert == self.v2:
                return self.v1
            return None
    def __init__(self, size):
        self.size = size
        self.cells = []
        self.populate()
        # Entrance/exit cells, assigned later by construct().
        self.start = None
        self.end = None
    def populate(self):
        # Build cells in row-major order; link each to the neighbor above
        # (index - size) and to the left (index - 1). Link order matters:
        # displayMaze indexes edges from the end of the list.
        for x in range(self.size):
            for y in range(self.size):
                index = x * self.size + y
                self.cells.append(Maze.Cell())
                if x != 0:
                    self.cells[index].link(self.cells[index - self.size])
                if y != 0:
                    self.cells[index].link(self.cells[index - 1])
    def numberofcells(self):
        """Return the total number of cells in the grid."""
        return len(self.cells)
    def hasUnvisited(self):
        """Return True while any cell remains unvisited."""
        for cell in self.cells:
            if cell.visited == False:
                return True
        return False
return False
def construct(maze):
    """Carve a maze in place using randomized depth-first search (recursive
    backtracker), starting from cell 0.

    The exit is set to the cell reached at the deepest point of the search,
    i.e. where the backtrack stack was longest.
    """
    path = []  # explicit DFS stack (avoids recursion)
    current_cell = maze.cells[0]
    current_cell.visited = True
    maze.start = current_cell
    longest_path = 0
    end_cell = None
    while maze.hasUnvisited():
        walled_paths = current_cell.unvisitedEdges()
        if len(walled_paths) != 0:
            # Advance: knock down a random wall into an unvisited neighbor.
            next_corridor = random.choice(walled_paths)
            path.append(current_cell)
            next_corridor.walled = False
            current_cell = next_corridor.follow(current_cell)
            current_cell.visited = True
        elif len(path) != 0:
            # Dead end: record it if it is the deepest so far, then backtrack.
            if len(path) > longest_path:
                longest_path = len(path)
                end_cell = current_cell
            current_cell = path.pop()
    # Final check in case the very last dead end was also the deepest.
    if len(path) > longest_path:
        print("fixed")
        longest_path = len(path)
        end_cell = current_cell
    maze.end = end_cell
maze.end = end_cell
def displayMaze(maze, xstart, ystart, length):
    """Draw *maze* with turtle graphics.

    (xstart, ystart) is the top-left corner of the grid; *length* is the
    pixel size of one cell. Start and end cells are filled green and red.
    Relies on Maze.populate's edge ordering: a cell's last edge is its
    left link and its second-to-last is its up link (except on the last row,
    where only the left link exists).
    """
    turtle.up()
    def vertical(col):
        # Draw the wall segments on the right side of column `col`.
        turtle.goto(xstart + length * (col + 1), ystart)
        turtle.seth(-90)
        for x in range(0, maze.size):
            # The cell to the right of the wall holds the shared edge; its
            # position in the edges list depends on whether an up-link exists.
            if x == maze.size - 1:
                status = maze.cells[x * maze.size + col].edges[-1].walled
            else:
                status = maze.cells[x * maze.size + col].edges[-2].walled
            if status:
                turtle.down()
            else:
                turtle.up()
            turtle.forward(length)
            turtle.up()
    def horizontal(row):
        # Draw the wall segments below row `row`.
        turtle.goto(xstart, ystart - length * (row + 1))
        turtle.seth(0)
        for x in range(0, maze.size):
            status = maze.cells[row * maze.size + x].edges[-1].walled
            if status:
                turtle.down()
            else:
                turtle.up()
            turtle.forward(length)
            turtle.up()
    def drawExit(row, col):
        # Fill the cell at (row, col) with the current fill color.
        turtle.goto(xstart + col * length, ystart - row * length)
        turtle.down()
        turtle.begin_fill()
        for x in range(0, 4):
            turtle.forward(length)
            turtle.right(90)
        turtle.end_fill()
        turtle.up()
    # Paint the end (red) and start (green) cells first so walls draw on top.
    for row in range(maze.size):
        for col in range(maze.size):
            if maze.cells[row * maze.size + col] == maze.end:
                turtle.color("white", "red")
                drawExit(row, col)
                turtle.color("black")
            if maze.cells[row * maze.size + col] == maze.start:
                turtle.color("white", "green")
                drawExit(row, col)
                turtle.color("black")
    # Outer boundary square.
    turtle.goto(xstart, ystart)
    turtle.seth(0)
    turtle.down()
    for x in range(0, 4):
        turtle.forward(length * maze.size)
        turtle.right(90)
    turtle.up()
    # Interior walls: size-1 vertical and horizontal divider lines.
    for i in range(0, maze.size - 1):
        vertical(i)
    for j in range(0, maze.size - 1):
        horizontal(j)
    turtle.up()
    turtle.goto(xstart, ystart)
# Demo: carve a 10x10 maze and draw it centered on the turtle canvas.
testmaze = Maze(10)
construct(testmaze)
testlen = 10
turtle.speed(0)
displayMaze(testmaze, testmaze.size * testlen / -2, testmaze.size * testlen / 2, testlen)
turtle.done()
|
11,551 | 700befadf7deb736d5f5e1267effc464de38e0ba | # Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensor utilities to evaluate symmetry and eventually create dense storage."""
from collections import deque
from typing import Any, Deque, List, Tuple
import numpy
def build_symmetry_operations(symmetry: List[Any]) -> None:
    """Rewrite each index permutation in ``symmetry`` as a permutation
    matrix, modifying the list in place.

    Args:
        symmetry (List): Allowed permutations; each entry's first element
            is replaced by a 0/1 matrix P such that ``index . P`` applies
            the original index permutation.
    """
    dim = len(symmetry[0][0])
    identity = numpy.identity(dim, dtype=int)
    for entry in symmetry:
        column_order = numpy.argsort(entry[0])
        entry[0] = identity[:, column_order]
def confirm_symmetry(mat: numpy.ndarray, symmetry: List[Any]) -> None:
    """Validate that ``mat`` obeys every permutation listed in ``symmetry``.

    The first symmetry entry must be the unity operation. When it is the
    only entry (and is not conjugated), there is nothing to verify and the
    function returns immediately. Otherwise the index permutations are
    converted to permutation matrices in place and every matrix element is
    checked against each operation.

    Args:
        mat (numpy.ndarray): Matrix whose symmetry should be confirmed.
        symmetry (List): Entries of the form [indices, parity, conjugate];
            the first entry must be the unity operation.
    """
    unity_ok = validate_unity(symmetry[0])
    if unity_ok and len(symmetry) == 1:
        return
    build_symmetry_operations(symmetry)
    validate_matrix_symmetry(mat, symmetry)
def index_queue(dim: int, highest_index: int) -> Deque[Tuple[int, ...]]:
    """Generate every pointer into a ``dim``-dimensional hypercubic matrix.

    Tuples are produced in row-major order (the last axis varies fastest),
    matching numpy's default iteration order.

    Args:
        dim (int): Number of axes of the matrix of interest.
        highest_index (int): Exclusive upper bound for each axis value.

    Returns:
        Deque[Tuple[int, ...]]: Queue of all possible index tuples.
    """
    pointers = []
    for flat in range(highest_index ** dim):
        # Decompose the flat offset into base-`highest_index` digits,
        # least-significant first, then reverse into axis order.
        digits = []
        remainder = flat
        for _ in range(dim):
            remainder, digit = divmod(remainder, highest_index)
            digits.append(digit)
        digits.reverse()
        pointers.append(tuple(digits))
    return deque(pointers)
def validate_matrix_symmetry(matrix: numpy.ndarray,
                             symmetry: List[Any],
                             threshold: float = 1.0e-8) -> None:
    """Go through every element of the matrix and check that the symmetry
    operations are valid up to a threshold.

    Indices already confirmed as images of an earlier index are pruned from
    the work queue, so each symmetry-equivalent orbit is checked once.

    Args:
        matrix (numpy.ndarray): A matrix of interest.
        symmetry (List): Symmetry that should be validated. Entries after
            the first are [permutation matrix, parity, conjugate-flag].
        threshold (float): The limit at which a symmetry operation is valid.

    Raises:
        ValueError: If there is an error with a symmetry in a permutation.
    """
    all_index = index_queue(len(matrix.shape), matrix.shape[0])
    while all_index:
        index = all_index.popleft()
        value = matrix[index]
        # symmetry[0] is the unity operation; only nontrivial ops checked.
        for permu in symmetry[1:]:
            # Image of this index under the permutation matrix.
            test_index = tuple(numpy.dot(index, permu[0]))
            test_value = matrix[test_index]
            # permu[2] flags complex conjugation; permu[1] is the parity.
            if permu[2]:
                ref_value = permu[1] * numpy.conj(value)
            else:
                ref_value = permu[1] * value
            if numpy.abs(test_value - ref_value) > threshold:
                raise ValueError("Error with symmetry in permutation {} -> {}."
                                 " {} != {}".format(index, test_index,
                                                    ref_value, test_value))
            # The image index is now verified; drop it from the queue so it
            # is not re-checked as a starting point (no-op if already gone).
            try:
                all_index.remove(test_index)
            except ValueError:
                pass
def validate_unity(unity_permutation: List[Any]) -> bool:
    """Check that ``unity_permutation`` is the unity operation.

    A unity operation [indices, parity, conjugate] must have parity 1.0,
    no complex conjugation, and indices in non-decreasing order starting
    at or above 0.

    Args:
        unity_permutation (list): Permutation object to compare against
            the unity operation.

    Returns:
        (bool): True when the entry is the unity operation; False when it
            only differs by requesting conjugation.

    Raises:
        ValueError: If the parity is not 1.0 or the indices are not in
            non-decreasing order.
    """
    if unity_permutation[1] != 1.0:
        raise ValueError("The unity permutation does not have a phase of 1.0")
    if unity_permutation[2]:
        return False
    previous = -1
    for current in unity_permutation[0]:
        if current < previous:
            raise ValueError("The first entry is not unity")
        previous = current
    return True
|
11,552 | e7454e475aa1ebebbd3ea344c2157cedb4fe3373 | #!/usr/bin/env python
# coding=utf-8
from __future__ import print_function
import unittest
import os
import tempfile
from mock import Mock, patch
from click.testing import CliRunner
from shub import deploy_reqs
@patch('shub.deploy_reqs.utils.build_and_deploy_egg')
class TestDeployReqs(unittest.TestCase):
    """Tests for the ``shub deploy-reqs`` CLI command.

    The class-level patch replaces build_and_deploy_egg for every test, so
    each test method receives the mock as its last argument.
    """
    VALID_APIKEY = '1234'
    def setUp(self):
        self.runner = CliRunner()
    def cli_run(self, cli, args):
        # Invoke the CLI with the API key injected via the environment.
        return self.runner.invoke(cli, args, env={'SHUB_APIKEY': self.VALID_APIKEY})
    def test_can_decompress_downloaded_packages_and_call_deploy_reqs(self, deploy_egg_mock):
        # GIVEN
        requirements_file = self._write_tmp_requirements_file()
        with self.runner.isolated_filesystem():
            # WHEN
            result = self.cli_run(deploy_reqs.cli, ["-p -1", requirements_file])
            # THEN
            # One deploy call per egg listed in the requirements file.
            self.assertEqual(2, deploy_egg_mock.call_count, self.error_for(result))
    def test_uses_project_id_from_scrapy_cfg_per_default(self, deploy_egg_mock):
        requirements_file = self._write_tmp_requirements_file()
        with self.runner.isolated_filesystem():
            # GIVEN
            self.write_valid_scrapy_cfg()
            # WHEN I don't provide the project id
            self.cli_run(deploy_reqs.cli, [requirements_file])
            # THEN It uses the project id in the scrapy.cfg file
            deploy_egg_mock.assert_called_with('-1', self.VALID_APIKEY)
    def _write_tmp_requirements_file(self):
        # Build a requirements.txt whose entries are absolute paths to the
        # sample egg archives shipped with the test suite.
        basepath = 'tests/samples/deploy_reqs_sample_project/'
        eggs = ['other-egg-0.2.1.zip', 'inflect-0.2.5.tar.gz']
        tmp_dir = tempfile.mkdtemp(prefix="shub-test-deploy-reqs")
        requirements_file = os.path.join(tmp_dir, 'requirements.txt')
        with open(requirements_file, 'w') as f:
            for egg in eggs:
                f.write(os.path.abspath(os.path.join(basepath, egg)) + "\n")
        return requirements_file
    def write_valid_scrapy_cfg(self):
        # Minimal scrapy.cfg declaring project id -1 for the default lookup.
        valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1

[settings]
default = project.settings
"""
        with open('scrapy.cfg', 'w') as f:
            f.write(valid_scrapy_cfg)
    def error_for(self, result):
        # Format a click invocation result into a readable assertion message.
        return '\nOutput: %s.\nException: %s' % (result.output.strip(), repr(result.exception))
# Allow running this test module directly: ``python <this_file>``.
if __name__ == '__main__':
    unittest.main()
|
11,553 | 19aff83506523dcaf850040fbd055c1d49162931 | from flask import Flask,views,jsonify,request,make_response,json,g,redirect,abort,g,current_app
from ..models import Comment,Post,Tag,Email,AppUser as User,PublicUser
from itsdangerous import TimedJSONWebSignatureSerializer as signer
from ..app_factory import get_app
from ..cache import set_cache,get_cache,make_secret_key,get_key,cache_response,check_cache
api = get_app('api',is_bp=True,static_folder='static',url_prefix='/api/v1')
#check_cache = api.before_request(check_cache)
#cache_response = api.after_request(cache_response)
def get_data():
    # Parse the request payload: prefer a raw (JSON) body, fall back to
    # form-encoded fields. (Python 2 module: print is a statement here.)
    print 'here'
    return json.loads(request.data) if request.data else dict(request.form.items())
def json_response(content):
    """Serialize *content* as a JSON response with the app's key cookie set."""
    res = make_response(json.dumps(content))
    # NOTE(review): sending the server's SECRET_KEY to the client in a
    # cookie looks like a security hazard -- confirm this is intentional.
    res.set_cookie('SECRET_KEY',current_app.config.get('SECRET_KEY'))
    res.headers['Content-Type'] = 'application/json'
    return res
def load_user(tkn):
    """Deserialize an auth token and return the matching user.

    Returns a redirect to /login instead when the token is invalid or
    expired.
    """
    try:
        data = g.get('signer').loads(tkn)
    except Exception:
        # Any signature/expiry failure sends the client back to login.
        return redirect('/login')
    # BUG FIX: this module imports the model as ``AppUser as User``, so the
    # bare name ``AppUser`` raised NameError on every successful decode.
    return User.get_by_id(data['id'])
@api.before_request
def check_auth():
    # Build a request-scoped token signer (7-day expiry) and, when the auth
    # cookie is present, attach the resolved user to flask.g.
    g.signer = signer(current_app.config['SECRET_KEY'],60*60*24*7)
    if request.cookies.get('NGAPP_AUTH_TKN'):
        g.user = load_user(request.cookies.get('NGAPP_AUTH_TKN'))
        print g.user
@api.before_request
def cache_response():
    # Debug hook: logs the request path and its cache key before handling.
    # NOTE(review): this function only prints and shadows the imported
    # ``cache_response`` helper from ..cache -- confirm it is intentional.
    print request.path
    print get_key(request.path)
class TagView(views.MethodView):
    """Read-only tag endpoint: list all tags or fetch one by id."""
    def get(self,tag_id=None):
        if tag_id is None:
            # No id in the URL: return the full tag collection.
            return json_response([dict(name=x.name,description=x.description,id=x.id) for x in Tag.get_all()])
        else:
            tag = Tag.get_by_id(tag_id)
            if tag:
                return json_response(dict(name=tag.name,description=tag.description,id=tag.id))
            # Unknown id: 404 with a JSON error body.
            return json_response(['error']),404
class AddTagView(views.MethodView):
    """Create a tag from either a JSON body or form-encoded fields."""
    def post(self):
        if 'application/json' in request.headers['Content-Type']:
            tag = Tag(**json.loads(request.data))
        else:
            tag = Tag(**dict(request.form.items()))
        tag.save()
        # Echo the persisted tag (including its generated id) back.
        return json_response(dict(name=tag.name,description=tag.description,id=tag.id))
class PostListView(views.MethodView):
    """List the posts belonging to a user."""
    def get(self,user_id=None,tag_id=None):
        if user_id is not None:
            return json_response([x.to_json() for x in User.get_by_id(user_id).posts]),200
        # NOTE(review): falls through and returns None when user_id is
        # missing, which Flask turns into a 500 — confirm this is intended.
    def post(self,user_id=None):
        # Fall back to the authenticated user's id when none is in the URL.
        user_id = user_id or (g.get('user') and getattr(g.get('user'),'id') or None)
        if user_id is not None:
            return json_response([x.to_json() for x in User.get_by_id(user_id).posts]),200
class PostView(views.MethodView):
    """Fetch one/all posts, and create-or-update a post by title or id."""
    def get(self,post_id=None):
        if post_id is None:
            # NOTE(review): py2 ``map`` returns a list; under py3 this
            # would serialize a map object — confirm interpreter version.
            return json_response(map(lambda x: x.to_json(),Post.get_all()))
        return jsonify(Post.get_by_id(post_id).to_json())
    def post(self,post_id=None):
        #print request.data
        #print request.json
        if 'application/json' in request.headers['Content-Type']:
            print json.loads(request.data)
            data = json.loads(request.data)
            p = Post.query.filter_by(title=data.get('title')).first()
            print '---->>>>',p
            # Resolution order: existing post with the same title, else the
            # post addressed by the URL id, else a brand-new Post built
            # from the JSON payload.
            post =\
            Post.query.filter_by(title=json.loads(request.data).get('title')).first() or\
            (
            Post.get_by_id(post_id) if\
            post_id is not None else\
            Post(**json.loads(request.data)
            )
            )
        else:
            print dict(request.form.items())
            # Form-encoded path: URL id wins, else create from form fields.
            post = Post.get_by_id(post_id) if post_id is not None else Post(**dict(request.form.items()))
        post.update()
        return json_response(post.to_json())
class DeletePostView(views.MethodView):
    """Delete a post via POST /post/delete/<id> or DELETE /post/<id>."""

    def _remove(self,post_id):
        # Shared by both verbs (the two bodies were duplicated verbatim):
        # returns (payload, status); 404 when the post does not exist.
        result = [dict(success=False),404]
        post = Post.get_by_id(post_id)
        if post is not None:
            post.delete()
            result[0]['success'] = True
            result[1] = 200
        return jsonify(result[0]),result[1]

    def post(self,post_id):
        return self._remove(post_id)

    def delete(self,post_id):
        return self._remove(post_id)
class LoginView(views.MethodView):
    """Email/password login; issues a signed auth token on success."""

    def post(self):
        data = json.loads(request.data) if request.data else dict(request.form.items())
        # SECURITY FIX: the previous version dumped the raw login payload
        # (including the plaintext password) to a local file ('log3').
        # That debug line is removed — never persist credentials.
        email = Email.query.filter_by(address=data.get('email')).first()
        if email is None:
            return json_response({'error':'email does not exist'}),401
        if email.user.check_password(data.get('password')):
            # Token carries the serialized user; returned in the body and
            # mirrored as a cookie valid for one day.
            tkn = g.signer.dumps(email.user.to_json())
            response = make_response(json.dumps(dict(token=tkn)))
            response.headers['Content-Type'] = 'application/json'
            response.set_cookie('NGAPP_AUTH_TKN',tkn,expires=60*60*24)
            return response
        return json_response({'error':"incorrect login"}),403
class AddCommentView(views.MethodView):
    """Create a comment from the posted payload and echo it back as JSON."""

    def post(self):
        payload = get_data()
        comment = Comment.get_new(**payload)
        return json_response(comment.to_json())
class RegisterUserView(views.MethodView):
    """Register an AppUser after checking username and email uniqueness."""

    def post(self):
        if 'application/json' in request.headers['Content-Type']:
            data = json.loads(request.data)
        else:
            data = dict(request.form.items())
        # Debug ``print data`` removed: it wrote the raw registration
        # payload (password included) to stdout and was py2-only syntax.
        if not user_exists(data['username']):
            if not email_exists(data['email']):
                rtn = json_response(User.get_new(**data).to_json())
            else:
                rtn = json_response(dict(error=True,message="email in use"))
        else:
            rtn = json_response(dict(error=True,message="username in use"))
        return rtn,200
class AddPublicUserView(views.MethodView):
    """Register a PublicUser; rejects duplicate email addresses."""

    def post(self):
        data = get_data()
        # SECURITY FIX: the debug ``print data`` and the dump of the raw
        # payload to a local 'log2' file are removed — they persisted user
        # credentials in plaintext.
        if not email_exists(data.get('email')):
            rtn = json_response(PublicUser.get_new(**data).to_json())
        else:
            rtn = json_response(dict(error=True,message="email in use"))
        return rtn,200
def user_exists(username,public=False):
    """Return True when *username* is already taken.

    Looks in PublicUser when *public* is truthy, AppUser otherwise.
    """
    model = PublicUser if public else User
    return model.query.filter_by(username=username).first() is not None
def email_exists(email):
    """Return True when an Email row with this address already exists."""
    existing = Email.query.filter(Email.address==email).first()
    return existing is not None
# --- route table -----------------------------------------------------------
# Posts (list/fetch/create-or-update/delete).
api.add_url_rule('/post','get_posts',view_func=PostView.as_view('get_posts'))
api.add_url_rule('/post/<int:post_id>','get_post',view_func=PostView.as_view('get_post'))
api.add_url_rule('/post/delete/<int:post_id>','delete_post',view_func=DeletePostView.as_view('delete_post'))
# NOTE(review): '/post/<int:post_id>' is registered twice (PostView above
# and DeletePostView here, which also answers DELETE) — verify Flask
# resolves the duplicate rule the way you expect.
api.add_url_rule('/post/<int:post_id>','int_delete_post',view_func=DeletePostView.as_view('int_delete_post'))
# Tags.
api.add_url_rule('/tag','get_tags',view_func=TagView.as_view('get_tags'))
api.add_url_rule('/tag/<int:tag_id>','get_tag',view_func=TagView.as_view('get_tag'))
api.add_url_rule('/tag/add','add_tag',view_func=AddTagView.as_view('add_tag'))
# Auth, comments and users.
api.add_url_rule('/login','login',view_func=LoginView.as_view('login'))
api.add_url_rule('/comment/add','add_comment',view_func=AddCommentView.as_view('add_comment'))
api.add_url_rule('/user/add','add_user',view_func=RegisterUserView.as_view('add_user'))
api.add_url_rule('/user/<int:user_id>/posts','user_posts',view_func=PostListView.as_view('user_posts'))
api.add_url_rule('/user/posts','user__posts',view_func=PostListView.as_view('user__posts'))
api.add_url_rule('/public/add','add_public',view_func=AddPublicUserView.as_view('add_public'))
if __name__ == "__main__":
    # NOTE(review): ``api`` was created with is_bp=True; a Flask Blueprint
    # has no .run() — presumably get_app returns a full app in standalone
    # mode. Confirm before relying on this entry point.
    api.run(host='0.0.0.0',port=8000,debug=True)
|
11,554 | 13b87794d1426239ac22f3c5eb8fb12641e81a16 | __author__ = "Zhijie Huang"
import time
def f(a):
    """One Collatz step: halve an even value, map an odd value to 3*a + 1."""
    return a // 2 if a % 2 == 0 else 3 * a + 1


def count(b, k):
    """Compute Collatz step counts for starting values up to *b*.

    Returns a list ``steps`` of length b + 1 where ``steps[i]`` is the
    number of Collatz steps from i down to 1, or -1 when it could not be
    determined within *k* forward steps of any already-known value.
    """
    steps = [-1] * (b + 1)
    steps[1] = 0  # 1 is the terminal value.
    # NOTE(review): range(2, b) leaves steps[b] uncomputed unless some
    # other chain passes through b — confirm the bound is intentional.
    for start in range(2, b):
        if steps[start] != -1:
            continue  # already resolved by an earlier chain
        # Walk at most k steps forward until we hit a value whose count
        # is already known.
        chain = []
        value = start
        for _ in range(k):
            chain.append(value)
            value = f(value)
            if value <= b and steps[value] != -1:
                break
        # Back-fill the chain (nearest-to-known first); values above b
        # fall outside the table and are skipped.
        if value <= b and steps[value] != -1:
            for offset, node in enumerate(reversed(chain)):
                if node <= b:
                    steps[node] = steps[value] + offset + 1
    return steps
# Driver: report how many starting values below b reach 1 within k steps.
start = time.time()
k = 200
b = 10000000
result = count(b, k)
# BUG FIX: this accumulator was previously named ``count``, shadowing the
# count() function defined above at module level.
num_within_limit = 0
print(time.time() - start)
for steps_needed in result:
    if 0 <= steps_needed <= k:
        num_within_limit += 1
print(num_within_limit)
print(time.time() - start)
11,555 | 85ab6a97e44d247b3cca4663dcd9d02b256b333f | from fabric.context_managers import cd
from fabric.api import run, put, env
from cloudify import ctx
#env.use_ssh_config = True
def wf_deploy(wf_url, wf_name):
    """Clone the workflow repo *wf_url* into RawaWF/<wf_name> and run its deploy script."""
    ctx.logger.info('workflow download ' + wf_url)
    run('mkdir RawaWF')
    run('git clone --recursive ' + wf_url + ' RawaWF/' + wf_name)
    with cd('RawaWF/' + wf_name):
        run('echo $PWD')
        # '.' sources the script into the current shell rather than
        # executing it in a subshell.
        run('. ./Picard-deploy.sh')
def container(blueprint, image, container_name):
    """Start a detached Docker container mounting ~/<blueprint> at /root/<blueprint>."""
    ctx.logger.info('container creation')
    # Create the host-side blueprint directory only when it is missing.
    run('if [ ! -d ~/' + blueprint + ' ]; then mkdir ~/' + blueprint + '; fi')
    run('sudo docker run -P --name '+ container_name+ ' -v ~/' + blueprint + ':/root/' + blueprint + ' -it -d ' + image + ' bin/bash')
def java(container_name):
    """Install a default JRE inside *container_name* unless java is already present."""
    ctx.logger.info('java installation')
    # ``which java`` returns an empty string when no JVM is installed.
    java_path = run('sudo docker exec -it ' + container_name +' which java')
    if java_path:
        return
    run('sudo docker exec -it ' + container_name + ' apt-get update')
    run('sudo docker exec -it ' + container_name + ' apt-get -y install default-jre')
def get_input(blueprint, container_name):
    """Copy the workflow input file into the container (currently a stub).

    Everything except the log line is commented out, so this only sets a
    hard-coded file name and logs — presumably awaiting node-property
    wiring; verify before use.
    """
    file_name = 'file1.txt' #ctx.node.properties.Source
    #sourcefile = expanduser("~") + '/input/' + file_name
    #run('sudo docker exec -it ' + container_name + ' [ ! -d ' + blueprint + '/' +' ] && sudo docker exec -it ' + container_name + ' mkdir ' + blueprint)
    ctx.logger.info('copy the input')
    #filename = basename(sourcefile)
    #run('cat ' + blueprint + '/' + sourcefile + ' | docker exec -i ' + container_name + ' sh -c cat > /root/' + blueprint + '/' +filename)
def block_deploy(blueprint, container_name, block_url):
    """Download a workflow block jar into the container and execute it."""
    block = ctx.node.name
    block_name = ctx.node.properties['block_name']
    # Create the blueprint dir and fetch the jar only when missing.
    run('sudo docker exec -it ' + container_name + ' [ ! -d ' + blueprint + ' ] && sudo docker exec -it ' + container_name + ' mkdir ' + blueprint)
    run('sudo docker exec -it ' + container_name + ' [ ! -f ' + blueprint + '/' + block_name + ' ] && sudo docker exec -it ' + container_name + ' wget -O ' + blueprint + '/' + block_name + ' ' + block_url)
    ctx.logger.info('Execute the block')
    run('sudo docker exec -it ' + container_name + ' java -jar ' + blueprint + '/' + block_name + ' ' + blueprint + ' ' + block)
|
11,556 | cfac6a17bb3bde9434753394e21819819a53474e | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from luther_app.views import index, overview, preface, intro, text, images, works, page, xml, send_file
# URL table; view names are resolved against the 'luther_app.views' prefix.
# NOTE(review): 'overview', 'preface', 'images' and 'works' patterns are not
# anchored with '$' — they also match any longer path sharing the prefix.
urlpatterns = patterns('luther_app.views',
    url(r'^$', 'index', name='index'),
    url(r'^overview', 'overview', name='overview'),
    url(r'^preface', 'preface', name='preface'),
    url(r'^intro$', 'intro', name='intro'),
    url(r'^text$', 'text', name='text'),
    url(r'^xml$', 'xml', name='xml'),
    url(r'^images$', 'images', name='images'),
    url(r'^works$', 'works', name='works'),
    # Catch-all single-segment page route; 'text/download' below still
    # resolves because [^/]+ cannot match a slash.
    url(r'^(?P<filename>[^/]+)$', 'page', name='page'),
    url(r'^text/download$', 'send_file', name='send_file'),
)
if settings.DEBUG:
    # In development, serve collected static files through Django itself.
    # BUG FIX: ``patterns()`` takes a *prefix* string as its first
    # positional argument; previously the url() entry was being consumed
    # as that prefix, so the static route was never registered.
    urlpatterns += patterns(
        '',
        url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT } ),
    )
11,557 | 863a109c4c960601938e24aec6fc7fb4f988f0d4 | import pygame
import math
pygame.init()
# Window geometry.
win_height = 480
win_width = 500
win = pygame.display.set_mode((win_width, win_height))
pygame.display.set_caption('First Game')
# Player state: position, sprite size and horizontal speed (px/tick).
x = 50
y = 400
width = 64
height = 64
vel = 5
run = True
# Jump state: jump_count sweeps 10 -> -10 to trace an up-then-down arc.
is_jump = False
jump_count = 10
# Facing and walk-animation state.
left = False
right = False
walk_count = 0
# Nine animation frames per walking direction.
walk_right = [pygame.image.load('R1.png'), pygame.image.load('R2.png'), pygame.image.load('R3.png'), pygame.image.load('R4.png'), pygame.image.load('R5.png'), pygame.image.load('R6.png'), pygame.image.load('R7.png'), pygame.image.load('R8.png'), pygame.image.load('R9.png')]
walk_left = [pygame.image.load('L1.png'), pygame.image.load('L2.png'), pygame.image.load('L3.png'), pygame.image.load('L4.png'), pygame.image.load('L5.png'), pygame.image.load('L6.png'), pygame.image.load('L7.png'), pygame.image.load('L8.png'), pygame.image.load('L9.png')]
bg = pygame.image.load('bg.jpg')
char = pygame.image.load('standing.png')
clock = pygame.time.Clock()
def jump_fun(jc):
    """Vertical displacement for jump counter *jc*: half of jc squared."""
    return 0.5 * jc * jc
def redraw_game_window():
    """Draw the background and the correctly-facing player frame, then flip."""
    global walk_count
    win.blit(bg, (0, 0))
    # 9 frames shown for 3 ticks each -> wrap the counter at 27.
    if walk_count + 1 >= 27:
        walk_count = 0
    if left:
        win.blit(walk_left[walk_count // 3], (x, y))
        walk_count += 1
    elif right:
        win.blit(walk_right[walk_count // 3], (x, y))
        walk_count += 1
    else:
        # Standing still: static sprite, animation counter untouched.
        win.blit(char, (x, y))
    # win.fill((0, 0, 0))
    #pygame.draw.rect(win, (255, 0, 0), (x, y, width, height))
    pygame.display.update()
# main game loop
while run:
    # pygame.time.delay(80)
    clock.tick(27)  # cap at 27 FPS to pace the 27-frame walk cycle
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False
    # Horizontal movement, clamped to the window edges.
    keys = pygame.key.get_pressed()
    if keys[pygame.K_LEFT] and x > vel:
        x -= vel
        left = True
        right = False
    elif keys[pygame.K_RIGHT] and x < (win_width - width - vel):
        x += vel
        left = False
        right = True
    else:
        left = False
        right = False
        walk_count = 0
    if not is_jump:
        #if keys[pygame.K_UP] and y > vel:
        #    y -= vel
        #if keys[pygame.K_DOWN] and y < (win_height - height - vel):
        #    y += vel
        # Total height of a full jump arc; blocks jumps that would leave
        # the top of the window.
        top_margin = sum([math.floor(jump_fun(i)) for i in range(1, jump_count + 1)])
        if keys[pygame.K_SPACE] and top_margin < y < (win_height - height):
            is_jump = True
            right = False
            left = False
            walk_count = 0
    else:
        # jump_count runs 10 -> -10: positive half rises, negative half falls.
        if jump_count >= -10 and y < (win_height - height):
            neg = 1
            if jump_count < 0:
                neg = -1
            y -= jump_fun(jump_count) * neg
            jump_count -= 1
        else:
            is_jump = False
            jump_count = 10
    redraw_game_window()
pygame.quit()
11,558 | bd26b141ca88847a2f31b3889d9bc93d68f4c84b | from datetime import timedelta, datetime
#from datetime import datetime
from decimal import Decimal
from pprint import pprint
from django.conf import settings
from django.core.mail import send_mail
from celery.task import task
from celery.task import periodic_task
from celery.task.schedules import crontab
from dbtrade.apps.trader.models import TickerHistory, EmailNotice, EmailNoticeLog
from dbtrade.apps.trader.utils.my_api_client import API, CB_API
from dbtrade.apps.trader.utils.utils import auto_trade
from dbtrade.utils.apiclient import get_bitstamp_ticker
# BUG FIX: Celery's option is ``ignore_result`` (singular) — the previous
# ``ignore_results=True`` was silently ignored, so task results were still
# stored.  The spelling now matches the periodic_task below.
@task(ignore_result=True, name='dbtrade.apps.trader.tasks.trader')
def trader(ticker_id):
    """Run the auto-trader against one saved ticker snapshot.

    Both buy and sell signals are put in 'hold' mode, so no orders are
    actually placed.
    """
    current_ticker = TickerHistory.objects.get(id=ticker_id)
    auto_trade(current_ticker, buy_mode='hold', sell_mode='hold')
# Every 10 minutes: pull ticker data from MtGox, Coinbase and Bitstamp,
# persist a TickerHistory row, then fan out the trader and notification
# tasks for the new snapshot.
@periodic_task(run_every=timedelta(seconds=600), ignore_result=True)
def ticker_save(*args, **kwargs):
    res = API.get_ticker()
    if res['result'] == 'success':
        ticker_data = res['data']
        # Coinbase quotes for 1 BTC and 50 BTC.
        cb_buy_value = CB_API.buy_price(1)
        cb_buy_value_50 = CB_API.buy_price(50)
        bs_ticker = get_bitstamp_ticker()
        print 'bs_ticker= %s' % str(bs_ticker)
        print 'Saving ticker data!'
        #print ticker_data
        ticker_history = TickerHistory(volume=Decimal(ticker_data['vol']['value']),
                                       weighted_average_value=Decimal(ticker_data['vwap']['value']),
                                       weighted_average_value_int=int(ticker_data['vwap']['value_int']),
                                       average_value=Decimal(ticker_data['avg']['value']),
                                       average_value_int=int(ticker_data['avg']['value_int']),
                                       last_value=Decimal(ticker_data['last']['value']),
                                       last_value_int=int(ticker_data['last']['value_int']),
                                       high_value=Decimal(ticker_data['high']['value']),
                                       high_value_int=int(ticker_data['high']['value_int']),
                                       low_value=Decimal(ticker_data['low']['value']),
                                       low_value_int=int(ticker_data['low']['value_int']),
                                       sell_value=Decimal(ticker_data['sell']['value']),
                                       sell_value_int=int(ticker_data['sell']['value_int']),
                                       buy_value=Decimal(ticker_data['buy']['value']),
                                       buy_value_int=int(ticker_data['buy']['value_int']),
                                       cb_buy_value=cb_buy_value,
                                       cb_buy_value_50=cb_buy_value_50,
                                       bs_ask = bs_ticker['ask'],
                                       bs_bid = bs_ticker['bid'],
                                       bs_high = bs_ticker['high'],
                                       bs_last = bs_ticker['last'],
                                       bs_low = bs_ticker['low'],
                                       bs_volume = bs_ticker['volume'],
                                       mtgox_timestamp=ticker_data['now'],
                                       )
        ticker_history.save()
        # Fan out: trade on the snapshot, then evaluate price notifications.
        trader.delay(ticker_history.id)
        email_notice.delay(str(ticker_history.buy_value), str(ticker_history.cb_buy_value), str(ticker_history.bs_ask))
    else:
        print 'Ticker data result other than success: "%s"' % res['result']
# For each market/price-point pair, find active EmailNotice rows whose
# threshold has been crossed and send at most one email per notice per
# frequency window.
# NOTE(review): ``ignore_results`` is not a Celery option (the real kwarg
# is ``ignore_result``) — results are likely still being stored.
@task(ignore_results=True, name='dbtrade.apps.trader.tasks.email_notice')
def email_notice(mtgox_price, coinbase_price, bitstamp_price):
    print 'mtgox_price=%s' % mtgox_price
    print 'coinbase_price=%s' % coinbase_price
    print 'bitstamp_price=%s' % bitstamp_price
    # Prices arrive as strings (from .delay) and are parsed to Decimal.
    mtgox_price = Decimal(mtgox_price)
    coinbase_price = Decimal(coinbase_price)
    bitstamp_price = Decimal(bitstamp_price)
    timedeltas = {
        'HOURLY': timedelta(hours=1),
        'DAILY': timedelta(days=1),
        'WEEKLY': timedelta(days=7)
    }
    for market in ['mtgox', 'coinbase', 'bitstamp']:
        print 'Market %s' % market
        for point in ['high', 'low']:
            now = datetime.now()
            # NOTE(review): fragile ``locals()`` lookup maps the market
            # name onto its price argument — a rename breaks it silently.
            price = locals()['%s_price' % market]
            # high threshold crossed when threshold <= price; low when >=.
            if point == 'high':
                point_extra = 'lte'
            else:
                point_extra = 'gte'
            kwargs = {
                'market': market.upper(),
                '%s_price_point__%s' % (point, point_extra): price,
                'active': True
            }
            print 'Querying for %s' % str(kwargs)
            # Hard floor: never email the same notice twice within an hour.
            universal_time_exclusion = now - timedeltas['HOURLY']
            emails = EmailNotice.objects.filter(**kwargs).exclude(last_sent__gte=universal_time_exclusion)
            print 'Found %d matching notices...' % emails.count()
            for email in emails:
                max_date = now - timedeltas[email.frequency]
                recent_logs = EmailNoticeLog.objects.filter(email_notice=email,date_added__gte=max_date)
                recent_log = recent_logs.order_by('id').reverse()[:1]
                try:
                    recent_log[0]
                except IndexError:
                    print 'No outgoing emails logged for %s range for %s' % (email.frequency, email.email)
                else:
                    #: We have sent another email within the window, for DAILY and WEEKLY folks.
                    print 'Previous outgoing email was sent at %s for %s. Skipping...' % (str(recent_log[0].date_added),
                                                                                         email.email)
                    continue
                # Deactivate once the send quota is exhausted (saved below).
                # NOTE(review): ``!= None`` should be ``is not None``.
                if email.max_send != None and recent_logs.count() + 1 >= email.max_send:
                    email.active = False
                message = '%s\nYour price notification was activated due to price of $%s on %s.\n\n' % (str(now),
                                                                                                        str(price),
                                                                                                        market)
                message += 'See latest charts at https://daybittrader.com/\n'
                message += 'Edit or cancel this notification: https://daybittrader.com/notification/%s' % email.uuid
                print 'Sending...'
                send_mail(subject='Bitcoin price notification for %s ($%s)' % (str(now), str(price)),
                          message=message, from_email='Bitcoin Notifications <%s>' % settings.EMAIL_HOST_USER,
                          recipient_list=[email.email])
                email.last_sent = datetime.now()
                email.save()
|
11,559 | ff4378460279731b297b93c221701bb4534f1326 | import requests
# Fetch the KNIT homepage and print basic response metadata.
r = requests.get('http://knit.ac.in/')
print (r.status_code)
print (r.headers)
print (r.encoding)
11,560 | a412b2bb3ddffb5c9ebf2a84737fb29c9279af3d | #estaciones "IOPV"
import os
# Each character of the first command-line argument encodes a season.
palabra=os.sys.argv[1]
# Season code -> printed season name; unknown characters print nothing.
nombres = {"V": "Verano", "P": "Primavera", "O": "Otoño", "I": "Invierno"}
for estacion in palabra:
    if estacion in nombres:
        print(nombres[estacion])
# end of loop
11,561 | a58fa9c0c2ee0917ae92f713516a04f3fef01d53 | import boto3
import paramiko
class spin_and_execute:
    """Holder for SSH session state when executing on a spun-up instance.

    All configuration (host, credentials, paths) is commented out pending
    a ``conf_file`` module; only the runtime slots are initialized.
    NOTE(review): this class is never instantiated in the visible code and
    paramiko is imported but unused here — confirm intended.
    """
    def __init__(self):
        # Captured stdout / stderr of the last remote command, and the
        # paramiko client once connected.
        self.ssh_output = None
        self.ssh_error = None
        self.client = None
        #self.host= conf_file.HOST
        #self.username = conf_file.USERNAME
        #self.password = conf_file.PASSWORD
        #self.timeout = float(conf_file.TIMEOUT)
        #self.commands = conf_file.COMMANDS
        #self.pkey = conf_file.PKEY
        #self.port = conf_file.PORT
        #self.uploadremotefilepath = conf_file.UPLOADREMOTEFILEPATH
        #self.uploadlocalfilepath = conf_file.UPLOADLOCALFILEPATH
        #self.downloadremotefilepath = conf_file.DOWNLOADREMOTEFILEPATH
        #self.downloadlocalfilepath = conf_file.DOWNLOADLOCALFILEPATH
# Provision one Ubuntu t2.xlarge with a 50 GB root volume, then list the
# instances currently in the 'running' state.
ec2 = boto3.resource('ec2')
# image ubuntu
## ami-0ac019f4fcb7cb7e6
# create a new EC2 instance
instances = ec2.create_instances(
    ImageId='ami-0ac019f4fcb7cb7e6',
    MinCount=1,
    MaxCount=1,
    InstanceType='t2.xlarge',
    KeyName='ec2-keypair_test2_boto3',
    BlockDeviceMappings=[{"DeviceName": "/dev/sda1","Ebs" : { "VolumeSize" : 50 }}]
    )
# Boto 3
# Use the filter() method of the instances collection to retrieve
# all running EC2 instances.
# NOTE(review): the instance created above is still 'pending' at this
# point, so it will normally not appear in this filter.
running_instances = ec2.instances.filter(
    Filters=[{'Name': 'instance-state-name', 'Values': ['running']}])
for instance in running_instances:
    print(instance.id, instance.instance_type)
    # NOTE(review): overwritten on every iteration — only the last
    # running instance's DNS name survives the loop.
    url_login=instance.public_dns_name
|
11,562 | bf996f0b467466e26e2f496d8dd6f0b5302dddc4 | import json
from unittest.mock import MagicMock, Mock
from uuid import uuid4
import pytest
from django.contrib.auth.models import AnonymousUser
from django.core import signing
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse
from django.urls import reverse
from django_babel.templatetags.babel import currencyfmt
from prices import Money, TaxedMoney
from saleor.cart import CartStatus, forms, utils
from saleor.cart.context_processors import cart_counter
from saleor.cart.models import Cart, find_open_cart_for_user
from saleor.cart.views import update
from saleor.core.exceptions import InsufficientStock
from saleor.discount.models import Sale
from saleor.shipping.utils import get_shipment_options
# Factory fixture: builds a GET request to 'home' carrying the given user
# (AnonymousUser when None), all sales as discounts, and a mocked
# get_signed_cookie that returns *token*.
@pytest.fixture()
def cart_request_factory(rf, monkeypatch):
    def create_request(user=None, token=None):
        request = rf.get(reverse('home'))
        if user is None:
            request.user = AnonymousUser()
        else:
            request.user = user
        request.discounts = Sale.objects.all()
        monkeypatch.setattr(
            request, 'get_signed_cookie', Mock(return_value=token))
        return request
    return create_request
# Cart fixtures covering the four (owner, status) combinations.
@pytest.fixture()
def opened_anonymous_cart(db):
    return Cart.objects.get_or_create(user=None, status=CartStatus.OPEN)[0]
@pytest.fixture()
def cancelled_anonymous_cart(db):
    return Cart.objects.get_or_create(user=None, status=CartStatus.CANCELED)[0]
@pytest.fixture()
def opened_user_cart(customer_user):
    return Cart.objects.get_or_create(
        user=customer_user, status=CartStatus.OPEN)[0]
@pytest.fixture()
def cancelled_user_cart(customer_user):
    return Cart.objects.get_or_create(
        user=customer_user, status=CartStatus.CANCELED)[0]
# Makes local-currency conversion a no-op so views return raw prices.
@pytest.fixture()
def local_currency(monkeypatch):
    def side_effect(price, currency):
        return price
    monkeypatch.setattr('saleor.cart.views.to_local_currency', side_effect)
# get_or_create_anonymous_cart_from_token must reuse an open anonymous
# cart, but create a fresh anonymous one for closed carts, unknown tokens,
# and tokens belonging to a user.
def test_get_or_create_anonymous_cart_from_token(
        opened_anonymous_cart, cancelled_anonymous_cart, opened_user_cart,
        cancelled_user_cart):
    queryset = Cart.objects.all()
    carts = list(queryset)
    cart = utils.get_or_create_anonymous_cart_from_token(
        opened_anonymous_cart.token)
    assert Cart.objects.all().count() == 4
    assert cart == opened_anonymous_cart
    # test against getting closed carts
    cart = utils.get_or_create_anonymous_cart_from_token(
        cancelled_anonymous_cart.token)
    assert Cart.objects.all().count() == 5
    assert cart not in carts
    assert cart.user is None
    assert cart.status == CartStatus.OPEN
    cart.delete()
    # test against new token
    cart = utils.get_or_create_anonymous_cart_from_token(uuid4())
    assert Cart.objects.all().count() == 5
    assert cart not in carts
    assert cart.user is None
    assert cart.status == CartStatus.OPEN
    cart.delete()
    # test against getting cart assigned to user
    cart = utils.get_or_create_anonymous_cart_from_token(
        opened_user_cart.token)
    assert Cart.objects.all().count() == 5
    assert cart not in carts
    assert cart.user is None
    assert cart.status == CartStatus.OPEN
    cart.delete()
# get_or_create_user_cart reuses the user's open cart and creates a new
# open cart when the user only has closed ones.
def test_get_or_create_user_cart(
        customer_user, opened_anonymous_cart, cancelled_anonymous_cart,
        opened_user_cart, cancelled_user_cart, admin_user):
    cart = utils.get_or_create_user_cart(customer_user)
    assert Cart.objects.all().count() == 4
    assert cart == opened_user_cart
    # test against getting closed carts
    Cart.objects.create(user=admin_user, status=CartStatus.CANCELED)
    queryset = Cart.objects.all()
    carts = list(queryset)
    cart = utils.get_or_create_user_cart(admin_user)
    assert Cart.objects.all().count() == 6
    assert cart not in carts
    # NOTE(review): identity (``is``) check presumably relies on the util
    # assigning the very same user instance — confirm rather than ``==``.
    assert cart.user is admin_user
    assert cart.status == CartStatus.OPEN
    cart.delete()
# Non-creating lookup: returns the open anonymous cart for its token, and
# None for closed carts, unknown tokens, and user-owned carts.
def test_get_anonymous_cart_from_token(
        opened_anonymous_cart, cancelled_anonymous_cart, opened_user_cart,
        cancelled_user_cart):
    cart = utils.get_anonymous_cart_from_token(opened_anonymous_cart.token)
    assert Cart.objects.all().count() == 4
    assert cart == opened_anonymous_cart
    # test against getting closed carts
    cart = utils.get_anonymous_cart_from_token(cancelled_anonymous_cart.token)
    assert Cart.objects.all().count() == 4
    assert cart is None
    # test against new token
    cart = utils.get_anonymous_cart_from_token(uuid4())
    assert Cart.objects.all().count() == 4
    assert cart is None
    # test against getting cart assigned to user
    cart = utils.get_anonymous_cart_from_token(opened_user_cart.token)
    assert Cart.objects.all().count() == 4
    assert cart is None
# Non-creating lookup by user: open cart is returned; a user with only
# closed carts gets None.
def test_get_user_cart(
        opened_anonymous_cart, cancelled_anonymous_cart, opened_user_cart,
        cancelled_user_cart, admin_user, customer_user):
    cart = utils.get_user_cart(customer_user)
    assert Cart.objects.all().count() == 4
    assert cart == opened_user_cart
    # test against getting closed carts
    Cart.objects.create(user=admin_user, status=CartStatus.CANCELED)
    cart = utils.get_user_cart(admin_user)
    assert Cart.objects.all().count() == 5
    assert cart is None
# Request dispatch: authenticated requests go through the user-cart path,
# anonymous requests through the token path (both mocked here).
def test_get_or_create_cart_from_request(
        cart_request_factory, monkeypatch, customer_user):
    token = uuid4()
    queryset = Cart.objects.all()
    request = cart_request_factory(user=customer_user, token=token)
    user_cart = Cart(user=customer_user)
    anonymous_cart = Cart()
    mock_get_for_user = Mock(return_value=user_cart)
    mock_get_for_anonymous = Mock(return_value=anonymous_cart)
    monkeypatch.setattr(
        'saleor.cart.utils.get_or_create_user_cart', mock_get_for_user)
    monkeypatch.setattr(
        'saleor.cart.utils.get_or_create_anonymous_cart_from_token',
        mock_get_for_anonymous)
    returned_cart = utils.get_or_create_cart_from_request(request, queryset)
    mock_get_for_user.assert_called_once_with(customer_user, queryset)
    assert returned_cart == user_cart
    request = cart_request_factory(user=None, token=token)
    returned_cart = utils.get_or_create_cart_from_request(request, queryset)
    mock_get_for_anonymous.assert_called_once_with(token, queryset)
    assert returned_cart == anonymous_cart
# Non-creating request dispatch: an existing cart is returned with the
# request's discounts attached; when no cart exists an unsaved placeholder
# is handed back (never persisted).
def test_get_cart_from_request(
        monkeypatch, customer_user, cart_request_factory):
    queryset = Cart.objects.all()
    token = uuid4()
    request = cart_request_factory(user=customer_user, token=token)
    user_cart = Cart(user=customer_user)
    mock_get_for_user = Mock(return_value=user_cart)
    monkeypatch.setattr(
        'saleor.cart.utils.get_user_cart', mock_get_for_user)
    returned_cart = utils.get_cart_from_request(request, queryset)
    mock_get_for_user.assert_called_once_with(customer_user, queryset)
    assert returned_cart == user_cart
    assert list(returned_cart.discounts) == list(request.discounts)
    mock_get_for_user = Mock(return_value=None)
    monkeypatch.setattr(
        'saleor.cart.utils.get_user_cart', mock_get_for_user)
    returned_cart = utils.get_cart_from_request(request, queryset)
    mock_get_for_user.assert_called_once_with(customer_user, queryset)
    # Placeholder cart must not be written to the database.
    assert not Cart.objects.filter(token=returned_cart.token).exists()
    anonymous_cart = Cart()
    mock_get_for_anonymous = Mock(return_value=anonymous_cart)
    monkeypatch.setattr(
        'saleor.cart.utils.get_anonymous_cart_from_token',
        mock_get_for_anonymous)
    request = cart_request_factory(user=None, token=token)
    returned_cart = utils.get_cart_from_request(request, queryset)
    mock_get_for_user.assert_called_once_with(customer_user, queryset)
    assert returned_cart == anonymous_cart
    mock_get_for_anonymous = Mock(return_value=None)
    monkeypatch.setattr(
        'saleor.cart.utils.get_anonymous_cart_from_token',
        mock_get_for_anonymous)
    returned_cart = utils.get_cart_from_request(request, queryset)
    assert not Cart.objects.filter(token=returned_cart.token).exists()
# Logging in with an anonymous cart cookie must transfer that cart (same
# token) to the authenticated user.
def test_find_and_assign_anonymous_cart(
        opened_anonymous_cart, customer_user, client):
    cart_token = opened_anonymous_cart.token
    # Anonymous user has a cart with token stored in cookie
    value = signing.get_cookie_signer(salt=utils.COOKIE_NAME).sign(cart_token)
    client.cookies[utils.COOKIE_NAME] = value
    # Anonymous logs in
    response = client.post(
        reverse('account:login'),
        {'username': customer_user.email, 'password': 'password'}, follow=True)
    assert response.context['user'] == customer_user
    # User should have only one cart, the same as he had previously in
    # anonymous session
    authenticated_user_carts = customer_user.carts.filter(
        status=CartStatus.OPEN)
    assert authenticated_user_carts.count() == 1
    assert authenticated_user_carts[0].token == cart_token
# Logging in with no cart cookie must not create a cart.
def test_login_without_a_cart(customer_user, client):
    assert utils.COOKIE_NAME not in client.cookies
    response = client.post(
        reverse('account:login'),
        {'username': customer_user.email, 'password': 'password'}, follow=True)
    assert response.context['user'] == customer_user
    authenticated_user_carts = customer_user.carts.filter(
        status=CartStatus.OPEN)
    assert authenticated_user_carts.count() == 0
# A cookie holding a token that matches no cart is ignored on login.
def test_login_with_incorrect_cookie_token(customer_user, client):
    value = signing.get_cookie_signer(salt=utils.COOKIE_NAME).sign('incorrect')
    client.cookies[utils.COOKIE_NAME] = value
    response = client.post(
        reverse('account:login'),
        {'username': customer_user.email, 'password': 'password'}, follow=True)
    assert response.context['user'] == customer_user
    authenticated_user_carts = customer_user.carts.filter(
        status=CartStatus.OPEN)
    assert authenticated_user_carts.count() == 0
# When the user already had an open cart, adopting the anonymous cart must
# cancel the previously open user cart.
def test_find_and_assign_anonymous_cart_and_close_opened(
        customer_user, opened_user_cart, opened_anonymous_cart,
        cart_request_factory):
    token = opened_anonymous_cart.token
    token_user = opened_user_cart.token
    request = cart_request_factory(user=customer_user, token=token)
    # Invoke the decorator directly with a stub view/response.
    utils.find_and_assign_anonymous_cart()(
        lambda request: Mock(delete_cookie=lambda name: None))(request)
    token_cart = Cart.objects.filter(token=token).first()
    user_cart = Cart.objects.filter(token=token_user).first()
    assert token_cart.user.pk == customer_user.pk
    assert token_cart.status == CartStatus.OPEN
    assert user_cart.status == CartStatus.CANCELED
# check_quantity=False bypasses stock validation entirely.
def test_adding_without_checking(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 1000, check_quantity=False)
    assert len(cart) == 1
# Adding zero of a variant creates no line.
def test_adding_zero_quantity(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 0)
    assert len(cart) == 0
# Repeated adds of one variant accumulate on a single line.
def test_adding_same_variant(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 1)
    cart.add(variant, 2)
    assert len(cart) == 1
    assert cart.count() == {'total_quantity': 3}
    assert cart.get_total().gross == Money(30, 'USD')
# replace=True overwrites the line quantity instead of accumulating.
def test_replacing_same_variant(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 1, replace=True)
    cart.add(variant, 2, replace=True)
    assert len(cart) == 1
    assert cart.count() == {'total_quantity': 2}
# Negative quantities are rejected.
def test_adding_invalid_quantity(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    with pytest.raises(ValueError):
        cart.add(variant, -1)
# get_line matches a line only when its extra data equals the query data.
@pytest.mark.parametrize('create_line_data, get_line_data, lines_equal', [
    (None, None, True),
    ({'gift-wrap': True}, None, False),
    ({'gift-wrap': True}, {'gift-wrap': True}, True)])
def test_getting_line(
        create_line_data, get_line_data, lines_equal, cart, product_in_stock):
    variant = product_in_stock.variants.get()
    assert cart.get_line(variant) is None
    line = cart.create_line(variant, 1, create_line_data)
    fetched_line = cart.get_line(variant, data=get_line_data)
    lines_are_equal = fetched_line == line
    assert lines_equal is lines_are_equal
# change_status accepts only known CartStatus values.
def test_change_status(cart):
    with pytest.raises(ValueError):
        cart.change_status('spanish inquisition')
    cart.change_status(CartStatus.OPEN)
    assert cart.status == CartStatus.OPEN
    cart.change_status(CartStatus.CANCELED)
    assert cart.status == CartStatus.CANCELED
# Shipping becomes required once a physical variant is in the cart.
def test_shipping_detection(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    assert not cart.is_shipping_required()
    cart.add(variant, 1, replace=True)
    assert cart.is_shipping_required()
# Context processor exposes the cart quantity as 'cart_counter'.
def test_cart_counter(monkeypatch):
    monkeypatch.setattr(
        'saleor.cart.context_processors.get_cart_from_request',
        Mock(return_value=Mock(quantity=4)))
    ret = cart_counter(Mock())
    assert ret == {'cart_counter': 4}
# Yields (variant, per-item price) pairs for the lines matching a variant.
def test_get_product_variants_and_prices():
    variant = Mock(product_id=1, id=1)
    cart = MagicMock(spec=Cart)
    cart.lines.all.return_value = [
        Mock(
            quantity=1, variant=variant,
            get_price_per_item=Mock(return_value=10))]
    variants = list(utils.get_product_variants_and_prices(cart, variant))
    assert variants == [(variant, 10)]
# A line whose variant raises InsufficientStock marks the cart unavailable.
def test_contains_unavailable_variants():
    missing_variant = Mock(
        check_quantity=Mock(side_effect=InsufficientStock('')))
    cart = MagicMock()
    cart.lines.all.return_value = [Mock(variant=missing_variant)]
    assert utils.contains_unavailable_variants(cart)
    variant = Mock(check_quantity=Mock())
    cart.lines.all.return_value = [Mock(variant=variant)]
    assert not utils.contains_unavailable_variants(cart)
# Lines whose stock dropped to zero are removed from the cart.
def test_remove_unavailable_variants(cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 1)
    variant.stock.update(quantity=0)
    utils.remove_unavailable_variants(cart)
    assert len(cart) == 0
# The availability check leaves the cart alone when everything is in
# stock, and triggers removal (stubbed here to empty the line) otherwise.
def test_check_product_availability_and_warn(
        monkeypatch, cart, product_in_stock):
    variant = product_in_stock.variants.get()
    cart.add(variant, 1)
    monkeypatch.setattr(
        'django.contrib.messages.warning', Mock(warning=Mock()))
    monkeypatch.setattr(
        'saleor.cart.utils.contains_unavailable_variants',
        Mock(return_value=False))
    utils.check_product_availability_and_warn(MagicMock(), cart)
    assert len(cart) == 1
    monkeypatch.setattr(
        'saleor.cart.utils.contains_unavailable_variants',
        Mock(return_value=True))
    monkeypatch.setattr(
        'saleor.cart.utils.remove_unavailable_variants',
        lambda c: c.add(variant, 0, replace=True))
    utils.check_product_availability_and_warn(MagicMock(), cart)
    assert len(cart) == 0
# AddToCartForm: valid data adds the variant; the base class requires a
# get_variant override; missing quantity fails validation.
def test_add_to_cart_form():
    cart_lines = []
    cart = Mock(
        add=lambda variant, quantity: cart_lines.append(variant),
        get_line=Mock(return_value=Mock(quantity=1)))
    data = {'quantity': 1}
    form = forms.AddToCartForm(data=data, cart=cart, product=Mock())
    product_variant = Mock(check_quantity=Mock(return_value=None))
    form.get_variant = Mock(return_value=product_variant)
    assert form.is_valid()
    form.save()
    assert cart_lines == [product_variant]
    with pytest.raises(NotImplementedError):
        # No get_variant override on the base form.
        data = {'quantity': 1}
        form = forms.AddToCartForm(data=data, cart=cart, product=Mock())
        form.is_valid()
    data = {}
    form = forms.AddToCartForm(data=data, cart=cart, product=Mock())
    assert not form.is_valid()
# A variant lookup miss (ObjectDoesNotExist) invalidates the form.
def test_form_when_variant_does_not_exist():
    cart_lines = []
    cart = Mock(
        add=lambda variant, quantity: cart_lines.append(Mock()),
        get_line=Mock(return_value=Mock(quantity=1)))
    form = forms.AddToCartForm(data={'quantity': 1}, cart=cart, product=Mock())
    form.get_variant = Mock(side_effect=ObjectDoesNotExist)
    assert not form.is_valid()
def test_add_to_cart_form_when_empty_stock():
cart_lines = []
cart = Mock(
add=lambda variant, quantity: cart_lines.append(Mock()),
get_line=Mock(return_value=Mock(quantity=1)))
form = forms.AddToCartForm(data={'quantity': 1}, cart=cart, product=Mock())
exception_mock = InsufficientStock(
Mock(get_stock_quantity=Mock(return_value=1)))
product_variant = Mock(check_quantity=Mock(side_effect=exception_mock))
form.get_variant = Mock(return_value=product_variant)
assert not form.is_valid()
def test_add_to_cart_form_when_insufficient_stock():
cart_lines = []
cart = Mock(
add=lambda variant, quantity: cart_lines.append(variant),
get_line=Mock(return_value=Mock(quantity=1)))
form = forms.AddToCartForm(data={'quantity': 1}, cart=cart, product=Mock())
exception_mock = InsufficientStock(
Mock(get_stock_quantity=Mock(return_value=4)))
product_variant = Mock(check_quantity=Mock(side_effect=exception_mock))
form.get_variant = Mock(return_value=product_variant)
assert not form.is_valid()
def test_replace_cart_line_form(cart, product_in_stock):
variant = product_in_stock.variants.get()
initial_quantity = 1
replaced_quantity = 4
cart.add(variant, initial_quantity)
data = {'quantity': replaced_quantity}
form = forms.ReplaceCartLineForm(data=data, cart=cart, variant=variant)
assert form.is_valid()
form.save()
assert cart.quantity == replaced_quantity
def test_replace_cartline_form_when_insufficient_stock(
monkeypatch, cart, product_in_stock):
variant = product_in_stock.variants.get()
initial_quantity = 1
replaced_quantity = 4
cart.add(variant, initial_quantity)
exception_mock = InsufficientStock(
Mock(get_stock_quantity=Mock(return_value=2)))
monkeypatch.setattr(
'saleor.product.models.ProductVariant.check_quantity',
Mock(side_effect=exception_mock))
data = {'quantity': replaced_quantity}
form = forms.ReplaceCartLineForm(data=data, cart=cart, variant=variant)
assert not form.is_valid()
with pytest.raises(KeyError):
form.save()
assert cart.quantity == initial_quantity
def test_view_empty_cart(client, request_cart):
response = client.get(reverse('cart:index'))
assert response.status_code == 200
def test_view_cart(client, sale, product_in_stock, request_cart):
    """Cart page renders OK and exposes per-line totals in the context."""
    variant = product_in_stock.variants.get()
    request_cart.add(variant, 1)
    response = client.get(reverse('cart:index'))
    response_cart_line = response.context[0]['cart_lines'][0]
    cart_line = request_cart.lines.first()
    # NOTE(review): asserting the two totals are *not* equal looks suspicious.
    # Presumably the context total includes the `sale` discount while
    # cart_line.get_total() does not -- confirm the intent before changing.
    assert not response_cart_line['get_total'] == cart_line.get_total()
    assert response.status_code == 200
def test_view_update_cart_quantity(
client, local_currency, product_in_stock, request_cart):
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
response = client.post(
reverse('cart:update-line', kwargs={'variant_id': variant.pk}),
data={'quantity': 3}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.status_code == 200
assert request_cart.quantity == 3
def test_view_invalid_update_cart(client, product_in_stock, request_cart):
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
response = client.post(
reverse('cart:update-line', kwargs={'variant_id': variant.pk}),
data={}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
resp_decoded = json.loads(response.content.decode('utf-8'))
assert response.status_code == 400
assert 'error' in resp_decoded.keys()
assert request_cart.quantity == 1
def test_cart_page_without_openexchagerates(
client, product_in_stock, request_cart, settings):
settings.OPENEXCHANGERATES_API_KEY = None
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
response = client.get(reverse('cart:index'))
context = response.context
assert context['local_cart_total'] is None
def test_cart_page_with_openexchagerates(
client, monkeypatch, product_in_stock, request_cart, settings):
settings.DEFAULT_COUNTRY = 'PL'
settings.OPENEXCHANGERATES_API_KEY = 'fake-key'
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
response = client.get(reverse('cart:index'))
context = response.context
assert context['local_cart_total'] is None
monkeypatch.setattr(
'django_prices_openexchangerates.models.get_rates',
lambda c: {'PLN': Mock(rate=2)})
response = client.get(reverse('cart:index'))
context = response.context
assert context['local_cart_total'].currency == 'PLN'
def test_cart_summary_page(client, product_in_stock, request_cart):
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
response = client.get(reverse('cart:cart-summary'))
assert response.status_code == 200
content = response.context
assert content['quantity'] == request_cart.quantity
cart_total = request_cart.get_total()
assert content['total'] == currencyfmt(
cart_total.gross.amount, cart_total.currency)
assert len(content['lines']) == 1
cart_line = content['lines'][0]
assert cart_line['variant'] == variant.name
assert cart_line['quantity'] == 1
def test_cart_summary_page_empty_cart(client, request_cart):
response = client.get(reverse('cart:cart-summary'))
assert response.status_code == 200
data = response.context
assert data['quantity'] == 0
def test_total_with_discount(client, sale, request_cart, product_in_stock):
sales = Sale.objects.all()
variant = product_in_stock.variants.get()
request_cart.add(variant, 1)
line = request_cart.lines.first()
assert line.get_total(discounts=sales) == TaxedMoney(
net=Money(5, 'USD'), gross=Money(5, 'USD'))
def test_cart_queryset(customer_user):
canceled_cart = Cart.objects.create(status=CartStatus.CANCELED)
canceled = Cart.objects.canceled()
assert canceled.filter(pk=canceled_cart.pk).exists()
def test_find_open_cart_for_user(customer_user, opened_user_cart):
assert find_open_cart_for_user(customer_user) == opened_user_cart
cart = Cart.objects.create(user=customer_user)
assert find_open_cart_for_user(customer_user) == cart
assert (
Cart.objects.get(pk=opened_user_cart.pk).status == CartStatus.CANCELED)
def test_cart_repr():
cart = Cart()
assert repr(cart) == 'Cart(quantity=0)'
cart.quantity = 1
assert repr(cart) == 'Cart(quantity=1)'
def test_cart_get_total_empty(db):
cart = Cart.objects.create()
with pytest.raises(AttributeError):
cart.get_total()
def test_cart_change_user(customer_user):
cart1 = Cart.objects.create()
cart1.change_user(customer_user)
cart2 = Cart.objects.create()
cart2.change_user(customer_user)
old_cart = Cart.objects.get(pk=cart1.pk)
assert old_cart.user == customer_user
assert old_cart.status == CartStatus.CANCELED
def test_cart_line_repr(product_in_stock, request_cart_with_item):
variant = product_in_stock.variants.get()
line = request_cart_with_item.lines.first()
assert repr(line) == 'CartLine(variant=%r, quantity=%r, data=%r)' % (
variant, line.quantity, line.data)
def test_cart_line_state(product_in_stock, request_cart_with_item):
variant = product_in_stock.variants.get()
line = request_cart_with_item.lines.first()
assert line.__getstate__() == (variant, line.quantity, line.data)
line.__setstate__((variant, 2, line.data))
assert line.quantity == 2
def test_get_category_variants_and_prices(
default_category, product_in_stock, request_cart_with_item):
result = list(utils.get_category_variants_and_prices(
request_cart_with_item, default_category))
variant = product_in_stock.variants.get()
assert result[0][0] == variant
def test_update_view_must_be_ajax(customer_user, rf):
request = rf.post(reverse('home'))
request.user = customer_user
request.discounts = None
result = update(request, 1)
assert result.status_code == 302
def test_get_or_create_db_cart(customer_user, db, rf):
def view(request, cart, *args, **kwargs):
return HttpResponse()
decorated_view = utils.get_or_create_db_cart()(view)
assert Cart.objects.filter(user=customer_user).count() == 0
request = rf.get(reverse('home'))
request.user = customer_user
decorated_view(request)
assert Cart.objects.filter(user=customer_user).count() == 1
request.user = AnonymousUser()
decorated_view(request)
assert Cart.objects.filter(user__isnull=True).count() == 1
def test_get_cart_data(request_cart_with_item, shipping_method):
shipment_option = get_shipment_options('PL')
cart_data = utils.get_cart_data(
request_cart_with_item, shipment_option, 'USD', None)
assert cart_data['cart_total'] == TaxedMoney(
net=Money(10, 'USD'), gross=Money(10, 'USD'))
assert cart_data['total_with_shipping'].start == TaxedMoney(
net=Money(20, 'USD'), gross=Money(20, 'USD'))
def test_get_cart_data_no_shipping(request_cart_with_item):
shipment_option = get_shipment_options('PL')
cart_data = utils.get_cart_data(
request_cart_with_item, shipment_option, 'USD', None)
cart_total = cart_data['cart_total']
assert cart_total == TaxedMoney(
net=Money(10, 'USD'), gross=Money(10, 'USD'))
assert cart_data['total_with_shipping'].start == cart_total
|
11,563 | 7e9234597455aeaa1b0bdff7d179cd04b4d2194e | #!/usr/bin/env python
# coding=utf-8
import urllib3
import re
from db.db_mongo import add_fund_info, fund_set_count, update_fund_info
pool = urllib3.PoolManager()
# Initialise the fund-information table from the EastMoney fund list.
def init_fund_infos():
    """Download the full fund code list and insert every fund into the DB.

    The remote payload is a JS assignment of the form
    ``var r = [["000001","HXCZHH","...","...",...], ...];``.
    """
    data_re = re.compile(r'=.\[(.*?)\];$')
    item_re = re.compile(r'\[(\".*?\")\]')
    # today = datetime.datetime.now().strftime('%Y-%m-%d')
    r_url = 'http://fund.eastmoney.com/js/fundcode_search.js'
    res_data = pool.request('GET', r_url).data.decode('utf-8')
    for line in data_re.findall(res_data):
        if line != "":
            for line2 in item_re.findall(line):
                item_list = line2.split(',')
                # Fields still carry their JSON quotes; strip them.
                fund_code = item_list[0].replace("\"", "")
                fund_name = item_list[2].replace("\"", "")
                add_fund_info(fund_code, fund_name)
# Refresh the fund-information table from the EastMoney fund list.
def update_fund_infos():
    """Update every fund's name in the DB, bootstrapping the table if empty.

    Falls back to init_fund_infos() on first run; otherwise re-downloads the
    fund list and updates each (code, name) pair in place.
    """
    if fund_set_count() == 0:
        init_fund_infos()
    else:
        data_re = re.compile(r'=.\[(.*?)\];$')
        item_re = re.compile(r'\[(\".*?\")\]')
        r_url = 'http://fund.eastmoney.com/js/fundcode_search.js'
        res_data = pool.request('GET', r_url).data.decode('utf-8')
        for line in data_re.findall(res_data):
            if line != "":
                for line2 in item_re.findall(line):
                    item_list = line2.split(',')
                    # Bug fix: the split fields keep their JSON quotes; strip
                    # them exactly as init_fund_infos() does.  Previously the
                    # codes written here ('"000001"') could never match the
                    # unquoted codes inserted at bootstrap time.
                    fund_code = item_list[0].replace("\"", "")
                    fund_name = item_list[2].replace("\"", "")
                    update_fund_info(fund_code, fund_name)
if __name__ == "__main__":
update_fund_infos()
|
11,564 | 5add37663126aa7290c17d8891b753ef567001cf | from django.db import models
class Pais(models.Model):
    """A country."""

    nombre = models.CharField(max_length=30)

    def __str__(self):
        # Human-readable label for the admin and shell (otherwise Django
        # shows the unhelpful "Pais object (N)").
        return self.nombre
class Ciudad(models.Model):
    """A city belonging to a country."""

    nombre = models.CharField(verbose_name='Nombre de la ciudad', max_length=30)
    pais = models.ForeignKey(Pais, on_delete=models.CASCADE)
    poblacion = models.PositiveIntegerField()

    def __str__(self):
        # Human-readable label for the admin and shell.
        return self.nombre
|
11,565 | c92ca95b3219be78baede0f75f701e476a406da2 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
import numpy as np
import pandas as pd
import pytest
from quilt3distribute import FeatureDefinition
from quilt3distribute.validation import Validator, validate
@pytest.mark.parametrize("dtype, validation_functions", [
(str, []),
(str, ()),
(str, (lambda x: x == "hello world",)),
(str, (lambda x: "hello" in x, lambda x: "world" in x)),
(Path, ()),
pytest.param(str, {lambda x: x == "non-iterable-type"}, marks=pytest.mark.raises(exception=TypeError)),
pytest.param(str, ("this will fail because not callable",), marks=pytest.mark.raises(exception=TypeError))
])
def test_feature_def_init(dtype, validation_functions):
fd = FeatureDefinition(dtype, validation_functions)
# Specific check for path behavior
if dtype == Path:
assert fd.cast_values
@pytest.mark.parametrize("values, definition, drop_on_error, expected_drops", [
(np.ones(5), FeatureDefinition(np.float64), False, set()),
(np.ones(5), FeatureDefinition(np.float64), True, set()),
(np.array(["hello", "world"]), FeatureDefinition(str), False, set()),
(np.ones(5), FeatureDefinition(np.float64, (lambda x: x == 1,)), False, set()),
pytest.param(np.ones(5), FeatureDefinition(Path), False, set(), marks=pytest.mark.raises(exception=ValueError)),
(np.ones(5), FeatureDefinition(Path), True, set([0, 1, 2, 3, 4])),
pytest.param(
np.array(["hello", "world"]), FeatureDefinition(int, cast_values=True), False, set(),
marks=pytest.mark.raises(exception=ValueError)
),
(np.array(["hello", "world"]), FeatureDefinition(int, cast_values=True), True, set([0, 1])),
pytest.param(
np.array(["hello", "world"]), FeatureDefinition(int), False, set(),
marks=pytest.mark.raises(exception=TypeError)
),
(np.array(["hello", "world"]), FeatureDefinition(int), True, set([0, 1])),
pytest.param(
np.array(["1.png", "2.png"]), FeatureDefinition(Path), False, set(),
marks=pytest.mark.raises(exception=FileNotFoundError)
),
(np.array(["1.png", "2.png"]), FeatureDefinition(Path), True, set([0, 1])),
pytest.param(
np.array([Path("1.png"), Path("2.png")]), FeatureDefinition(Path), False, set(),
marks=pytest.mark.raises(exception=FileNotFoundError)
),
(np.array([Path("1.png"), Path("2.png")]), FeatureDefinition(Path), True, set([0, 1])),
pytest.param(
np.ones(5), FeatureDefinition(np.float64, (lambda x: x == 2,)), False, set(),
marks=pytest.mark.raises(exception=ValueError)
),
(np.ones(5), FeatureDefinition(np.float64, (lambda x: x == 2,)), True, set([0, 1, 2, 3, 4]))
])
def test_validator_process(values, definition, drop_on_error, expected_drops):
v = Validator("test", values, definition, drop_on_error)
results = v.process()
if drop_on_error:
assert expected_drops == {r.index for r in results.errored_results}
else:
assert len(results.errored_results) == 0
@pytest.mark.parametrize("data, drop_on_error, expected_len", [
(pd.DataFrame([{"floats": 1.0}, {"floats": 2.0}]), False, 2),
pytest.param(
pd.DataFrame([{"test_path": "1.png"}, {"test_path": "2.png"}]), False, 2,
marks=pytest.mark.raises(exception=FileNotFoundError)
),
(pd.DataFrame([{"test_path": "1.png"}, {"test_path": "2.png"}]), True, 0)
])
def test_validate(data, drop_on_error, expected_len):
results = validate(data, drop_on_error=drop_on_error)
if drop_on_error:
assert len(results.data) == expected_len
|
11,566 | a26d1381f1a923c023ab6f7961fed15892a6caca | from datetime import datetime
def voto(nascimento, ano=None):
    """Return the voting status (Brazilian rules) for a birth year.

    Args:
        nascimento: year of birth.
        ano: reference year; defaults to the current year.  Previously this
            was read from the module-level global ``ano`` -- callers that
            relied on that behaviour are unaffected, since the global was
            always set to the current year.

    Returns:
        A formatted sentence: PROIBIDO (<16 years old),
        OBRIGATÓRIO (18-65), OPCIONAL otherwise (16-17 and >65).
    """
    if ano is None:
        ano = datetime.now().year
    idade = ano - nascimento
    if idade < 16:
        return f'Com {idade} anos, sua situação de voto é: PROIBIDO'
    elif 18 <= idade <= 65:
        return f'Com {idade} anos, sua situação de voto é: OBRIGATÓRIO'
    else:
        return f'Com {idade} anos, sua situação de voto é: OPCIONAL'
print('=' * 12 + 'Desafio 101' + '=' * 12)
ano = datetime.now().year  # reference year, read by voto() via the global
data = int(input('Digite o ano de nascimento: '))  # birth year from the user
resp = voto(data)
print(resp)
|
def codechef(n, a, b, c, floor):
    """Minimum elevator time: base travel plus a possible detour.

    The base cost is ``|b - a| + c``.  If any stop lies strictly between the
    two endpoints no detour is needed; otherwise the elevator pays a round
    trip (2x) to the stop closest to the [lo, hi] span.  ``n`` (the number of
    stops) is accepted for interface compatibility but not needed.
    """
    base = abs(b - a) + c
    hi, lo = max(a, b), min(a, b)
    # A stop strictly inside the travelled span costs nothing extra.
    if any(lo < stop < hi for stop in floor):
        return base
    # Every remaining stop is at or beyond one of the endpoints; the detour
    # is the distance from the nearer endpoint, paid both ways.
    detours = [stop - hi for stop in floor if stop >= hi]
    detours += [lo - stop for stop in floor if stop <= lo]
    return base + 2 * min(detours)
if __name__ == "__main__":
    # Read T test cases: each supplies n, a, b, c and then the list of stops.
    t = int(input())
    while t>0:
        n,a,b,c = list(map(int,input().split()))
        floor = list(map(int,input().split()))
        print(codechef(n,a,b,c,floor))
        t = t-1
|
11,568 | 1565b7521cb4cef6eeb7a48094398f3eee8e7f10 | from .base import *
from .logging import *
DEBUG = True
SERVER_EMAIL = "Wikilink Local <wikilink.local@localhost.localdomain>"
DEFAULT_FROM_EMAIL = SERVER_EMAIL
# Django Debug Toolbar config
# ------------------------------------------------------------------------------
# Sometimes, developers do not want the debug toolbar on their local environments,
# so we can disable it by not passing a REQUIREMENTS_FILE variable when building
# the docker containers
# Bug fix: use os.environ.get().  The comment above promises the toolbar is
# skipped when REQUIREMENTS_FILE is *not set*, but an indexing lookup would
# raise KeyError in that case instead of simply disabling the toolbar.
if os.environ.get("REQUIREMENTS_FILE") == "local.txt":
    INSTALLED_APPS += [
        "debug_toolbar",
        "django_extensions",
    ]

    MIDDLEWARE += [
        "debug_toolbar.middleware.DebugToolbarMiddleware",
    ]

    INTERNAL_IPS = ["127.0.0.1", "localhost", "0.0.0.0"]

    def show_toolbar(request):
        # Always show the toolbar locally, regardless of INTERNAL_IPS.
        return True

    DEBUG_TOOLBAR_CONFIG = {
        "SHOW_TOOLBAR_CALLBACK": show_toolbar,
    }
|
11,569 | 6ce1c576b8b297e7d17d786a45424c797e6142ee | #!/usr/bin/env python3
import json
import sys
# Summarise which modules in the annotation file are marked "inline",
# counting inlined modules other than the top module.
if len(sys.argv) != 2:
    print(f"usage {__file__} <annotation file>")
    sys.exit(1)

with open(sys.argv[1], "r") as f:
    j = json.load(f)

topmodule = j["top_module"]

cnt = 0
for i, (modulename, annots) in enumerate(j["modules"].items(), start=1):
    # dict.get() replaces the manual `"inline" in annots` check.
    inlined = annots.get("inline", False)
    is_top = modulename == topmodule
    if inlined and not is_top:
        cnt += 1
    msg = " (TOP MODULE)" if is_top else ""
    print(f"{i:2d}. {modulename:30s} {inlined}{msg}")

print(f"\nTotal count: {cnt}")
|
11,570 | 92375817b72e8e982169748cab1af709331e6f01 | # coding: utf-8
# Author:南岛鹋
# Blog: www.ndmiao.cn
# Date :2021/2/27 21:09
# Tool :PyCharm
import sys
import pymysql
import time
import json
import traceback #追踪异常
import requests
def get_conn():
    """Open a MySQL connection to the local ``cov`` database.

    :return: (connection, cursor) pair
    """
    # Create the connection.
    conn = pymysql.connect(host="127.0.0.1",
                           user="root",
                           password="123456",
                           db="cov",
                           charset="utf8")
    # Create the cursor; result sets are returned as tuples by default.
    cursor = conn.cursor()
    return conn, cursor
def close_conn(conn, cursor):
    """Close the cursor and the connection, tolerating None for either."""
    # Cursor first, then connection -- same order as the original.
    for resource in (cursor, conn):
        if resource:
            resource.close()
def get_urltext(url):
    """GET *url* with a desktop-browser User-Agent and return the response.

    Bug fix: the headers dict was previously passed positionally, which
    ``requests.get()`` interprets as the ``params`` (query-string) argument,
    so the User-Agent header was never actually sent.  It must be passed as
    the ``headers=`` keyword.
    """
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
    }
    return requests.get(url, headers=headers)
def get_china_history():
    """Fetch China's day-by-day totals and daily increments from the QQ API.

    :return: dict keyed by date "YYYY.MM.DD"; each value holds the cumulative
        confirm/suspect/heal/dead counts plus the same-day ``*_add`` deltas.
    """
    url = "https://api.inews.qq.com/newsqa/v1/query/inner/publish/modules/list?modules=chinaDayList,chinaDayAddList,nowConfirmStatis,provinceCompare"
    r = get_urltext(url)
    res = json.loads(r.text)
    data_all = res['data']
    history = {}  # historical data keyed by normalised date string
    for i in data_all['chinaDayList']:
        # The API splits year ("y") from "MM.DD"; normalise to YYYY.MM.DD.
        ds = i["y"] + "." + i["date"]
        tup = time.strptime(ds, "%Y.%m.%d")
        ds = time.strftime("%Y.%m.%d", tup)
        confirm = i["confirm"]
        suspect = i["suspect"]
        heal = i["heal"]
        dead = i["dead"]
        history[ds] = {"confirm": confirm, "suspect": suspect, "heal": heal, "dead": dead}
    for i in data_all['chinaDayAddList']:
        ds = i["y"] + "." + i["date"]
        tup = time.strptime(ds, "%Y.%m.%d")
        ds = time.strftime("%Y.%m.%d", tup)
        confirm = i["confirm"]
        suspect = i["suspect"]
        heal = i["heal"]
        dead = i["dead"]
        # Merge the same-day increments into the cumulative record.
        history[ds].update({"confirm_add": confirm, "suspect_add": suspect, "heal_add": heal, "dead_add": dead})
    return history
def get_china_details():
"""
:return: 返回历史数据和当日详细数据
"""
url = "https://view.inews.qq.com/g2/getOnsInfo?name=disease_h5"
r = get_urltext(url)
res = json.loads(r.text)
data_all = json.loads(res['data'])
details = []
update_time = data_all["lastUpdateTime"]
data_country = data_all["areaTree"]
data_province = data_country[0]["children"]
for pro_infos in data_province:
province = pro_infos["name"] # 省名
for city_infos in pro_infos["children"]:
city = city_infos["name"]
confirm = city_infos["total"]["confirm"]
confirm_add = city_infos["today"]["confirm"]
heal = city_infos["total"]["heal"]
dead = city_infos["total"]["dead"]
details.append([update_time, province, city, confirm, confirm_add, heal, dead])
return details
def insert_china_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_china_details() # 0 是历史数据字典,1 最新详细数据列表
conn, cursor = get_conn()
#sql = "update china_details set update_time=%s,confirm=%s,confirm_add=%s,heal=%s,dead=%s where province=%s and city=%s"
sql = "insert into china_details(update_time,province,city,confirm,confirm_add,heal,dead) values(%s,%s,%s,%s,%s,%s,%s)"
sql_query = 'select %s=(select update_time from china_details order by id desc limit 1)' #对比当前最大时间戳
cursor.execute(sql_query,li[0][0])
if not cursor.fetchone()[0]:
print(f"{time.asctime()}开始插入最新数据")
for item in li:
cursor.execute(sql, item)
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}插入最新数据完毕")
else:
print(f"{time.asctime()}已是最新数据!")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def update_china_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_china_details() # 0 是历史数据字典,1 最新详细数据列表
conn, cursor = get_conn()
sql = "update china_details set update_time=%s,confirm=%s,confirm_add=%s,heal=%s,dead=%s where province=%s and city=%s"
# sql = "insert into china_details(update_time,province,city,confirm,confirm_add,heal,dead) values(%s,%s,%s,%s,%s,%s,%s)"
sql_query = 'select %s=(select update_time from china_details order by id desc limit 1)' #对比当前最大时间戳
cursor.execute(sql_query,li[0][0])
if not cursor.fetchone()[0]:
print(f"{time.asctime()}开始更新最新数据")
for item in li:
cursor.execute(sql, [item[0],item[3],item[4],item[5],item[6],item[1],item[2]])
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}更新最新数据完毕")
else:
print(f"{time.asctime()}已是最新数据!")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def china_details():
    """Populate ``china_details``: insert on first run, update afterwards.

    Decides based on the current row count; more than one row means the
    table was already bootstrapped.
    """
    cursor = None
    conn = None
    try:
        conn, cursor = get_conn()
        sql = "select count(*) from china_details"
        cursor.execute(sql)
        num = cursor.fetchall()[0][0]
        if num > 1:
            update_china_details()
        else:
            insert_china_details()
    # NOTE(review): bare `except` swallows every error (including
    # KeyboardInterrupt); narrow it when this module is refactored.
    except:
        traceback.print_exc()
    finally:
        close_conn(conn, cursor)
def update_history():
"""
更新历史数据
:return:
"""
cursor = None
conn = None
try:
dic = get_china_history() # 0 是历史数据字典,1 最新详细数据列表
print(f"{time.asctime()}开始更新历史数据")
conn, cursor = get_conn()
sql = "insert into china_history values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
sql_query = "select confirm from china_history where ds=%s"
for k, v in dic.items():
# item 格式 {'2020-01-13': {'confirm': 41, 'suspect': 0, 'heal': 0, 'dead': 1}
if not cursor.execute(sql_query, k):
cursor.execute(sql, [k, v.get("confirm"), v.get("confirm_add"), v.get("suspect"),
v.get("suspect_add"), v.get("heal"), v.get("heal_add"),
v.get("dead"), v.get("dead_add")])
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据更新完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def get_city_details():
url = 'https://api.inews.qq.com/newsqa/v1/automation/modules/list?modules=FAutoforeignList'
r = get_urltext(url)
res = json.loads(r.text)
data = res['data']
data_all = data['FAutoforeignList']
city_details = []
for i in data_all:
ds = i["y"] + "." + i["date"]
tup = time.strptime(ds, "%Y.%m.%d")
ds = time.strftime("%Y.%m.%d", tup)
country = i["name"]
country_confirm = i["confirm"]
dead = i["dead"]
heal = i["heal"]
# print(i.setdefault('children',0))
try:
for j in i['children']:
city_ds = i["y"] + "." + j["date"]
tup = time.strptime(city_ds, "%Y.%m.%d")
city_ds = time.strftime("%Y.%m.%d", tup)
city = j["name"]
nameMap = j["nameMap"]
city_confirm = j["confirm"]
city_dead = j["dead"]
city_heal = j["heal"]
city_details.append([city_ds, country, city, nameMap, city_confirm, city_dead, city_heal])
except:
city_details.append([ds, country, country, '', country_confirm, dead, heal])
return city_details
def get_country_details():
url = "https://api.inews.qq.com/newsqa/v1/automation/foreign/country/ranklist"
r = get_urltext(url)
res = json.loads(r.text)
data_all = res['data']
country_details = []
for i in data_all:
ds = i["y"] + "." + i["date"]
tup = time.strptime(ds, "%Y.%m.%d")
ds = time.strftime("%Y.%m.%d", tup)
country = i["name"]
continent = i["continent"]
confirm = i["confirm"]
confirmAdd = i["confirmAdd"]
dead = i["dead"]
heal = i["heal"]
nowConfirm = i["nowConfirm"]
country_details.append([ds, country, continent, confirm, confirmAdd, dead, heal, nowConfirm])
return country_details
def update_country_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_country_details()
print(f"{time.asctime()}开始更新历史数据")
conn, cursor = get_conn()
update_sql = "update country_details set update_time=%s,confirm=%s,confirm_add=%s,dead=%s,heal=%s,nowConfirm=%s where country=%s"
#sql = "insert into country_details(update_time,country,continent,confirm,confirm_add,dead,heal,nowConfirm) values(%s,%s,%s,%s,%s,%s,%s,%s)"
sql_query = 'select %s=(select update_time from country_details where country=%s)'
for i in li:
cursor.execute(sql_query,[i[0],i[1]])
if not cursor.fetchone()[0]:
cursor.execute(update_sql,[i[0],i[3],i[4],i[5],i[6],i[7],i[1]])
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据更新完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def insert_country_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_country_details()
print(f"{time.asctime()}开始插入历史数据")
conn, cursor = get_conn()
# update_sql = "update country_details set update_time=%s,confirm=%s,confirm_add=%s,dead=%s,heal=%s,nowConfirm=%s where country=%s"
sql = "insert into country_details(update_time,country,continent,confirm,confirm_add,dead,heal,nowConfirm) values(%s,%s,%s,%s,%s,%s,%s,%s)"
sql_query = "select update_time from country_details where country=%s"
for i in li:
if not cursor.execute(sql_query, i[1]):
cursor.execute(sql,i)
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据插入完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def country_details():
cursor = None
conn = None
try:
conn, cursor = get_conn()
sql = "select count(*) from country_details"
cursor.execute(sql)
num = cursor.fetchall()[0][0]
if num > 1:
update_country_details()
else:
insert_country_details()
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def insert_city_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_city_details()
print(f"{time.asctime()}开始更新历史数据")
conn, cursor = get_conn()
# update_sql = "update country_details set update_time=%s,confirm=%s,confirm_add=%s,dead=%s,heal=%s,nowConfirm=%s where country=%s"
sql = "insert into city_details(update_time,country,city,nameMap,confirm,dead,heal) values(%s,%s,%s,%s,%s,%s,%s)"
sql_query = "select update_time from city_details where country=%s and city=%s"
for i in li:
if not cursor.execute(sql_query, [i[1],i[2]]):
cursor.execute(sql,i)
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据更新完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def update_city_details():
"""
更新 details 表
:return:
"""
cursor = None
conn = None
try:
li = get_city_details()
print(f"{time.asctime()}开始更新历史数据")
conn, cursor = get_conn()
update_sql = "update city_details set update_time=%s,confirm=%s,dead=%s,heal=%s where country=%s and city=%s"
#sql = "insert into city_details(update_time,country,continent,confirm,confirm_add,dead,heal,nowConfirm) values(%s,%s,%s,%s,%s,%s,%s,%s)"
sql_query = 'select %s=(select update_time from city_details where country=%s and city=%s)'
for i in li:
cursor.execute(sql_query,[i[0],i[1],i[2]])
if not cursor.fetchone()[0]:
cursor.execute(update_sql,[i[0],i[4],i[5],i[6],i[1],i[2]])
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据更新完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def city_details():
cursor = None
conn = None
try:
conn, cursor = get_conn()
sql = "select count(*) from city_details"
cursor.execute(sql)
num = cursor.fetchall()[0][0]
if num > 1:
update_city_details()
else:
insert_city_details()
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
def get_global_history():
url = "https://api.inews.qq.com/newsqa/v1/automation/modules/list?modules=FAutoGlobalStatis,FAutoGlobalDailyList,FAutoCountryConfirmAdd"
r = get_urltext(url)
res = json.loads(r.text)
data_all = res['data']
gd = []
for i in data_all['FAutoGlobalDailyList']:
ds = i["y"] + "." + i["date"]
tup = time.strptime(ds, "%Y.%m.%d")
ds = time.strftime("%Y.%m.%d", tup)
confirm = i['all']['confirm']
dead = i['all']['dead']
heal = i['all']['heal']
newAddConfirm = i['all']['newAddConfirm']
deadRate = i['all']['deadRate']
healRate = i['all']['healRate']
gd.append([ds, confirm, dead, heal, newAddConfirm, deadRate, healRate])
return gd
def update_global_history():
"""
更新历史数据
:return:
"""
cursor = None
conn = None
try:
gh = get_global_history() # 0 是历史数据字典,1 最新详细数据列表
print(f"{time.asctime()}开始更新历史数据")
conn, cursor = get_conn()
sql = "insert into global_history values(%s,%s,%s,%s,%s,%s,%s)"
sql_query = "select confirm from global_history where ds=%s"
for i in gh:
if not cursor.execute(sql_query, i[0]):
cursor.execute(sql, i)
conn.commit() # 提交事务 update delete insert操作
print(f"{time.asctime()}历史数据更新完毕")
except:
traceback.print_exc()
finally:
close_conn(conn, cursor)
if __name__ == "__main__":
print(sys.argv)
print("更新china_details中...")
china_details()
print("更新china_history中...")
update_history()
print("更新city_details中...")
city_details()
print("更新country_details中...")
country_details()
print("更新global_history中...")
update_global_history()
|
11,571 | 9440eff5ce7ed1fac9948a21808977d0bb848372 | from django.shortcuts import render
from django.contrib import messages
# Create your views here.
def home(request):
    """Render the home page with the latest TechRadar articles."""
    import requests
    import json
    # NOTE(review): the API key is hard-coded in the URL -- move it to
    # settings/environment before this ships anywhere public.
    api_request = requests.get("https://newsapi.org/v2/everything?sources=techradar&apiKey=96cc98dbabea497b921dcd946f249e78")
    api = json.loads(api_request.content)
    return render(request,'home.html',{'api':api})
def search(request):
    """Render search results for the user's query against the news API.

    Bug fix: the submitted term (``lookup``) was read but never sent to the
    API, so every search returned the same unfiltered article list.  It is
    now passed as the ``q`` query parameter (URL-encoded by ``requests``).
    """
    if request.method == 'POST':
        import requests
        import json
        lookups = request.POST.get('lookup')
        lookup_request = requests.get(
            "https://newsapi.org/v2/everything",
            params={
                'sources': 'techradar',
                'q': lookups,
                # NOTE(review): hard-coded API key -- move to settings.
                'apiKey': '96cc98dbabea497b921dcd946f249e78',
            })
        update = json.loads(lookup_request.content)
        return render(request,'search.html',{'lookup':lookups,'update':update})
    else:
        messages.error(request, 'Not Found')
        return render(request,'search.html')
|
11,572 | adfca1376e8dfed3f51c57ce5e7fb1ac32ec4b1a | import pandas as pd
import numpy as np
import requests
import json
from langdetect import detect
import nltk
from nltk.tokenize import sent_tokenize
nltk.download('punkt')
df = pd.read_csv('noNAN4Body.csv',encoding = "utf-8", low_memory=False)
methodName = 'doInBackground'
includedLines = []
includedbodies = []
sentences = []
for i in range(0,len(df)):
if df.iloc[i,0] == methodName:
includedLines.append(i)
includedbodies.append(df.iloc[i,1])
print(len(includedbodies))
for s in includedbodies:
sentences.append(sent_tokenize(s))
# flatten the list
sentences = [y for x in sentences for y in x if len(y) > 10]
prediction_df = pd.DataFrame(index = None)
prediction_df["sentences"] = sentences
prediction_df.to_csv("./methodFiles/doInBackground.csv",index=False,encoding='utf_8_sig' )
|
11,573 | b16e0e2e849a0899158c6b8649ef182748fb0d6e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Импортируем библиотеку pygame
import pygame
from pygame import *
#Объявляем переменные
WIN_WIDTH = 800 # Ширина создаваемого окна
WIN_HEIGHT = 640 # Высота
DISPLAY = (WIN_WIDTH, WIN_HEIGHT) # Группируем ширину и высоту в одну переменную
BACKGROUND_COLOR = "#004400"
######################## platform
PLATFORM_WIDTH = 32
PLATFORM_HEIGHT = 32
PLATFORM_COLOR = "#FF6262"
#############################
level = [
"-------------------------",
"- -",
"- -",
"- -",
"- -- -",
"- -",
"-- -",
"- -",
"- --- -",
"- -",
"- -",
"- --- -",
"- -",
"- ----------- -",
"- -",
"- - -",
"- -- -",
"- -",
"- -",
"-------------------------"]
############################################
def main():
    """Initialise pygame, draw the level tiles once, then run the event loop."""
    pygame.init()  # mandatory PyGame initialisation
    screen = pygame.display.set_mode(DISPLAY)  # create the window
    pygame.display.set_caption("Super Mario Boy")  # window title

    bg = Surface((WIN_WIDTH, WIN_HEIGHT))  # visible surface used as the background
    bg.fill(Color(BACKGROUND_COLOR))  # fill it with a solid colour

    # Draw the level tiles.
    x = y = 0  # pixel coordinates of the current tile
    for row in level:        # every row of the level map
        for col in row:      # every character in the row
            if col == "-":
                # Create a platform block, fill it with colour and draw it.
                pf = Surface((PLATFORM_WIDTH, PLATFORM_HEIGHT))
                pf.fill(Color(PLATFORM_COLOR))
                screen.blit(pf, (x, y))
            x += PLATFORM_WIDTH   # advance one platform width per character
        y += PLATFORM_HEIGHT      # next row of blocks
        x = 0                     # restart from the left on every new row

    while 1:  # main program loop
        for e in pygame.event.get():  # handle events
            if e.type == QUIT:
                # Bug fix: `raise SystemExit, "QUIT"` is Python 2-only syntax
                # and a SyntaxError under Python 3.
                raise SystemExit("QUIT")
        # NOTE(review): blitting the background every frame paints over the
        # platforms drawn above -- kept as-is to preserve behaviour.
        screen.blit(bg, (0, 0))
        pygame.display.update()  # flush all changes to the screen


if __name__ == "__main__":
    main()
|
11,574 | 06f976d1797edf572aaa5aba0d755f95c56b3c26 | from typing import Sequence
import pandas as pd
from pandas.api import types as pdt
from visions.relations import IdentityRelation, TypeRelation
from visions.types.type import VisionsBaseType
def _get_relations(cls) -> Sequence[TypeRelation]:
    """Build *cls*'s type relations: String is reachable from Object."""
    # Local import avoids a circular import with visions.types.
    from visions.types import Object
    relations = [IdentityRelation(cls, Object)]
    return relations
pandas_has_string_dtype_flag = hasattr(pdt, "is_string_dtype")
class String(VisionsBaseType):
    """**String** implementation of :class:`visions.types.type.VisionsBaseType`.
    Examples:
        >>> x = pd.Series(['rubin', 'carter', 'champion'])
        >>> x in visions.String
        True
    """
    @classmethod
    def get_relations(cls) -> Sequence[TypeRelation]:
        # Delegate to the module-level helper: String relates only to Object.
        return _get_relations(cls)
    @classmethod
    def contains_op(cls, series: pd.Series) -> bool:
        # Membership test: True when the series holds strings.
        # TODO: without the object check this passes string categories... is there a better way?
        if pdt.is_categorical_dtype(series):
            return False
        elif not pdt.is_object_dtype(series):
            # Non-object dtype: only pandas' dedicated string dtype counts
            # (the is_string_dtype helper is absent on older pandas versions).
            if pandas_has_string_dtype_flag and pdt.is_string_dtype(series):
                return True
            return False
        if series.hasnans:
            series = series.dropna()
            if series.empty:
                # Object series that was entirely NaN: not considered String.
                return False
        # Object dtype: fall back to an element-wise isinstance check.
        return all(isinstance(v, str) for v in series)
|
11,575 | 72afe244f76a37fa80b24b49fcc8617d3656b5d2 | # Copyright (c) 2020-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvidia.dali.pipeline import Pipeline
import nvidia.dali.fn as fn
import numpy as np
from nose_utils import raises
def get_sequence(shape, offset=0):
    """Build a test sequence of shape *shape* whose i-th frame is filled with ``offset + i``."""
    assert len(shape) > 1
    frame_shape = list(shape)
    num_frames = frame_shape[0]
    frame_shape[0] = 1
    frames = [np.full(frame_shape, offset + idx) for idx in range(num_frames)]
    return np.concatenate(frames, axis=0)
def get_sequences(batch_size, shape):
    """Build a batch of *batch_size* sequences; sample i starts at value ``i * shape[0]``."""
    seq_len = shape[0]
    return [get_sequence(shape, idx * seq_len) for idx in range(batch_size)]
def reorder_sample(sample, seq_len, order):
    """
    Return *sample* with its *seq_len* frames rearranged according to *order*
    (indices may repeat or drop frames).
    """
    frames = np.split(sample, seq_len)
    picked = [frames[idx] for idx in order]
    return np.concatenate(picked, axis=0)
def reorder(input, seq_len, reorders, persample_reorder=True):
    """
    Rearrange every sample of a batch of sequences.

    *reorders* is either a single order shared by the whole batch
    (``persample_reorder=False``) or one order per sample.
    """
    return [
        reorder_sample(sample, seq_len, reorders[idx] if persample_reorder else reorders)
        for idx, sample in enumerate(input)
    ]
def to_batch(tl, batch_size):
    """Materialize the first *batch_size* entries of a TensorList-like object as numpy arrays."""
    out = []
    for idx in range(batch_size):
        out.append(np.array(tl[idx]))
    return out
def check_sequence_rearrange(batch_size, shape, reorders, persample_reorder=True, op_type="cpu",
                             layout=""):
    # Run fn.sequence_rearrange through a DALI pipeline and compare its output
    # against the pure-python `reorder` baseline above.
    pipe = Pipeline(batch_size=batch_size, num_threads=4, device_id=0)
    with pipe:
        input = fn.external_source(lambda: get_sequences(batch_size, shape), layout=layout)
        frames = input.gpu() if op_type == "gpu" else input
        # Orders can be fed per sample (via external_source) or as one static list.
        order = fn.external_source(lambda: reorders) if persample_reorder else reorders
        rearranged = fn.sequence_rearrange(frames, new_order=order, device=op_type)
        pipe.set_outputs(rearranged, input)
    pipe.build()
    result, input = pipe.run()
    if op_type == "gpu":
        # Bring GPU results back to host memory before comparing.
        result = result.as_cpu()
    input = to_batch(input, batch_size)
    baseline = reorder(input, shape[0], reorders, persample_reorder)
    for i in range(batch_size):
        np.testing.assert_array_equal(result[i], baseline[i])
# (new_order, per_sample) pairs driving the positive tests below.
order_0 = ([3, 2, 1, 0], False) # one static order shared by the whole batch
# Per-sample orders: lengths may differ and indices may repeat.
order_1 = ([np.int32([3, 0]),
            np.int32([2, 1]),
            np.int32([1, 1]),
            np.int32([0, 1, 2]),
            np.int32([3])], True)
# Per-sample orders that keep frames in (a subset of) their original positions.
order_2 = ([np.int32([0]),
            np.int32([1]),
            np.int32([2]),
            np.int32([3]),
            np.int32([0, 1, 2, 3])], True)
def test_sequence_rearrange():
    # Nose-style generator test: sweep device x shape x order-spec x layout.
    for dev in ["cpu", "gpu"]:
        for shape in [[4, 3, 2], [5, 1]]:
            for new_order, per_sample in [order_0, order_1, order_2]:
                for layout in ["FHW"[:len(shape)], ""]:
                    yield check_sequence_rearrange, 5, shape, new_order, per_sample, dev, layout
def check_fail_sequence_rearrange(batch_size, shape, reorders, persample_reorder=True,
                                  op_type="cpu", layout=""):
    # Negative-path wrapper: callers wrap this in `raises(...)`.
    check_sequence_rearrange(batch_size, shape, reorders, persample_reorder, op_type, layout)
def test_fail_sequence_rearrange():
    shape = [5, 1]
    # Each entry is (new_order, per_sample); paired 1:1 with error_msgs below.
    orders = [
        ([6, 7], False),
        ([-1], False),
        ([], False),
        ([np.int32([0]), np.int32([])], True),
        ([np.int32([6, 7]), np.int32([0])], True),
        ([np.int32([-1]), np.int32([0])], True),
        ([np.int32([[1], [2]]), np.int32([[1], [2]])], True)
    ]
    # Glob patterns matched against the raised RuntimeError message.
    error_msgs = [
        'new_order[[]*[]] must be between * and input_sequence_length = * for sample *, but it is: *', # noqa:E501
        'new_order[[]*[]] must be between * and input_sequence_length = * for sample *, but it is: *', # noqa:E501
        'Empty result sequences are not allowed',
        'Empty `new_order` for sample * is not allowed',
        'new_order[[]*[]] must be between * and input_sequence_length = * for sample *, but it is: *', # noqa:E501
        'new_order[[]*[]] must be between * and input_sequence_length = * for sample *, but it is: *', # noqa:E501
        'Input with dimension * cannot be converted to dimension *'
    ]
    assert len(orders) == len(error_msgs)
    for dev in ["cpu", "gpu"]:
        for [new_order, per_sample], error_msg in zip(orders, error_msgs):
            yield raises(
                RuntimeError,
                glob=error_msg
            )(check_fail_sequence_rearrange), 2, shape, new_order, per_sample, dev
def test_wrong_layouts_sequence_rearrange():
    # Layouts whose outermost dimension is not 'F' must be rejected.
    shape = [5, 1]
    new_order = [0, 2, 1, 3, 4]
    per_sample = False
    for dev in ["cpu", "gpu"]:
        for layout in ["HF", "HW"]:
            yield raises(
                RuntimeError,
                glob=('Expected sequence as the input, where outermost dimension represents'
                      ' frames dimension `F`, got data with layout = "H[WF]"')
            )(check_fail_sequence_rearrange), 5, shape, new_order, per_sample, dev, layout
|
11,576 | d5f737bf11f27e2bf5bfc21b3498d66f9db38312 | # -*- coding: utf-8 -*-
"""
Created on Wed Jan 29 23:23:50 2020
@author: swedkm
"""
import geopandas as gpd
import pandas as pd
import os
from multiprocessing import Pool
def clean_sales_shp(county):
    """Aggregate one county's sales shapefiles and write the qualifying sales.

    A sale qualifies when it has a non-zero sale value, a sale year in
    [2001, 2015], use '100 Res 1 unit', dwelling type
    'Single-Family / Owner Occupied' and no multiple uses.
    Output goes to <county>_clean.shp in the same Output directory.
    """
    #create a directory for each county folder and set the path
    directory = 'C:\Anaconda3\MN_Scraper\Metro_Region\MN_Metro_Shapes_2002_2015\Output/' + county
    folder = os.listdir(directory)
    path = [os.path.join(directory, s) for s in folder if '.shp' in s]
    #set coordinate reference system to the crs in the first shapefile
    coord = str(gpd.read_file(path[0]).crs)
    #concatenate shapefiles into 1 aggregated shapefile
    agrt = gpd.GeoDataFrame(pd.concat([gpd.read_file(p) for p in path]), crs=coord)
    #edit aggregated shapefile to remove duplicate sales
    agrt = agrt.drop_duplicates()
    #identify relevant sales to go into sales geodataframe
    keep_sale = []
    sale_year = []
    sale_month = []
    address = []
    full_address = []
    for index, row in agrt.iterrows():
        value = row['SALE_VALUE']
        # SALE_DATE appears as either MM/DD/YYYY or MM-DD-YYYY — TODO confirm
        # there is no two-digit-year variant. Convert pieces to int so the
        # year filter below compares numbers, not strings.
        try:
            date = row['SALE_DATE'].split('/')
            year = int(date[2])
            month = int(date[0])
        except Exception:
            try:
                date = row['SALE_DATE'].split('-')
                year = int(date[2])
                month = int(date[0])
            except Exception:
                # Unparseable/missing date: 0 is outside the kept year range.
                year = 0
                month = 0
        addrs = str(row['BLDG_NUM']) + ' ' + str(row['STREETNAME']) + ' ' + str(row['STREETTYPE']) + ' ' + str(row['SUFFIX_DIR'])
        faddrs = addrs + ', ' + str(row['CITY']) + ', MN, ' + str(row['ZIP'])
        use = row['USE1_DESC']
        multi = row['MULTI_USES']
        dwell = row['DWELL_TYPE']
        if value == 0:
            keep = 'no'
        # BUG FIX: was `year < 2001 | year > 2015` — `|` is bitwise OR and
        # binds tighter than `<`, and `year` was a string, so this either
        # raised TypeError or filtered nothing. Use a boolean `or` on ints.
        elif year < 2001 or year > 2015:
            keep = 'no'
        elif use != '100 Res 1 unit':
            keep = 'no'
        elif dwell != 'Single-Family / Owner Occupied':
            keep = 'no'
        elif multi != 'N':
            keep = 'no'
        else:
            keep = 'yes'
        keep_sale.append(keep)
        sale_year.append(year)
        sale_month.append(month)
        address.append(addrs)
        full_address.append(faddrs)
    agrt['Keep Sale'] = keep_sale
    agrt['Sale Year'] = sale_year
    agrt['Sale Month'] = sale_month
    agrt['Street Address'] = address
    agrt['Full Address'] = full_address
    sales = agrt[agrt['Keep Sale'] == 'yes']
    output_file1 = 'C:\Anaconda3\MN_Scraper\Metro_Region\MN_Metro_Shapes_2002_2015\Output/' + county + '_clean.shp'
    sales.to_file(output_file1)
################################### MAIN CODE BLOCK ######################################
if __name__ == "__main__":
    # Clean the six metro counties in parallel, three workers at a time.
    county = ['Anoka', 'Carver', 'Dakota', 'Ramsey', 'Scott', 'Washington']
    p = Pool(3)
    result = p.map(clean_sales_shp, county)
    p.terminate()
    p.join()
11,577 | f822f162f7dfa3e7f68dbdd3e0065bb40c5720c1 | import os
import shutil
from pathlib import Path
from flask import request, send_from_directory
from flask_restplus import Resource
from bson import ObjectId
from app.main.util.decorator import token_required
from app.main.model.database import *
from task_runner.runner import start_threads
from ..service.auth_helper import Auth
from ..util.dto import OrganizationDto
from ..util.errors import *
from ..config import get_config
from ..util.identicon import *
USERS_ROOT = Path(get_config().USERS_ROOT)
api = OrganizationDto.api
_organization = OrganizationDto.organization
@api.route('/')
class OrganizationList(Resource):
    # Collection endpoint: list, create and delete organizations.
    @token_required
    @api.doc('List all organizations associated with the user')
    def get(self, **kwargs):
        ret = []
        check = []
        user_id = kwargs['user']['user_id']
        user = User.objects(pk=user_id).first()
        if not user:
            return error_message(ENOENT, 'User not found'), 404
        # Organizations owned by the user (the implicit 'Personal' one is hidden).
        organizations = Organization.objects(owner=ObjectId(user_id), name__not__exact='Personal')
        for organization in organizations:
            ret.append({
                'label': organization.name,
                'owner': organization.owner.name,
                'owner_email': organization.owner.email,
                'value': str(organization.id)
            })
            check.append(organization)
        # Organizations the user joined but does not own (skip duplicates).
        for organization in user.organizations:
            if organization in check or organization.name == 'Personal':
                continue
            ret.append({
                'label': organization.name,
                'owner': organization.owner.name,
                'owner_email': organization.owner.email,
                'value': str(organization.id)
            })
        return ret
    @token_required
    @api.doc('Create a new organization')
    def post(self, **kwargs):
        data = request.json
        name = data.get('name', None)
        if not name:
            return error_message(EINVAL, 'Field name is required'), 400
        user = User.objects(pk=kwargs['user']['user_id']).first()
        if not user:
            return error_message(ENOENT, 'User not found'), 404
        org = Organization(name=name)
        org.owner = user
        org.members.append(user)
        org.save()
        user.organizations.append(org)
        user.save()
        # The on-disk directory embeds the new id so equal names cannot collide.
        org.path = name + '#' + str(org.id)
        org_root = USERS_ROOT / org.path
        try:
            os.mkdir(org_root)
        except FileExistsError as e:
            return error_message(EEXIST), 401
        # Generate a default identicon avatar derived from the name.
        img= render_identicon(hash(name), 27)
        img.save(org_root / ('%s.png' % org.id))
        org.avatar = '%s.png' % org.id
        org.save()
        # NOTE(review): no explicit success payload/status is returned here —
        # confirm the framework's handling of a None return.
    @token_required
    @api.doc('Delete a organization')
    def delete(self, **kwargs):
        organization_id = request.json.get('organization_id', None)
        if not organization_id:
            return error_message(EINVAL, "Field organization_id is required"), 400
        organization = Organization.objects(pk=organization_id).first()
        if not organization:
            # NOTE(review): message says "Team" although an organization is missing.
            return error_message(ENOENT, "Team not found"), 404
        user = User.objects(pk=kwargs['user']['user_id']).first()
        if not user:
            return error_message(ENOENT, "User not found"), 404
        if organization.owner != user:
            return error_message(EINVAL, 'You are not the organization owner'), 403
        # Queues are flagged for deletion rather than removed inline.
        EventQueue.objects(organization=organization).update(to_delete=True, organization=None, team=None)
        try:
            shutil.rmtree(USERS_ROOT / organization.path)
        except FileNotFoundError:
            pass
        User.objects.update(pull__organizations=organization)
        # Tests belong to teams of the organization will be deleted as well by this query
        tests = Test.objects(organization=organization)
        for test in tests:
            tasks = Task.objects(test=test)
            for task in tasks:
                TestResult.objects(task=task).delete()
            tasks.delete()
        tests.delete()
        TaskQueue.objects(organization=organization).update(to_delete=True, organization=None, team=None)
        # Detach and delete every team that belonged to the organization.
        teams = Team.objects(organization=organization)
        for team in teams:
            User.objects.update(pull__teams=team)
            team.delete()
        organization.delete()
@api.route('/avatar/<org_id>')
class OrganizationAvatar(Resource):
    @api.doc('get the avatar for an organization')
    def get(self, org_id):
        # Authentication is read from the 'Admin-Token' cookie instead of the
        # usual @token_required header decorator.
        auth_token = request.cookies.get('Admin-Token')
        if auth_token:
            payload = User.decode_auth_token(auth_token)
            # decode_auth_token returns an error string on failure — TODO confirm.
            if not isinstance(payload, str):
                user = User.objects(pk=payload['sub']).first()
                if user:
                    org = Organization.objects(pk=org_id).first()
                    if org:
                        # Serve the avatar image from the organization's directory.
                        return send_from_directory(Path(os.getcwd()) / USERS_ROOT / org.path, org.avatar)
                    return error_message(USER_NOT_EXIST, 'Organization not found'), 404
                return error_message(USER_NOT_EXIST), 404
            return error_message(TOKEN_ILLEGAL, payload), 401
        return error_message(TOKEN_REQUIRED), 400
@api.route('/member')
class OrganizationMember(Resource):
    @token_required
    @api.doc('Quit the organization')
    def delete(self, **kwargs):
        organization_id = request.json.get('organization_id', None)
        if not organization_id:
            return error_message(EINVAL, "Field organization_id is required"), 400
        org_to_quit = Organization.objects(pk=organization_id).first()
        if not org_to_quit:
            return error_message(ENOENT, "Organization not found"), 404
        user = User.objects(pk=kwargs['user']['user_id']).first()
        if not user:
            return error_message(ENOENT, "User not found"), 404
        for organization in user.organizations:
            if organization != org_to_quit:
                continue
            # The owner must transfer ownership before quitting.
            if organization.owner == user:
                return error_message(EPERM, "Can't quit the organization as you are the owner"), 403
            # Remove the membership link on both sides.
            organization.modify(pull__members=user)
            user.modify(pull__organizations=organization)
            return error_message(SUCCESS), 200
        else:
            # for/else: the loop never breaks, so this runs exactly when the
            # loop completed without returning — the user is not a member.
            return error_message(EINVAL, "User is not in the organization"), 400
@api.route('/all')
class OrganizationListAll(Resource):
    @token_required
    @api.doc('List all organizations registered')
    def get(self, **kwargs):
        # Every non-'Personal' organization, regardless of membership.
        ret = []
        organizations = Organization.objects(name__not__exact='Personal')
        for organization in organizations:
            r = {
                'label': organization.name,
                'owner': organization.owner.name,
                'owner_email': organization.owner.email,
                'value': str(organization.id)
            }
            ret.append(r)
        return ret
@api.route('/include_team')
class OrganizationListIncludeTeam(Resource):
    """List the user's organizations with their teams attached as children.

    BUG FIX: renamed from ``OrganizationListAll`` — the previous name clobbered
    the resource class of the same name registered for the '/all' route.
    """
    @token_required
    @api.doc('List all organizations registered')
    def get(self, **kwargs):
        ret = []
        check = []
        user_id = kwargs['user']['user_id']
        user = User.objects(pk=user_id).first()
        if not user:
            return error_message(ENOENT, 'User not found'), 404
        # Organizations the user owns.
        organizations = Organization.objects(owner=ObjectId(user_id))
        for organization in organizations:
            r = {
                'label': organization.name,
                'owner': organization.owner.name,
                'owner_email': organization.owner.email,
                'value': str(organization.id)
            }
            ret.append(r)
            check.append(organization)
            if 'teams' not in organization:
                continue
            if len(organization.teams) > 0:
                # `r` is already in `ret`; mutating it here attaches the children.
                r['children'] = []
                for team in organization.teams:
                    r['children'].append({
                        'label': team.name,
                        'owner': team.owner.name,
                        'owner_email': team.owner.email,
                        'value': str(team.id)
                    })
        # Organizations the user joined but does not own (skip duplicates).
        for organization in user.organizations:
            if organization in check:
                continue
            r = {
                'label': organization.name,
                'owner': organization.owner.name,
                'owner_email': organization.owner.email,
                'value': str(organization.id)
            }
            ret.append(r)
            if 'teams' not in organization:
                continue
            if len(organization.teams) > 0:
                r['children'] = []
                for team in organization.teams:
                    r['children'].append({
                        'label': team.name,
                        'owner': team.owner.name,
                        'owner_email': team.owner.email,
                        'value': str(team.id)
                    })
        return ret
@api.route('/join')
class OrganizationJoin(Resource):
    @token_required
    @api.doc('join an organization')
    def post(self, **kwargs):
        org_id = request.json.get('id', None)
        if not org_id:
            return error_message(EINVAL, "Field id is required"), 400
        user = User.objects(pk=kwargs['user']['user_id']).first()
        if not user:
            return error_message(ENOENT, 'User not found'), 404
        organization = Organization.objects(pk=org_id).first()
        if not organization:
            return error_message(ENOENT, 'Organization not found'), 404
        # Both sides of the membership are guarded so a repeated join is a no-op.
        if user not in organization.members:
            organization.modify(push__members=user)
        if organization not in user.organizations:
            user.modify(push__organizations=organization)
        # Kick the task-runner threads for this user (see task_runner.runner).
        start_threads(user)
@api.route('/users')
class OrganizationUsers(Resource):
    @token_required
    @api.doc('List all users of the specified organization')
    def get(self, **kwargs):
        # Exactly one of the `organization` / `team` query parameters is
        # expected; `organization` wins when both are supplied.
        organization_id = request.args.get('organization', None)
        if organization_id:
            organization = Organization.objects(pk=organization_id).first()
            if not organization:
                return error_message(ENOENT, 'Organization not found'), 404
            return [{'value': str(m.id), 'label': m.name, 'email': m.email} for m in organization.members]
        team_id = request.args.get('team', None)
        if team_id:
            team = Team.objects(pk=team_id).first()
            if not team:
                return error_message(ENOENT, 'Team not found'), 404
            return [{'value': str(m.id), 'label': m.name, 'email': m.email} for m in team.members]
        # BUG FIX: previously fell off the end (implicit None response) when
        # neither parameter was supplied; report the client error explicitly.
        return error_message(EINVAL, 'Field organization or team is required'), 400
@api.route('/transfer')
class OrganizationTransfer(Resource):
    @token_required
    @api.doc('Transfer ownership of an organization to another authorized user')
    def post(self, **kwargs):
        user = User.objects(pk=kwargs['user']['user_id']).first()
        if not user:
            return error_message(ENOENT, 'User not found'), 404
        organization_id = request.json.get('organization', None)
        if not organization_id:
            return error_message(EINVAL, 'Field organization is required'), 401
        organization = Organization.objects(pk=organization_id).first()
        if not organization:
            return error_message(ENOENT, 'Organization not found'), 404
        # Only the current owner may hand the organization over.
        if organization.owner != user:
            return error_message(EPERM, 'You are not the organization owner'), 403
        owner_id = request.json.get('new_owner', None)
        if not owner_id:
            return error_message(EINVAL, 'Field new_owner is required'), 401
        owner = User.objects(pk=owner_id).first()
        if not owner:
            return error_message(ENOENT, 'New owner not found'), 404
        # NOTE(review): the new owner is not verified to be a member here.
        organization.owner = owner
        organization.save()
|
11,578 | b91610f9854aa02da932597040fb30ba6d180963 | from presentation import elem, elem2, factorial, factorial2, _map, _map2, quicksort
# elem
# NOTE: the module-level asserts below run at import time and double as a
# quick smoke test of elem/elem2.
assert elem(1, [1,2]) == True
assert elem(3, [1,2]) == False
assert elem(4, [1,2]) == False
assert elem(4, [4]) == True
assert elem(1, []) == False
# elem2
assert elem2(1, [1,2]) == True
assert elem2(4, [4]) == True
assert elem2(3, [1,2]) == False
assert elem2(4, [1,2]) == False
assert elem2(1, []) == False
# factorial
def test_factorial_of_1_is_1():
    assert factorial(1) == 1
def test_factorial_of_2_is_2():
    assert factorial(2) == 2
def test_factorial_of_3_is_6():
    assert factorial(3) == 6
def test_factorial_of_4_is_24():
    assert factorial(4) == 24
# factorial2 (alternative implementation, same expected results)
def test_factorial2_of_1_is_1():
    assert factorial2(1) == 1
def test_factorial2_of_2_is_2():
    assert factorial2(2) == 2
def test_factorial2_of_3_is_6():
    assert factorial2(3) == 6
def test_factorial2_of_4_is_24():
    assert factorial2(4) == 24
# _map
def test_map_plus_1_in_empty_list():
    assert _map(lambda x: x + 1, []) == []
def test_map_plus_1_in_list_just_with_2():
    assert _map(lambda x: x + 1, [1]) == [2]
def test_map_plus_1_in_a_list_with_2345():
    assert _map(lambda x: x + 1, [1,2,3,4]) == [2,3,4,5]
# _map2 (alternative implementation, same expected results)
def test_map2_plus_1_in_empty_list():
    assert _map2(lambda x: x + 1, []) == []
def test_map2_plus_1_in_list_just_with_2():
    assert _map2(lambda x: x + 1, [1]) == [2]
def test_map2_plus_1_in_a_list_with_2345():
    assert _map2(lambda x: x + 1, [1,2,3,4]) == [2,3,4,5]
# quicksort
def test_quicksort_empty_list():
    assert quicksort([]) == []
def test_quicksort_list_with_just_one_element():
    assert quicksort([1]) == [1]
def test_quicksort_list_with_two_elements_already_sorted():
    assert quicksort([1, 2]) == [1, 2]
def test_quicksort_sort_2_1_in_1_2():
    assert quicksort([2, 1]) == [1, 2]
def test_quicksort_sort_2_3_1_in_1_2_3():
    assert quicksort([2, 3, 1]) == [1, 2, 3]
11,579 | 7a5bfbe81cb5c3b5aaab479131174970f3db906d |
# Odoo addon manifest: restricts timesheet task selection to the active project.
{
    'name': 'Task Log: limit Task by Project',
    'version': '12.0.1.0.0',
    'category': 'Human Resources',
    'website': 'http://www.planet-odoo.com',
    'author':
        'Planet-odoo',
    'installable': True,
    'application': False,  # extension module, not a standalone application
    'summary': (
        'Limit task selection to tasks on currently-selected project'
    ),
    'depends': [
        'hr_timesheet',
    ],
}
|
11,580 | 508788436016b28ebd6842700dcdc9ed87739fe0 | import json
import itertools
from compliance_checker.base import BaseCheck, BaseNCCheck, check_has, score_group, Result
from pkgutil import get_data
class ACDDBaseCheck(BaseCheck):
    # ACDD metadata compliance checks, grouped by priority level, plus
    # per-variable attribute checks at the bottom.
    ###############################################################################
    #
    # HIGHLY RECOMMENDED
    #
    ###############################################################################
    @check_has(BaseCheck.HIGH)
    def check_high(self, ds):
        # Global attributes every dataset must carry.
        return ['title', 'summary', 'keywords']
    ###############################################################################
    #
    # RECOMMENDED
    #
    ###############################################################################
    @check_has(BaseCheck.MEDIUM)
    def check_recommended(self, ds):
        # Tuples pair an attribute name with its set of allowed values.
        return [
            'id',
            'naming_authority',
            'keywords_vocabulary',
            ('cdm_data_type', ['vector', 'grid', 'textTable', 'tin', 'stereoModel', 'video']),
            'history',
            'comment',
            'date_created',
            'creator_name',
            'creator_url',
            'creator_email',
            'institution',
            'project',
            'processing_level',
            'acknowledgement',
            'geospatial_lat_min',
            'geospatial_lat_max',
            'geospatial_lon_min',
            'geospatial_lon_max',
            'geospatial_vertical_min',
            'geospatial_vertical_max',
            'time_coverage_start',
            'time_coverage_end',
            'time_coverage_duration',
            'time_coverage_resolution',
            'standard_name_vocabulary',
            'license'
        ]
    ###############################################################################
    #
    # SUGGESTED
    #
    ###############################################################################
    @check_has(BaseCheck.LOW)
    def check_suggested(self, ds):
        return [
            'contributor_name',
            ('contributor_role', ['principalInvestigator', 'author']),
            'publisher_name', # publisher,dataCenter
            'publisher_url', # publisher
            'publisher_email', # publisher
            'date_modified',
            'date_issued',
            'geospatial_lat_units',
            'geospatial_lat_resolution',
            'geospatial_lon_units',
            'geospatial_lon_resolution',
            'geospatial_vertical_units',
            'geospatial_vertical_resolution',
            'geospatial_vertical_positive'
        ]
    ###############################################################################
    #
    # HIGHLY RECOMMENDED VARIABLE ATTRS
    #
    ###############################################################################
    def _get_vars(self, ds, attr_filter=None):
        # Pull all variables out of the dataset's NcML tree; with a filter,
        # return (attribute value or None, variable name) pairs instead.
        vars = ds.dogma._eval_xpath('//ncml:variable')
        if attr_filter is not None:
            attrs = itertools.chain.from_iterable((v.xpath('ncml:attribute[@name="%s"]/@value' % attr_filter, namespaces=ds.dogma._namespaces) or [None] for v in vars))
            names = (v.get('name', 'unknown') for v in vars)
            attrs = zip(attrs, names)
            return attrs
        return vars
    @score_group('varattr')
    def check_var_long_name(self, ds):
        # Every variable should declare a long_name.
        vars = self._get_vars(ds, 'long_name')
        retval = [Result(BaseCheck.HIGH, v[0] is not None, (v[1], "var_long_name")) for v in vars]
        return retval
    @score_group('varattr')
    def check_var_standard_name(self, ds):
        # Every variable should declare a standard_name.
        vars = self._get_vars(ds, 'standard_name')
        retval = [Result(BaseCheck.HIGH, v[0] is not None, (v[1], "var_std_name")) for v in vars]
        return retval
    @score_group('varattr')
    def check_var_units(self, ds):
        # Every variable should declare units.
        vars = self._get_vars(ds, 'units')
        retval = [Result(BaseCheck.HIGH, v[0] is not None, (v[1], "var_units")) for v in vars]
        return retval
    @score_group('varattr')
    def check_var_coverage_content_type(self, ds):
        # coverage_content_type must be present AND one of the allowed values.
        vars = self._get_vars(ds, 'coverage_content_type')
        allowed = ['image','thematicClassification','physicalMeasurement','auxiliaryInformation','qualityInformation','referenceInformation','modelResult','coordinate']
        retval = [Result(BaseCheck.HIGH, v[0] is not None and v[0] in allowed, (v[1], "var_coverage_content_type")) for v in vars]
        return retval
class ACDDNCCheck(BaseNCCheck, ACDDBaseCheck):
    @classmethod
    def beliefs(cls):
        """Load the ACDD belief weights shipped with the package,
        with "__"-prefixed metadata entries stripped out."""
        f = get_data("compliance_checker", "data/acdd-ncml.json")
        beliefs = json.loads(f)
        # strip out metadata
        # BUG FIX: dict.iteritems() is Python-2-only; .items() works on both.
        return {k: v for k, v in beliefs.items() if not k.startswith("__")}
|
11,581 | 5bbb0a6ae6e09d81905dffb1ba683aab3d12312b | import sys
import numpy
import queue as Q
# build a priority queue for annotated peaks
def hashOrder(file):
    """Read a TSV of (key, start, end) rows and return {key: [(start, end), ...]}
    with each key's intervals in ascending order (drained from a priority queue).
    The key is presumably a chromosome/RNA name — confirm against the inputs."""
    RNA = {}
    f = open(file)
    while True:
        line = f.readline()
        if not line:
            break
        line = line.strip('\n').split("\t")
        key = line[0]
        # start and end postion
        pair = (int(line[1]), int(line[2]))
        # range and miRNA/gene name
        if key not in RNA:
            RNA[key] = Q.PriorityQueue()
        RNA[key].put((pair))
    # Drain each queue into a plain list; PriorityQueue.get() yields the
    # smallest pair first, so each list comes out sorted ascending.
    for key in RNA.keys():
        record = []
        while not RNA[key].empty():
            record.append(RNA[key].get())
        RNA[key] = record
    return RNA
def matchPeak(file1, file2, file3):
    """Find peaks common to two samples and write them to *file3*.

    A peak is "common" when it overlaps any peak of the other sample under
    the same key; both overlapping peaks are written out (deduplicated).

    BUG FIX: the function previously took only (file1, file2) while being
    called with three arguments (TypeError) and read the output path from a
    global; it now takes the output file as an explicit parameter.
    """
    # first sample: {key: [(start, end), ...]}
    exper = hashOrder(file1)
    # second sample
    refer = hashOrder(file2)
    com = set()
    f1 = open(file3, "w+")
    for indexs in exper.keys():
        if indexs in refer.keys():
            # first
            sample1 = exper[indexs]
            # second
            sample2 = refer[indexs]
            for one in sample1:
                for each in sample2:
                    # if two peaks from two different samples overlap, they are considered as common peaks
                    if not one[0] > each[1] and not each[0] > one[1]:
                        com.add(indexs + "\t" + str(one[0]) + "\t" + str(one[1]) + "\n")
                        com.add(indexs + "\t" + str(each[0]) + "\t" + str(each[1]) + "\n")
    for each in com:
        f1.write(each)
    # BUG FIX: the output handle was never closed/flushed.
    f1.close()
# peak from sample 1
file1 = sys.argv[1]
# peak from sample 2
file2 = sys.argv[2]
# output file for common peaks
file3 = sys.argv[3]
matchPeak(file1, file2, file3)
|
11,582 | dec288b980c0290aed712b0b63495ea79f3c4a7e | """
advancedSkeleton @ utils
"""
import maya.cmds as mc
import maya.mel as mm
def fixCtrlColors():
    """
    Module to fix advanced skeleton auto rig controls colors to match the standard colors
    :return: None
    """
    #List all the controls in the scene to change color
    controlsShapes = mc.ls( type = 'nurbsCurve', s = True )
    # enable drawing overrides on every curve shape
    [ mc.setAttr( s + '.ove', 1 ) for s in controlsShapes ]
    l_mainControls = []
    r_mainControls = []
    l_secondaryControls = []
    r_secondaryControls = []
    switchControls = []
    m_fkControls = []
    m_ikControls = []
    # bucket shapes by side (L/R suffix) and middle FK/IK prefix
    for s in controlsShapes:
        if s.endswith( 'LShape' ):
            l_mainControls.append( s )
        elif s.endswith( 'RShape' ):
            r_mainControls.append( s )
        elif s.startswith( 'FK' ) and s.endswith( 'MShape' ):
            m_fkControls.append( s )
        elif s.startswith( 'IK' ) and s.endswith( 'MShape' ):
            m_ikControls.append( s )
    # finger controls are treated as secondary and get softer colors
    for shape in l_mainControls:
        if 'Finger' in shape:
            l_secondaryControls.append( shape )
    for shape in r_mainControls:
        if 'Finger' in shape:
            r_secondaryControls.append( shape )
    for shape in controlsShapes:
        if shape.startswith( 'FKIK' ):
            switchControls.append( shape )
    # set colors (Maya override color indices)
    [ mc.setAttr( s + '.ovc', 6) for s in l_mainControls ]
    [ mc.setAttr( s + '.ovc', 13) for s in r_mainControls ]
    [ mc.setAttr( s + '.ovc', 18) for s in l_secondaryControls ]
    [ mc.setAttr( s + '.ovc', 21) for s in r_secondaryControls ]
    [ mc.setAttr( s + '.ovc', 17) for s in m_fkControls ]
    [ mc.setAttr( s + '.ovc', 14) for s in m_ikControls ]
    [ mc.setAttr( s + '.ovc', 22) for s in switchControls ]
    # BUG FIX: `print 'color changed!'` is a Python-2-only statement; the
    # call form below is valid in both Python 2 and 3.
    print('color changed!')
def mirrorCvPositions( cvs, mode = 'x' ):
    """
    Mirror curve CV world positions from one side's control shape to the other.

    :param cvs: list of vertex usually take it from mc.ls(sl = 1, fl = 1)
    :param mode: mirror axis , 'x' by default
    :return: none
    """
    for cv in cvs:
        # Break '…|prefix_LShape.cv[n]' into short shape name and cv index.
        strName = str( cv ).split( '|' )[-1]
        cvShortLeft = str( cv ).split( '.' )[-2]
        cvNumber = str( cv ).split( '.' )[-1]
        prefixName = cvShortLeft.split('_')[-2]
        cvShortRight = ''
        # Swap the _L/_R suffix to address the same CV on the opposite shape.
        if cvShortLeft.endswith( '_LShape' ): cvShortRight = prefixName + '_RShape' + '.' + cvNumber
        elif cvShortLeft.endswith( '_RShape' ): cvShortRight = prefixName + '_LShape' + '.' + cvNumber
        else:
            mm.eval( 'warning("advancedSkeleton.mirrorCvPositions: cannot find mirror for ' + strName + ', doesn`t endswith with _L or _R")' )
            continue
        # Copy the world-space position, negating the chosen mirror axis.
        pos = mc.xform( strName, q = 1, ws = 1, t = 1 )
        if mode == 'x': mc.xform( cvShortRight, ws = 1, t = [ pos[0] * ( -1 ), pos[1], pos[2] ] )
        if mode == 'z': mc.xform( cvShortRight, ws = 1, t = [ pos[0], pos[1], pos[2] * ( -1 ) ] )
|
11,583 | 667ab0cf5fdc5a746955eafad125cdbe23e9e025 | import logging, sys
from osdemo.core.kernel import Kernel
from osdemo.shell.commands import Commands
sys.path.append('lib/pyshell')
from pyshell.model.shell import Shell
from pyshell.gui.shellframe import ShellFrame
if __name__ == '__main__':
    # Timestamped log lines for the demo kernel.
    logging.basicConfig(format='%(asctime)s | %(message)s', level=logging.INFO)
    k = Kernel()
    shell = Shell()
    # Wire shell command names to kernel operations.
    shell.addCommand("load", Commands(k).load)
    shell.addCommand("scheduler", Commands(k).scheduler)
    shell.addCommand("memory", Commands(k).memory)
    ShellFrame(shell).start()
|
11,584 | 5ee65c0bacc76a7358aa4febf0c98573cc0ebcef | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import QApplication
# Clipboard watcher: print copied file URLs and/or text whenever the
# system clipboard changes.
app = QApplication([])
clipboard = app.clipboard()
def on_clipboard_change():
    # Dump the clipboard's file list (if any) and its plain text to stdout.
    data = clipboard.mimeData()
    if data.hasFormat('text/uri-list'):
        for path in data.urls():
            print(path)
    if data.hasText():
        print(data.text())
clipboard.dataChanged.connect(on_clipboard_change)
app.exec_()
11,585 | 530ff8c2f9439a213a126f074fdfaa0156727599 | from django.contrib import admin
from .models import Shop, Street, City
@admin.register(City)
class CityAdmin(admin.ModelAdmin):
    # Default admin behaviour is sufficient for City.
    pass
@admin.register(Street)
class StreetAdmin(admin.ModelAdmin):
    # Show the parent city next to each street and allow filtering by it.
    list_display = ('name', 'city')
    list_filter = ('city',)
@admin.register(Shop)
class ShopAdmin(admin.ModelAdmin):
    list_display = ('street', 'city')
11,586 | d07c1020de2996021666f7a5c31a2ec9f59a8a50 | '''
File:
insteon_message.py
Description:
A set of classes for Insteon support.
Author(s):
Chris Van Orman
License:
This free software is licensed under the terms of the GNU public license, Version 1
Usage:
Example:
Notes:
Created on Mar 11, 2013
'''
from pytomation.interfaces.common import Command, PytomationObject
def _byteIdToStringId(idHigh, idMid, idLow):
return '%02X.%02X.%02X' % (idHigh, idMid, idLow)
class InsteonMessage(PytomationObject):
    """Base class for accumulating and decoding one Insteon PLM message.

    Subclasses pass their PLM command code and expected byte length to the
    constructor and override ``_getCommands`` to translate raw bytes into
    Pytomation commands.
    """
    # cmd1 byte -> Pytomation command (0x2e is the extended-message ACK: no command)
    commands = {
        '0x11': Command.ON,
        '0x13': Command.OFF,
        '0x2e': None
    }
    def __init__(self, code, length):
        super(InsteonMessage, self).__init__()
        # BUG FIX: the PLM command code passed by subclasses was silently
        # discarded; keep it so the message type can be inspected.
        self._code = code
        self._length = length
        self._data = []
    def appendData(self, value):
        """Append one received byte to the message buffer."""
        self._data.append(value)
    def getData(self):
        """Return the raw bytes received so far."""
        return self._data
    def getLength(self):
        """Expected total length of this message in bytes."""
        return self._length
    def _getCommands(self):
        # Default: no decoded commands; subclasses override.
        return []
    def getCommands(self):
        """Return {'data': raw bytes, 'commands': decoded commands}.

        Decoding errors are logged and yield an empty command list.
        """
        commands = []
        try:
            commands = self._getCommands()
        except Exception as e:
            self._logger.debug("Exception %s" % e)
        commands = commands if commands else []
        return { 'data': self._data, 'commands': commands }
    def isComplete(self):
        """True once all expected bytes have been received."""
        return self.getLength() == len(self._data)
class InsteonEchoMessage(InsteonMessage):
    # PLM echo (0x62) of a command sent to a device; 9 bytes when standard.
    def __init__(self):
        super(InsteonEchoMessage, self).__init__(0x62, 9)
    def getLength(self):
        # Bit 4 of the message-flags byte (index 5) marks an extended
        # message, which is 23 bytes long instead of 9.
        isExtended = len(self._data) >= 6 and (self._data[5] & 16 == 16)
        return 23 if isExtended else self._length
class InsteonExtendedMessage(InsteonMessage):
    # Extended-length received message (0x51), 25 bytes total.
    def __init__(self):
        super(InsteonExtendedMessage, self).__init__(0x51, 25)
    def _getCommands(self):
        # Byte 21 is read as a bitmask of 8 LED states — TODO confirm against
        # the device's extended-data layout.
        ledState = self._data[21]
        commands = []
        # Bytes 2-4 hold the sender's three-byte address.
        address = _byteIdToStringId(self._data[2], self._data[3], self._data[4])
        # led status
        for i in range(0, 8):
            # One ON/OFF command per LED bit, addressed as '<addr>:NN' (1-based).
            commands.append({'command': Command.ON if (ledState & (1 << i)) else Command.OFF, 'address': (address + ':%02X' % (i + 1))})
        return commands
class InsteonStatusMessage(InsteonMessage):
    # Standard received message (0x50), 11 bytes.
    def __init__(self):
        super(InsteonStatusMessage, self).__init__(0x50, 11)
    def _commandFromLevel(self, level):
        # Map a 0-255 on-level to ON / OFF / (LEVEL, n): >=250 is ON, <=2 is
        # OFF, anything in between is a dim level.
        command = Command.ON if level >= 250 else Command.OFF
        command = ((Command.LEVEL, level)) if level > 2 and level < 250 else command
        return command
    def _getCommands(self):
        flags = self._data[8]
        cmd1 = self._data[9]
        cmd2 = self._data[10]
        # Read the flags
        isAck = (flags & 32) == 32
        isGroup = (flags & 64) == 64
        isBroadcast = (flags & 128) == 128
        isDirectAck = isAck and not isGroup and not isBroadcast # ack from direct command (on,off,status,etc)
        isGroupCleanup = isGroup and not isAck and not isBroadcast # manually activated scene
        isGroupBroadcast = not isAck and isGroup and isBroadcast # manually activated scene
        isGroupAck = isAck and isGroup and not isBroadcast # plm activated scene ack of individual device
        # Bytes 2-4 hold the sender's three-byte address.
        address = _byteIdToStringId(self._data[2], self._data[3], self._data[4])
        commands = []
        command = None
        # lookup the command if we have it, though this isn't very reliable.
        if (hex(cmd1) in self.commands):
            command = self.commands[hex(self._data[9])]
        if (isDirectAck and cmd1 != 0x2e):
            # Set the on level from cmd2 since cmd1 is not consistent on status messages.
            # We ignore 0x2e here because that is an ACK for extended messages and is always 0.
            command = self._commandFromLevel(cmd2)
        elif (isGroupBroadcast):
            # group 1 means the main load of the switch was turned on.
            if (self._data[7] == 1):
                commands.append({'command': command, 'address': address})
            # This is a scene message, so we should notify the scene.
            address += ':%02X' % self._data[7]
        elif (isGroupCleanup and cmd2 != 0):
            # This is a scene message, so we should notify the scene.
            address += ':%02X' % cmd2
        elif (isGroupAck):
            # This is an All-Link Cleanup. Notify the scene not the ack'ing device.
            address = '00.00.%02X' % cmd2
        commands.append({'command': command, 'address': address })
        return commands
|
11,587 | 4478df320db5841fc9e4a6348d25c9bcfd755df2 | # https://leetcode.com/problems/ransom-note
from collections import Counter
class Solution:
    def canConstruct(self, ransomNote: str, magazine: str) -> bool:
        """Return True if ransomNote can be assembled from magazine's letters.

        Each magazine letter may be used at most once.
        """
        # Counter subtraction drops zero/negative counts, so the result is
        # empty exactly when magazine covers every letter of ransomNote.
        # This replaces the manual per-key comparison loop with one pass.
        return not (Counter(ransomNote) - Counter(magazine))
|
11,588 | 4048f88ccf12d026c1eefff7165927cfad372bef | # -*- coding: utf-8 -*-
from __future__ import absolute_import

import os

import click

from . import utils
from . import omr  # was `from . import om,r` — a typo; the commands below call omr.*
from . import path
# Root CLI group; `build` and `run_evaluation` are attached to it below via
# tool.add_command().
@click.group()
def tool():
    pass
# package commands
@click.command()
def build():
    """Prepare packages and wheel locally"""
    # Build the source package first, then the wheel; the secho output lets
    # the user see which step failed if utils raises.
    click.secho('Creating package ...')
    pkg = utils.create_package()
    click.secho('Package created: {}'.format(pkg), fg='green')
    click.secho('Creating wheel...')
    wheel_path = utils.create_wheel()
    click.secho('Wheel created in {}'.format(wheel_path), fg='green')
# Register the subcommand on the CLI group.
tool.add_command(build)
# application commands
@click.command()
@click.argument('pdf_dir')
@click.argument('zone_data_file')
def run_evaluation(pdf_dir, zone_data_file):
    """Run evaluation on a PDF dir"""
    # Guard: bail out early with a red error on an invalid input path.
    if not os.path.isdir(pdf_dir):
        click.secho("Input MUST be a valid directory (got {})".format(pdf_dir), fg='red', err=True)
        return
    # init working dirs
    path.init_dirs()
    # Extract signature regions from PDF file(s) and save in BMP dir.
    # NOTE(review): `omr` is used here but the top of the file reads
    # `from . import om,r` — confirm the intended module name.
    omr.extract_signatures_from_dir(pdf_dir, zone_data_file)
    # Generate vectorized images from raster images
    click.secho("Generating SVG files ...", fg='blue')
    omr.generate_svg(path.BMP_DIR)
    # Run SVG path evaluation, generate results CSV
    click.secho("Running evaluation ...", fg='blue')
    omr.run_evaluation(path.SVG_DIR)
    # Working dirs are useful for debugging; this removes them after a run.
    path.cleanup_dirs()
tool.add_command(run_evaluation)
# Allow running the CLI module directly.
if __name__ == '__main__':
    tool()
|
11,589 | b536616a86d463a2043575230ae2618d661d1300 | import ctypes
import pathlib
# Load the shared library that sits next to this module file.
libfile = ctypes.CDLL(
    # relative to this module file
    str(pathlib.Path(__file__).parent.absolute() / 'libcrashme.so')
)
# Bind the exported C function; it takes no arguments.
c_crash_me = libfile.crash_me
c_crash_me.argtypes = []
def crash_me() -> None:
    """Invoke the native crash_me() from libcrashme.so.

    Takes no arguments and returns nothing; judging by the name it is
    expected to crash the process, but the behaviour is defined entirely
    by the C side.
    """
    c_crash_me()
__all__ = [ 'crash_me' ]
|
11,590 | f9e80375b17cb3704b8ed808e9d91f47e84dfd78 | # Extract a shapefile from a gzipped tar archive
# https://github.com/GeospatialPython/Learning/raw/master/hancock.zip
import tarfile

# Use a context manager so the archive is closed even if extraction fails
# (the original left `tar` open on error).
# NOTE(review): extractall() on an untrusted archive is vulnerable to path
# traversal; pass filter="data" (Python 3.12+) or vet members for such input.
with tarfile.open("hancock.tar.gz", "r:gz") as tar:
    tar.extractall()
|
11,591 | af63447ebf21a36aed88a3069dddcdbe2b854da2 | from flask import Flask, jsonify, request
from hardwareController import *
from datetime import datetime
from apscheduler.schedulers.blocking import BlockingScheduler
import _thread
from apscheduler.schedulers.asyncio import AsyncIOScheduler
app = Flask(__name__)
# In-memory state (lost on restart): scheduled feed events, and the date of
# the last recorded water change ('none' until one is logged).
feedevents = []
waterChangeLog = 'none'
@app.route("/feedevents", methods=['GET', 'POST'])
def feedEventsGetAndPost():
    """List all feed events (GET) or append a new one (POST)."""
    global feedevents
    if request.method == 'GET':
        # An empty schedule is reported as the literal string "none".
        return jsonify(feedevents) if feedevents else "none"
    # POST: ids are dense, so the new id is simply the current length.
    feedevents.append({
        'id': len(feedevents),
        'feedtime': request.form['feedtime'],
        'feedduration': request.form['feedduration'],
    })
    return jsonify(feedevents)
@app.route("/feedevents/<id>", methods=['PUT', 'DELETE'])
def feedEventsPutAndDelete(id):
    """Update (PUT) or remove (DELETE) the feed event with the given id.

    `id` arrives as a string from the URL.  Returns the full event list as
    JSON, or the literal string "none" when the id is out of range.
    """
    global feedevents
    if request.method == 'PUT':
        updateFeedTime = request.form['feedtime']
        updateDuration = request.form['feedduration']
        # Ids are kept dense (0..len-1), so the bound check doubles as an
        # existence check.
        if len(feedevents) > int(id) and len(feedevents) > 0:
            for element in feedevents:
                print(element)
                if element['id'] == int(id):
                    element['feedtime'] = updateFeedTime
                    element['feedduration'] = updateDuration
                    break
            return jsonify(feedevents)
        else:
            return "none"
    else: # delete request
        if len(feedevents) > int(id) and len(feedevents) > 0:
            element2Remove = None
            for element in feedevents:
                if element['id'] == int(id):
                    element2Remove = element
                    feedevents.remove(element)
                    break;
            # Re-number remaining events so ids stay dense after removal.
            # NOTE(review): this only shifts elements whose id equals
            # prevId+1 at each step — confirm it renumbers every trailing
            # element as intended.
            prevId = 0
            for element in feedevents:
                if element['id'] == prevId + 1:
                    element['id'] = prevId
                prevId += 1
            return jsonify(feedevents)
        else:
            return "none"
@app.route("/feedaction", methods=['GET'])
def feedAction():
    """Trigger the feed valve immediately.

    With no parameters the valve runs for its default duration; with a
    `feedduration` form field it runs for that many units.  Returns
    'success' or 'wrong request'.
    """
    try:
        if len(request.form) == 0:
            # No parameters: run the valve with its default duration.
            pwm = valve_init()
            valve_action(pwm)
            return 'success'
        elif request.form['feedduration'] is not None:
            pwm = valve_init()
            valve_action(pwm, int(request.form['feedduration']))
            return 'success'
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Any hardware, key, or parse failure maps to
        # the same client-visible error string.
        return 'wrong request'
@app.route("/waterchangelog", methods=['GET', 'POST'])
def changeLogs():
    """Report (GET) or record (POST) the date of the last water change."""
    global waterChangeLog
    if request.method == 'POST':
        # Stamp today's date; the GET path simply echoes the stored value.
        waterChangeLog = datetime.now().strftime('%Y,%b,%d')
        print(waterChangeLog)
    return waterChangeLog
def feedevents_executor():
    """Fire the valve for every feed event scheduled at the current minute."""
    global feedevents
    now = datetime.now().strftime("%H,%M")
    for event in feedevents:
        # Stored times are 12-hour ("%I:%M%p"); normalise to 24-hour for
        # comparison with the current minute.
        due = datetime.strptime(event['feedtime'], "%I:%M%p").strftime("%H,%M")
        if due == now:
            valve_action(valve_init(), int(event['feedduration']))
def setupFeedEventsExecutor(threadName, delay):
    """Background-thread loop: poll every 30 seconds and run due feed events.

    threadName/delay are unused; they only satisfy the
    _thread.start_new_thread(fn, args) calling convention.
    NOTE(review): `time` is not imported in this module directly — presumably
    it arrives via `from hardwareController import *`; confirm.
    """
    # If I am using scheduler, the task is running on a separate process
    # the global variable "feedevents" are not supported to shared between processes
    #scheduler = AsyncIOScheduler()
    #scheduler.add_executor('processpool')
    #scheduler.add_job(feedevents_executor, 'interval', seconds=5)
    #try:
    #    scheduler.start()
    #    print('scheduler started!')
    #except (KeyboardInterrupt, SystemExit):
    #    pass
    # An issue here ! I don't have more time to dig this out
    # But thread is running twice, and one of twice cannot see feedevents' update
    while True:
        time.sleep(30)
        feedevents_executor()
if (__name__ == "__main__"):
    # Start the 30s polling thread, then the dev server on all interfaces.
    # NOTE(review): debug=True enables the reloader, which re-executes the
    # module and can start the polling thread twice (see the comment in
    # setupFeedEventsExecutor about the duplicated thread).
    _thread.start_new_thread(setupFeedEventsExecutor, ("Thread-1", 0, ))
    app.run(host='0.0.0.0', port=5000, debug=True)
|
11,592 | 2ece032748d38e8efbf9c401919aa32c52069450 | import socket
# Minimal interactive TCP client: forward each non-empty stdin line to the
# server and print the (utf-8) reply.  Runs until interrupted; the socket is
# never closed explicitly.
HOST,PORT=('127.0.0.1',8080)
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((HOST,PORT))
while True:
    s_data=input('==>:').strip()
    if not s_data:continue  # ignore blank input
    s.send(s_data.encode('utf-8'))
    # NOTE(review): assumes the whole reply arrives in a single 1024-byte recv.
    rec_data=s.recv(1024)
    print(rec_data.decode('utf-8'))
'''
Issue an HTTP HEAD request with the requests library and extract a few
HTTP header fields from the response.
'''
import requests
resp = requests.head('http://www.python.org/index.html')
status = resp.status_code
# Header lookup in requests is case-insensitive.
last_modified = resp.headers['last-modified']
content_type = resp.headers['content-type']
content_length = resp.headers['content-length']
|
def CountVowelConstChar(S1):
    """Count lowercase vowels, other letters, and non-letters in S1.

    Returns a tuple (vowels, consonants, non_alpha_chars).  Note that
    uppercase vowels are counted as consonants, matching the original
    lowercase-only vowel check.
    """
    count_v = count_c = count_ch = 0
    # Iterate characters directly instead of the original while/index loop,
    # and use membership in 'aeiou' instead of a chained-or comparison.
    for ch in S1:
        if not ch.isalpha():
            count_ch += 1
        elif ch in 'aeiou':
            count_v += 1
        else:
            count_c += 1
    return count_v, count_c, count_ch
def main():
    """Prompt for a string and report its vowel/consonant/other counts."""
    # BUG FIX: was `eval(input(...))` — evaluating raw user input is a
    # code-injection hole and forced users to type a quoted string.  Read
    # the line as plain text instead.
    S1 = input("Enter an alpha-numeric string with characters :: ")
    count_v, count_c, count_ch = CountVowelConstChar(S1)
    print("The string has {} vowels, {} consonants and {} characters . ".format(count_v, count_c, count_ch))
# Run the interactive prompt only when executed as a script.
if __name__ == '__main__':
    main()
|
class Configuration:
    """Flask application configuration (development settings)."""
    DEBUG=True
    # Disable the SQLAlchemy event system to avoid its overhead.
    SQLALCHEMY_TRACK_MODIFICATIONS=False
    # NOTE(review): credentials are hard-coded; move to environment variables.
    SQLALCHEMY_DATABASE_URI='mysql+mysqlconnector://root:1111@localhost/database1'
    SECRET_KEY='SECRET_KEY'
    ### flask security ###
    # NOTE(review): the salt ends with a stray `]` — confirm it is intentional,
    # since changing it would invalidate existing password hashes.
    SECURITY_PASSWORD_SALT='$2b$12$ctTq5EgEL6MyWooqQGwCe..cGsgCpyXVN08QartxThUNRJRD/PIai]'
    SECURITY_PASSWORD_HASH='bcrypt'
11,596 | 126261649978dce20cfca899323ec4ec68319d8a | from ShapeDescriptor import *
class VanishPencil:
    """Holds the heaviest vanish ray(s) seen so far for one pencil."""

    def __init__(self, vanishRay=None):
        # vanishRays stays None until the first ray is provided.
        self.vanishRays = [vanishRay] if vanishRay else None

    def updateVanishRay(self, newVanishRay):
        """Keep only the heaviest ray(s) by crossRatioVectorLength; ties accumulate."""
        if not self.vanishRays:
            self.vanishRays = [newVanishRay]
            return
        incoming = newVanishRay.crossRatioVectorLength
        current = self.vanishRays[0].crossRatioVectorLength
        if incoming > current:
            self.vanishRays = [newVanishRay]
        elif incoming == current:
            self.vanishRays.append(newVanishRay)
        # A lighter ray is ignored.
class VanishPencilsTable:
    """Maps pencil_id -> VanishPencil, accumulating the best vanish rays."""
    def __init__(self):
        # {pencil_id_1: vanishPencil_1, pencil_id_2: vanishPencil_2, ...}
        self.pencils = {}
    def getPencils(self):
        """Return the (pencil_id, VanishPencil) pairs (a dict items view)."""
        return self.pencils.items()
    def getVanishPoints(self):
        """Flatten every stored ray into (vanishPoint, pencil_id) pairs."""
        vanishPoints = []
        for vanishPencil in self.pencils.values():
            rays = vanishPencil.vanishRays
            for ray in rays:
                vanishPoints.append((ray.vanishPoint, ray.pencil_id))
        return vanishPoints
    def getVirtualPoints(self):
        """Average each pencil's ray vanish points into one virtual point.

        Returns [(WeightedPoint, pencil_id), ...].
        NOTE(review): wm and iDi retain only the LAST ray's weight and
        pencil_id per pencil — confirm that is intended rather than an
        aggregate.
        """
        virtualPoints = []
        for vanishPencil in self.pencils.values():
            rays = vanishPencil.vanishRays
            (xm, ym, wm) = (0, 0, 0)
            n = len(rays)
            for ray in rays:
                (vPi, iDi) = (ray.vanishPoint, ray.pencil_id)
                (xi, yi) = vPi.toTuple()
                wm = vPi.w
                xm += xi
                ym += yi
            virtualPoints.append((WeightedPoint(xm/n, ym/n, wm), iDi))
        print("virtualPoints = ", virtualPoints)
        return virtualPoints
    def updatePencil(self, pencil_id, vanishRay):
        """Route a new ray to its pencil, creating the pencil on first use."""
        key = pencil_id
        if key in self.pencils:
            self.pencils[key].updateVanishRay(vanishRay)
        else:
            value = VanishPencil(vanishRay)
            self.pencils.update({key:value})
11,597 | 75757d348fa4a7c564d510051894e451f090abc8 | __author__ = 'Chelsea | Michael'
import unittest
from Conect4_View import View
from unittest.mock import patch
from io import StringIO
class TestViewShowBoard(unittest.TestCase):
    """ Checks the visual aspect of the game for the user interface """

    def setUp(self):
        """Inits empty grid for testing"""
        self.theView = View()

    def tearDown(self):
        """ Closes the view """
        del self.theView

    @patch('sys.stdout', new_callable=StringIO)
    def test_show_the_board(self, mock_stdout):
        """visually represent the grid as a connect 4 game """
        # Expected rendering of an empty 7-column x 6-row board.
        # BUG FIX: the original literal ended with a trailing backslash,
        # which spliced the following statement into the assignment and
        # made this whole module a SyntaxError.
        currentBoard = \
            "\n"\
            " 1 2 3 4 5 6 7\n"\
            "| | | | | | | |\n"\
            "| | | | | | | |\n"\
            "| | | | | | | |\n"\
            "| | | | | | | |\n"\
            "| | | | | | | |\n"\
            "| | | | | | | |\n"\
            "---------------\n"\
            " ^ ^ \n"
        self.theView.show_board([[" "] * 6 for x in range(7)])
        self.assertEqual(mock_stdout.getvalue(), currentBoard)

    @patch('sys.stdout', new_callable=StringIO)
    def show_one_row_in_board(self, mock_stdout):
        # Not collected by the runner (name lacks the test_ prefix);
        # kept as a placeholder.
        pass
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
11,598 | 100d1af34b3fd24243fa1d9d471b687ffa6120b0 | #! /usr/bin/env python
#For Python, this file uses encoding: utf-8
import sys
import numpy as np
import math as mat
from tabulate import tabulate
def esBroadcast(packet):
    """Return True when the packet's destination is the broadcast MAC."""
    broadcast_mac = "ff:ff:ff:ff:ff:ff"
    return packet.dst == broadcast_mac
def entropia( probaPorSimbolo ):
    """Shannon entropy (in bits) of a {symbol: probability} distribution.

    Returns 0 for an empty distribution.
    """
    # The keys are irrelevant here, so iterate the probabilities directly
    # (the original iterated .items() and discarded the key) and fold the
    # accumulation loop into a single sum().
    return -sum(p * mat.log(p, 2) for p in probaPorSimbolo.values())
def informacionPorSimbolo(probaPorSimbolo):
    """Self-information -log2(p) for every symbol of the distribution."""
    # Build the {symbol: information} map in one expression instead of the
    # original accumulate-into-dict loop.
    return dict((simbolo, (-1) * mat.log(p, 2))
                for simbolo, p in probaPorSimbolo.items())
# Format: tabla[simbolo, probabilidad, informacion]
def rankearDistinguidosXInformacion(tablaOrdenadaXInfo):
    """Print a ranking of 'distinguished' symbols by information content.

    Class A: information at or below the first quartile (most probable).
    Class B: information at or above the third quartile (least probable).
    Expects the table sorted by information, ascending.  (Python 2 code.)
    """
    informacionesOrdenadas = []
    for linea in tablaOrdenadaXInfo:
        informacionesOrdenadas.append(linea[2])
    p1 = np.percentile(informacionesOrdenadas, 25)
    p3 = np.percentile(informacionesOrdenadas, 75)
    distinguidosA = []
    distinguidosB = []
    for linea in tablaOrdenadaXInfo:
        if linea[2] <= p1:
            distinguidosA.append(linea[0])
        elif linea[2] >= p3:
            # Insert at the front so class B lists most-distinguished first.
            distinguidosB.insert(0, linea[0])
    # Pair the two columns row by row, padding the shorter with dashes.
    ranking = []
    i = 0
    while i < max(len(distinguidosA), len(distinguidosB)):
        if i < len(distinguidosA) and i < len(distinguidosB):
            ranking.append([distinguidosA[i], distinguidosB[i]])
        elif i >= len(distinguidosA):
            ranking.append(["-----------", distinguidosB[i]])
        else:
            ranking.append([distinguidosA[i], "-----------"])
        i += 1
    print "\nRanking de simbolos distinguidos segun la informacion que aportan acerca de la fuente..."
    print "Distinguidos clase A: Simbolos cuya informacion se encuentra por debajo del primer percentil. A menor informacion, mas distinguido entre los de la clase A."
    print "Distinguidos clase B: Simbolos cuya informacion se encuentra por encima del tercer percentil. A mayor informacion, mas distinguido entre los de la clase B.\n"
    print(tabulate(ranking, headers=['Distinguidos clase A (de mas distinguido a menos distinguido)', 'Distinguidos clase B (de mas distinguido a menos distinguido)']))
from scapy.all import *
def main(archivo, modeloAUtilizar):
    """Read a pcap file and tally symbol counts under one of two source models.

    Model 1: every packet is a symbol (BROADCAST|UNICAST, protocol name).
    Model 2: only broadcast ARP packets; the symbol is the queried IP (pdst).
    Returns (probaPorSimbolo, informacionXSimbolo, totalDePaquetes,
             broadcastCount, protocolos).  (Python 2 / scapy code.)
    """
    print "Leyendo archivo..."
    pcapFile = rdpcap( archivo )
    totalDePaquetes = 0
    broadcastCount = 0
    unicastCount = 0
    protocolos = set()
    simbolosPosibles = set()
    contadorDeSimbolos = {}
    broadcast = "BROADCAST"
    unicast = "UNICAST"
    print "Analizando la fuente..."
    if (modeloAUtilizar == 1):
        totalDePaquetes = len(pcapFile)
        for packet in pcapFile:
            primerComponente = ""
            protocolo = packet.payload.name
            if esBroadcast(packet):
                broadcastCount += 1
                primerComponente = broadcast
            else:
                unicastCount += 1
                primerComponente = unicast
            protocolos.add(protocolo)
            # Record both possible symbols for this protocol.
            simbolosPosibles.add( (broadcast, protocolo) )
            simbolosPosibles.add( (unicast, protocolo) )
            simbolo = (primerComponente, protocolo)
            if simbolo in contadorDeSimbolos:
                contadorDeSimbolos[simbolo] += 1
            else:
                contadorDeSimbolos[simbolo] = 1
    elif (modeloAUtilizar == 2):
        for packet in pcapFile:
            # Only broadcast ARP requests count under model 2.
            if packet.payload.name == "ARP" and esBroadcast(packet):
                totalDePaquetes += 1
                simbolo = packet.payload.pdst
                simbolosPosibles.add(simbolo)
                if simbolo in contadorDeSimbolos:
                    contadorDeSimbolos[simbolo] += 1
                else:
                    contadorDeSimbolos[simbolo] = 1
    else:
        # Never reached in practice because argv is validated before calling,
        # but kept as a safety net.
        print "Uso incorrecto, el segundo parametro deber ser 1 o 2."
        print "Uso: python script.py pcapFile modeloAUtilizar(1/2)\nDonde modeloAUtilizar es 1 si no se distinguen los host y 2 en caso contrario. "
        sys.exit()
    # Relative frequency per symbol, then its self-information.
    probaPorSimbolo = dict((key, float(value)/totalDePaquetes) for (key,value) in contadorDeSimbolos.items())
    informacionXSimbolo = informacionPorSimbolo(probaPorSimbolo)
    return (probaPorSimbolo,informacionXSimbolo,totalDePaquetes,broadcastCount,protocolos)
def armarTabla(probabilidades, informaciones, cantidadDePaquetes, cantidadBroadcast, protocolos):
    """Print the symbol/probability/information table plus entropy stats.

    Optionally (interactive S/N prompt) prints the distinguished-symbol
    ranking and exits.  (Python 2 code.)
    NOTE(review): reads sys.argv[2] directly to decide whether to print the
    broadcast percentage — a hidden global dependency; confirm intended.
    """
    # Output section
    entropiaMuestral = entropia(probabilidades)
    entropiaMaxima = 0
    if len(probabilidades) > 0:
        entropiaMaxima = mat.log(len(probabilidades), 2)
    print "\nSize de la muestra: " + str(cantidadDePaquetes)
    print " "
    tabla = []
    for s, p in probabilidades.items():
        tabla.append([s, p, informaciones[s]])
    # Sort by information, ascending.
    tabla.sort(key=lambda linea : linea[2], reverse = False)
    print(tabulate(tabla, headers=['Simbolo', 'Probabilidad', 'Informacion']))
    print "\nEntropia muestral: "
    print "\t\t", entropiaMuestral
    print "Entropia Maxima: "
    print "\t\t", entropiaMaxima
    if ( int(sys.argv[2] ) == 1):
        print "Porcentaje de tráfico broadcast: "
        print "\t\t", str(float(cantidadBroadcast)/cantidadDePaquetes) + "%"
    print " "
    if (len(probabilidades) > 0):
        print "¿Desea ver un ranking de los simbolos distinguidos?(S/N)"
        respuesta = ""
        while True:
            respuesta = raw_input()
            if respuesta == "S":
                rankearDistinguidosXInformacion(tabla)
                # Higher information corresponds to lower probability.
                sys.exit()
            elif respuesta == "N":
                sys.exit()
            else:
                print "Entrada inválida."
if __name__ == '__main__':
    # Validate argv up front: need a pcap path and a model selector (1 or 2).
    if len(sys.argv) != 3 or (int(sys.argv[2]) != 1 and int(sys.argv[2]) != 2):
        print "Uso: python script.py pcapFile modeloAUtilizar(1/2)\nDonde modeloAUtilizar es 1 si no se distinguen los host y 2 en caso contrario. "
        sys.exit()
    else:
        armarTabla(*main(sys.argv[1], int(sys.argv[2])))
|
11,599 | 17ed8e2e44467cb16afef5ab964897921f1afbcf | import pandas as pd
from geopy.geocoders import Yandex
import time,os.path
# Single shared Yandex geocoder, 1-second timeout per request.
geolocator = Yandex( timeout=1) # proxies = proxies -
def formAddress(distr, street, house, korpus):
    """Build a full Saint Petersburg address string from its components.

    House is appended only when present and not the literal 'NaN'; korpus
    only when present, not 'nan' (any case), and not '1'.
    """
    parts = str(distr) + ' район, ' + ' ' + str(street)
    if house != '' and house != 'NaN':
        parts += ', ' + str(house)
    if korpus != '' and str(korpus).lower() != 'nan' and str(korpus) != '1':
        parts += 'к' + str(korpus)
    return "Санкт-Петербург, {0}".format(parts)
def getGeocoderLatLon_crim(distr, street, house, korpus):
    """Geocode one address via Yandex; return (lat, lon) or (0.0, 0.0).

    NOTE(review): formAddress() already prefixes 'Санкт-Петербург, ', and
    the format below prefixes it again, so the query contains the city
    twice — confirm whether that is intended.
    NOTE(review): the except branch breaks immediately with location=None;
    despite the printed message, no wait/retry actually happens (the sleep
    is commented out).
    """
    oneAddr = formAddress(distr,street,house,korpus)
    print(oneAddr)
    #oneAddr = str(distr)+' район, '+' '+str(street)
    #if house!='' and house != 'NaN':
    #    oneAddr += ', дом '+str(house)
    #else:
    #    if korpus!='' and korpus != 'NaN':
    #        oneAddr += ', корпус '+str(korpus)
    #print(oneAddr)
    while True:
        try:
            addr = "Санкт-Петербург, {0}".format(oneAddr)
            location = geolocator.geocode(addr)
            # Throttle to roughly one request per second.
            time.sleep(1)
            break
        except:
            print("Network Error!! Wait five seconds!")
            #time.sleep(5)
            location = None
            break
    if (location):
        return (location.latitude, location.longitude)
    else:
        return (0.0, 0.0)
def getLatLon(distr, addr, house, korpus, objaddr_detected_dic):
    """Resolve lat/lon through the cache dict, falling back to the geocoder.

    Returns (latlon, cache) so the caller can keep the updated cache.
    """
    # The uppercased full address is the cache key.
    key = formAddress(distr, addr, house, korpus).upper()
    if key not in objaddr_detected_dic:
        # Cache miss: geocode once and remember the answer.
        objaddr_detected_dic[key] = getGeocoderLatLon_crim(distr, addr, house, korpus)
    return objaddr_detected_dic[key], objaddr_detected_dic
# for filename in ['0_Жительства_1.xls', '0_Жительства_2.xls']:
for filename in ['0_Жительства_1_fix.xls']:
    address_frame = pd.read_excel('input\\' + filename)
    # *** Resume a previous partial run part-way through the sheet. ***
    address_frame = address_frame[24997:]  # TODO: 4000..4100 - second file; first file - check end (div 1000?)
    n_rows = address_frame.shape[0]  # renamed from `len`, which shadowed the builtin
    objaddr_detected_dic = {}  # cache of coordinates already resolved
    for index, row in address_frame.iterrows():
        # print("{} from {}".format(index, n_rows))
        latlon_new, objaddr_detected_dic = getLatLon(row['district'], row['street'], row['house'], row['corpus'], objaddr_detected_dic)
        address_frame.loc[index, 'latitude'] = latlon_new[0]
        address_frame.loc[index, 'longitude'] = latlon_new[1]
        print(index)
# Write the full annotated frame once at the end.
address_frame.to_excel('output\\database2part.xlsx')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.